hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f71f46e9b66f50d5da46a8294920b9f874abe804 | 7,438 | py | Python | oauth2_provider/oauth2_backends.py | Transparent-CDN/django-oauth-toolkit | 0fb3d5a959ef2108c606e71064986b239540cab5 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2020-02-28T11:09:33.000Z | 2020-02-28T11:09:33.000Z | oauth2_provider/oauth2_backends.py | Transparent-CDN/django-oauth-toolkit | 0fb3d5a959ef2108c606e71064986b239540cab5 | [
"BSD-2-Clause-FreeBSD"
] | 4 | 2019-03-22T17:06:36.000Z | 2019-06-20T02:41:33.000Z | oauth2_provider/oauth2_backends.py | drchrono/django-oauth-toolkit | 846ab0ba8acaa3e4870b424700544aa6329511e4 | [
"BSD-2-Clause-FreeBSD"
] | 1 | 2019-10-19T01:03:44.000Z | 2019-10-19T01:03:44.000Z | import json
from urllib.parse import urlparse, urlunparse
from oauthlib import oauth2
from oauthlib.common import quote, urlencode, urlencoded
from .exceptions import FatalClientError, OAuthToolkitError
from .settings import oauth2_settings
class OAuthLibCore(object):
    """
    Thin adapter between Django and oauthlib.

    Translates a ``django.http.HttpRequest`` into the
    ``(uri, http_method, body, headers)`` tuple oauthlib expects, delegates
    to an ``oauthlib.oauth2.Server`` instance, and converts oauthlib errors
    into this package's :class:`FatalClientError` / :class:`OAuthToolkitError`.
    """
    def __init__(self, server=None):
        """
        :param server: An instance of oauthlib.oauth2.Server class.
            If omitted, one is built from the configured
            ``OAUTH2_SERVER_CLASS`` and ``OAUTH2_VALIDATOR_CLASS`` settings.
        """
        self.server = server or oauth2_settings.OAUTH2_SERVER_CLASS(oauth2_settings.OAUTH2_VALIDATOR_CLASS())
    def _get_escaped_full_path(self, request):
        """
        Django considers "safe" some characters that aren't so for oauthlib.
        We have to search for them and properly escape.
        """
        parsed = list(urlparse(request.get_full_path()))
        # parsed[4] is the query string; escape any character oauthlib's
        # urlencoded character set does not already consider safe.
        unsafe = set(parsed[4]).difference(urlencoded)
        for c in unsafe:
            parsed[4] = parsed[4].replace(c, quote(c, safe=b""))
        return urlunparse(parsed)
    def _get_extra_credentials(self, request):
        """
        Produce extra credentials for token response. This dictionary will be
        merged with the response.
        See also: `oauthlib.oauth2.rfc6749.TokenEndpoint.create_token_response`
        :param request: The current django.http.HttpRequest object
        :return: dictionary of extra credentials or None (default)
        """
        return None
    def _extract_params(self, request):
        """
        Extract parameters from the Django request object.
        Such parameters will then be passed to OAuthLib to build its own
        Request object. The body should be encoded using OAuthLib urlencoded.
        :param request: The current django.http.HttpRequest object
        :return: (uri, http_method, body, headers) tuple for oauthlib
        """
        uri = self._get_escaped_full_path(request)
        http_method = request.method
        headers = self.extract_headers(request)
        body = urlencode(self.extract_body(request))
        return uri, http_method, body, headers
    def extract_headers(self, request):
        """
        Extracts headers from the Django request object
        :param request: The current django.http.HttpRequest object
        :return: a dictionary with OAuthLib needed headers
        """
        headers = request.META.copy()
        # Drop WSGI file-like objects: oauthlib only needs plain header
        # values and these streams are not copyable/serializable.
        headers.pop("wsgi.input", None)
        headers.pop("wsgi.errors", None)
        if "HTTP_AUTHORIZATION" in headers:
            # oauthlib looks for the un-prefixed header name.
            headers["Authorization"] = headers["HTTP_AUTHORIZATION"]
        return headers
    def extract_body(self, request):
        """
        Extracts the POST body from the Django request object
        :param request: The current django.http.HttpRequest object
        :return: provided POST parameters as (key, value) pairs
        """
        return request.POST.items()
    def validate_authorization_request(self, request):
        """
        A wrapper method that calls validate_authorization_request on `server_class` instance.
        :param request: The current django.http.HttpRequest object
        :return: (scopes, credentials) from oauthlib
        :raises FatalClientError: on unrecoverable client errors
        :raises OAuthToolkitError: on other OAuth2 protocol errors
        """
        try:
            uri, http_method, body, headers = self._extract_params(request)
            scopes, credentials = self.server.validate_authorization_request(
                uri, http_method=http_method, body=body, headers=headers)
            return scopes, credentials
        except oauth2.FatalClientError as error:
            raise FatalClientError(error=error)
        except oauth2.OAuth2Error as error:
            raise OAuthToolkitError(error=error)
    def create_authorization_response(self, request, scopes, credentials, allow):
        """
        A wrapper method that calls create_authorization_response on `server_class`
        instance.
        :param request: The current django.http.HttpRequest object
        :param scopes: A list of provided scopes
        :param credentials: Authorization credentials dictionary containing
                           `client_id`, `state`, `redirect_uri`, `response_type`
        :param allow: True if the user authorize the client, otherwise False
        :return: (uri, headers, body, status) tuple
        """
        try:
            if not allow:
                raise oauth2.AccessDeniedError(
                    state=credentials.get("state", None))
            # add current user to credentials. this will be used by OAUTH2_VALIDATOR_CLASS
            credentials["user"] = request.user
            headers, body, status = self.server.create_authorization_response(
                uri=credentials["redirect_uri"], scopes=scopes, credentials=credentials)
            uri = headers.get("Location", None)
            return uri, headers, body, status
        except oauth2.FatalClientError as error:
            raise FatalClientError(error=error, redirect_uri=credentials["redirect_uri"])
        except oauth2.OAuth2Error as error:
            raise OAuthToolkitError(error=error, redirect_uri=credentials["redirect_uri"])
    def create_token_response(self, request):
        """
        A wrapper method that calls create_token_response on `server_class` instance.
        :param request: The current django.http.HttpRequest object
        :return: (uri, headers, body, status) tuple
        """
        uri, http_method, body, headers = self._extract_params(request)
        extra_credentials = self._get_extra_credentials(request)
        headers, body, status = self.server.create_token_response(uri, http_method, body,
                                                                  headers, extra_credentials)
        uri = headers.get("Location", None)
        return uri, headers, body, status
    def create_revocation_response(self, request):
        """
        A wrapper method that calls create_revocation_response on a
        `server_class` instance.
        :param request: The current django.http.HttpRequest object
        :return: (uri, headers, body, status) tuple
        """
        uri, http_method, body, headers = self._extract_params(request)
        headers, body, status = self.server.create_revocation_response(
            uri, http_method, body, headers)
        uri = headers.get("Location", None)
        return uri, headers, body, status
    def verify_request(self, request, scopes):
        """
        A wrapper method that calls verify_request on `server_class` instance.
        :param request: The current django.http.HttpRequest object
        :param scopes: A list of scopes required to verify so that request is verified
        :return: (valid, oauthlib_request) tuple
        """
        uri, http_method, body, headers = self._extract_params(request)
        valid, r = self.server.verify_request(uri, http_method, body, headers, scopes=scopes)
        return valid, r
class JSONOAuthLibCore(OAuthLibCore):
    """
    Extends the default OAuthLibCore to parse correctly application/json requests
    """
    def extract_body(self, request):
        """
        Extracts the JSON body from the Django request object
        :param request: The current django.http.HttpRequest object
        :return: provided POST parameters "urlencodable" (an items() view),
                 or an empty string when the body is missing or not JSON
        """
        try:
            body = json.loads(request.body.decode("utf-8")).items()
        except (AttributeError, ValueError):
            # AttributeError: no usable ``request.body`` (or the decoded JSON
            # is not a mapping); ValueError: body is not valid JSON.
            # Either way fall back to an empty body so urlencode() yields "".
            body = ""
        return body
def get_oauthlib_core():
    """
    Build and return an instance of
    `oauth2_provider.backends.OAuthLibCore`, wiring together the
    validator, server and backend classes configured in the package
    settings.
    """
    backend_cls = oauth2_settings.OAUTH2_BACKEND_CLASS
    server_cls = oauth2_settings.OAUTH2_SERVER_CLASS
    validator_cls = oauth2_settings.OAUTH2_VALIDATOR_CLASS
    return backend_cls(server_cls(validator_cls()))
| 37.565657 | 109 | 0.655956 | import json
from urllib.parse import urlparse, urlunparse
from oauthlib import oauth2
from oauthlib.common import quote, urlencode, urlencoded
from .exceptions import FatalClientError, OAuthToolkitError
from .settings import oauth2_settings
class OAuthLibCore(object):
def __init__(self, server=None):
self.server = server or oauth2_settings.OAUTH2_SERVER_CLASS(oauth2_settings.OAUTH2_VALIDATOR_CLASS())
def _get_escaped_full_path(self, request):
parsed = list(urlparse(request.get_full_path()))
unsafe = set(c for c in parsed[4]).difference(urlencoded)
for c in unsafe:
parsed[4] = parsed[4].replace(c, quote(c, safe=b""))
return urlunparse(parsed)
def _get_extra_credentials(self, request):
return None
def _extract_params(self, request):
uri = self._get_escaped_full_path(request)
http_method = request.method
headers = self.extract_headers(request)
body = urlencode(self.extract_body(request))
return uri, http_method, body, headers
def extract_headers(self, request):
headers = request.META.copy()
if "wsgi.input" in headers:
del headers["wsgi.input"]
if "wsgi.errors" in headers:
del headers["wsgi.errors"]
if "HTTP_AUTHORIZATION" in headers:
headers["Authorization"] = headers["HTTP_AUTHORIZATION"]
return headers
def extract_body(self, request):
return request.POST.items()
def validate_authorization_request(self, request):
try:
uri, http_method, body, headers = self._extract_params(request)
scopes, credentials = self.server.validate_authorization_request(
uri, http_method=http_method, body=body, headers=headers)
return scopes, credentials
except oauth2.FatalClientError as error:
raise FatalClientError(error=error)
except oauth2.OAuth2Error as error:
raise OAuthToolkitError(error=error)
def create_authorization_response(self, request, scopes, credentials, allow):
try:
if not allow:
raise oauth2.AccessDeniedError(
state=credentials.get("state", None))
credentials["user"] = request.user
headers, body, status = self.server.create_authorization_response(
uri=credentials["redirect_uri"], scopes=scopes, credentials=credentials)
uri = headers.get("Location", None)
return uri, headers, body, status
except oauth2.FatalClientError as error:
raise FatalClientError(error=error, redirect_uri=credentials["redirect_uri"])
except oauth2.OAuth2Error as error:
raise OAuthToolkitError(error=error, redirect_uri=credentials["redirect_uri"])
def create_token_response(self, request):
uri, http_method, body, headers = self._extract_params(request)
extra_credentials = self._get_extra_credentials(request)
headers, body, status = self.server.create_token_response(uri, http_method, body,
headers, extra_credentials)
uri = headers.get("Location", None)
return uri, headers, body, status
def create_revocation_response(self, request):
uri, http_method, body, headers = self._extract_params(request)
headers, body, status = self.server.create_revocation_response(
uri, http_method, body, headers)
uri = headers.get("Location", None)
return uri, headers, body, status
def verify_request(self, request, scopes):
uri, http_method, body, headers = self._extract_params(request)
valid, r = self.server.verify_request(uri, http_method, body, headers, scopes=scopes)
return valid, r
class JSONOAuthLibCore(OAuthLibCore):
def extract_body(self, request):
try:
body = json.loads(request.body.decode("utf-8")).items()
except AttributeError:
body = ""
except ValueError:
body = ""
return body
def get_oauthlib_core():
validator = oauth2_settings.OAUTH2_VALIDATOR_CLASS()
server = oauth2_settings.OAUTH2_SERVER_CLASS(validator)
return oauth2_settings.OAUTH2_BACKEND_CLASS(server)
| true | true |
f71f48f8e33574e8e90e99e4f9578c5f409fad74 | 946 | py | Python | setup.py | kellyjonbrazil/jtbl | 9bfc755bc964fbed59a4884bc4be605a5065f3d8 | [
"MIT"
] | 108 | 2020-03-10T13:22:03.000Z | 2022-03-30T03:09:38.000Z | setup.py | kellyjonbrazil/jtbl | 9bfc755bc964fbed59a4884bc4be605a5065f3d8 | [
"MIT"
] | 9 | 2020-03-08T00:44:38.000Z | 2022-02-15T19:36:04.000Z | setup.py | kellyjonbrazil/jtbl | 9bfc755bc964fbed59a4884bc4be605a5065f3d8 | [
"MIT"
] | 5 | 2020-03-10T11:34:18.000Z | 2021-08-02T10:57:43.000Z | import setuptools
# Use the README as the long description shown on the PyPI project page.
with open('README.md', 'r') as f:
    long_description = f.read()
# Package metadata and build configuration for the `jtbl` CLI tool.
setuptools.setup(
    name='jtbl',
    version='1.1.7',
    author='Kelly Brazil',
    author_email='kellyjonbrazil@gmail.com',
    description='A simple cli tool to print JSON and JSON Lines data as a table in the terminal.',
    install_requires=[
        'tabulate>=0.8.6'
    ],
    license='MIT',
    long_description=long_description,
    long_description_content_type='text/markdown',
    python_requires='>=3.6',
    url='https://github.com/kellyjonbrazil/jtbl',
    # Ship everything except test packages.
    packages=setuptools.find_packages(exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']),
    entry_points={
        'console_scripts': [
            # Installs a `jtbl` executable that dispatches to jtbl.cli:main.
            'jtbl=jtbl.cli:main'
        ]
    },
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Topic :: Utilities'
    ]
)
| 28.666667 | 98 | 0.616279 | import setuptools
with open('README.md', 'r') as f:
long_description = f.read()
setuptools.setup(
name='jtbl',
version='1.1.7',
author='Kelly Brazil',
author_email='kellyjonbrazil@gmail.com',
description='A simple cli tool to print JSON and JSON Lines data as a table in the terminal.',
install_requires=[
'tabulate>=0.8.6'
],
license='MIT',
long_description=long_description,
long_description_content_type='text/markdown',
python_requires='>=3.6',
url='https://github.com/kellyjonbrazil/jtbl',
packages=setuptools.find_packages(exclude=['*.tests', '*.tests.*', 'tests.*', 'tests']),
entry_points={
'console_scripts': [
'jtbl=jtbl.cli:main'
]
},
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Utilities'
]
)
| true | true |
f71f497fb7582513c2d45b7633de0c7c9d7f7303 | 3,186 | py | Python | talk_lib/tests/testtalk.py | allankellynet/mimas | 10025d43bba9e84f502a266760786842e7158a05 | [
"MIT"
] | null | null | null | talk_lib/tests/testtalk.py | allankellynet/mimas | 10025d43bba9e84f502a266760786842e7158a05 | [
"MIT"
] | 1 | 2020-02-05T13:00:29.000Z | 2020-02-05T13:00:29.000Z | talk_lib/tests/testtalk.py | allankellynet/mimas | 10025d43bba9e84f502a266760786842e7158a05 | [
"MIT"
] | null | null | null | #-----------------------------------------------------
# Mimas: conference submission and review system
# (c) Allan Kelly 2016-2020 http://www.allankelly.net
# Licensed under MIT License, see LICENSE file
# -----------------------------------------------------
import unittest
from google.appengine.ext import testbed
from speaker_lib import speaker
from talk_lib import talk
class TestTalk(unittest.TestCase):
    """Tests for the talk model: field access, per-speaker storage/retrieval,
    and directory-listing visibility.

    Runs against the Google App Engine `testbed` stubs (datastore and
    memcache), so no real services are touched.
    """
    def setUp(self):
        # Activate an in-memory GAE environment for each test.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
    def tearDown(self):
        # Deactivate so stub state does not leak between tests.
        self.testbed.deactivate()
    def test_field_access(self):
        """Title defaults to empty and round-trips assignment."""
        t = talk.Talk()
        self.assertEquals(t.title, "")
        t.title = "Wonderful"
        self.assertEquals(t.title, "Wonderful")
        # Also equal to the ASCII-encoded form of the same text.
        self.assertEquals(t.title, "Wonderful".encode('ascii', 'ignore'))
    def test_talk_fields(self):
        """Basic set/get of the title field."""
        t = talk.Talk()
        self.assertEquals(t.title, "")
        t.title = "Great talk"
        self.assertEquals(t.title, "Great talk")
    def test_store_retrieve(self):
        """Talks are retrievable by speaker email and deletions are reflected."""
        spk1 = speaker.make_new_speaker("who@email")
        spk1.put()
        # Two talks parented under the first speaker.
        t1 = talk.Talk(parent=spk1.key)
        t1.title = "Wonderful"
        t1.put()
        t2 = talk.Talk(parent=spk1.key)
        t2.title = "Great"
        t2.put()
        user1_talks = talk.all_user_talks_by_email(spk1.email)
        self.assertEquals(len(user1_talks), 2)
        # A second speaker's talks must not mix with the first's.
        spk2 = speaker.make_new_speaker("nobody@email")
        spk2.put()
        t3 = talk.Talk(parent=spk2.key)
        t3.title = "Smashing"
        t3.put()
        user2_talks = talk.all_user_talks_by_email(spk2.email)
        self.assertEquals(len(user2_talks), 1)
        # Deleting a talk removes it from the speaker's listing.
        t2.key.delete()
        user1_talks = talk.all_user_talks_by_email(spk1.email)
        self.assertEquals(len(user1_talks), 1)
    def test_store_retrieve_by_key(self):
        """Same retrieval semantics when looking talks up by speaker key."""
        spk1 = speaker.make_new_speaker("who@email")
        spk1.put()
        t1 = talk.Talk(parent=spk1.key)
        t1.title = "Wonderful"
        t1.put()
        t2 = talk.Talk(parent=spk1.key)
        t2.title = "Great"
        t2.put()
        user1_talks = talk.speaker_talks_by_key(spk1.key)
        self.assertEquals(len(user1_talks), 2)
        spk2 = speaker.make_new_speaker("nobody@email")
        spk2.put()
        t3 = talk.Talk(parent=spk2.key)
        t3.title = "Smashing"
        t3.put()
        user2_talks = talk.speaker_talks_by_key(spk2.key)
        self.assertEquals(len(user2_talks), 1)
        t2.key.delete()
        user1_talks = talk.all_user_talks_by_email(spk1.email)
        self.assertEquals(len(user1_talks), 1)
    def test_no_such_speaker(self):
        """Unknown email yields an empty talk list rather than an error."""
        talks = talk.all_user_talks_by_email("nosuch@nowhere")
        self.assertEquals(len(talks), 0)
    def test_directory_listing(self):
        """Talks are listed by default and can be hidden and re-shown."""
        spk1 = speaker.make_new_speaker("who@email")
        spk1.put()
        t1_key = talk.mk_talk(spk1.key, "Wonderful")
        t1 = t1_key.get()
        self.assertTrue(t1.is_listed())
        t1.hide_listing()
        self.assertFalse(t1.is_listed())
        t1.show_listing()
        self.assertTrue(t1.is_listed())
import unittest
from google.appengine.ext import testbed
from speaker_lib import speaker
from talk_lib import talk
class TestTalk(unittest.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
def tearDown(self):
self.testbed.deactivate()
def test_field_access(self):
t = talk.Talk()
self.assertEquals(t.title, "")
t.title = "Wonderful"
self.assertEquals(t.title, "Wonderful")
self.assertEquals(t.title, "Wonderful".encode('ascii', 'ignore'))
def test_talk_fields(self):
t = talk.Talk()
self.assertEquals(t.title, "")
t.title = "Great talk"
self.assertEquals(t.title, "Great talk")
def test_store_retrieve(self):
spk1 = speaker.make_new_speaker("who@email")
spk1.put()
t1 = talk.Talk(parent=spk1.key)
t1.title = "Wonderful"
t1.put()
t2 = talk.Talk(parent=spk1.key)
t2.title = "Great"
t2.put()
user1_talks = talk.all_user_talks_by_email(spk1.email)
self.assertEquals(len(user1_talks), 2)
spk2 = speaker.make_new_speaker("nobody@email")
spk2.put()
t3 = talk.Talk(parent=spk2.key)
t3.title = "Smashing"
t3.put()
user2_talks = talk.all_user_talks_by_email(spk2.email)
self.assertEquals(len(user2_talks), 1)
t2.key.delete()
user1_talks = talk.all_user_talks_by_email(spk1.email)
self.assertEquals(len(user1_talks), 1)
def test_store_retrieve_by_key(self):
spk1 = speaker.make_new_speaker("who@email")
spk1.put()
t1 = talk.Talk(parent=spk1.key)
t1.title = "Wonderful"
t1.put()
t2 = talk.Talk(parent=spk1.key)
t2.title = "Great"
t2.put()
user1_talks = talk.speaker_talks_by_key(spk1.key)
self.assertEquals(len(user1_talks), 2)
spk2 = speaker.make_new_speaker("nobody@email")
spk2.put()
t3 = talk.Talk(parent=spk2.key)
t3.title = "Smashing"
t3.put()
user2_talks = talk.speaker_talks_by_key(spk2.key)
self.assertEquals(len(user2_talks), 1)
t2.key.delete()
user1_talks = talk.all_user_talks_by_email(spk1.email)
self.assertEquals(len(user1_talks), 1)
def test_no_such_speaker(self):
talks = talk.all_user_talks_by_email("nosuch@nowhere")
self.assertEquals(len(talks), 0)
def test_directory_listing(self):
spk1 = speaker.make_new_speaker("who@email")
spk1.put()
t1_key = talk.mk_talk(spk1.key, "Wonderful")
t1 = t1_key.get()
self.assertTrue(t1.is_listed())
t1.hide_listing()
self.assertFalse(t1.is_listed())
t1.show_listing()
self.assertTrue(t1.is_listed())
| true | true |
f71f4d609651d9bc64373e010c165faa55a5f9cf | 3,278 | py | Python | beyond_tutorial/settings.py | shlior7/beyond-tutorial | 502618b125e9a81d334683b845b248fd750abc77 | [
"MIT"
] | null | null | null | beyond_tutorial/settings.py | shlior7/beyond-tutorial | 502618b125e9a81d334683b845b248fd750abc77 | [
"MIT"
] | null | null | null | beyond_tutorial/settings.py | shlior7/beyond-tutorial | 502618b125e9a81d334683b845b248fd750abc77 | [
"MIT"
] | null | null | null | """
Django settings for beyond_tutorial project.
Generated by 'django-admin startproject' using Django 4.0.2.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/4.0/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-s$+txx&pz8eeh$_+wbakb!i+1o%9ijf*=n0e6=k4d^ix_kfv7d'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project app: the message board.
    'msgboard.apps.MsgboardConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'beyond_tutorial.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        # Look for templates inside each installed app's templates/ dir.
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'beyond_tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
STATIC_URL = 'static/'
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| 27.546218 | 91 | 0.705613 |
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = 'django-insecure-s$+txx&pz8eeh$_+wbakb!i+1o%9ijf*=n0e6=k4d^ix_kfv7d'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'msgboard.apps.MsgboardConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'beyond_tutorial.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'beyond_tutorial.wsgi.application'
# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
STATIC_URL = 'static/'
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| true | true |
f71f4d8ff46ec6e4a8d5c7681ef34b9994b20203 | 13,826 | py | Python | xltable/expression.py | fkarb/xltable | 7a592642d27ad5ee90d2aa8c26338abaa9d84bea | [
"MIT"
] | 4 | 2017-03-09T20:04:35.000Z | 2020-01-18T16:24:33.000Z | xltable/expression.py | fkarb/xltable | 7a592642d27ad5ee90d2aa8c26338abaa9d84bea | [
"MIT"
] | 6 | 2017-12-05T13:22:10.000Z | 2018-01-29T13:50:27.000Z | xltable/expression.py | fkarb/xltable | 7a592642d27ad5ee90d2aa8c26338abaa9d84bea | [
"MIT"
] | 6 | 2017-10-26T16:44:27.000Z | 2021-08-16T19:39:21.000Z | """
Expressions for building excel formulas without having to use concrete positions.
"""
import operator
import re
class Expression(object):
    """
    Base class for all worksheet expressions.
    Expressions are used to build formulas referencing ranges in the
    worksheet by labels which are resolved to cell references when the
    worksheet is written out.
    Expressions may be combined using binary operators.

    Note: defining ``__eq__`` makes instances unhashable and means that
    ``==`` builds a formula (a ``BinOp``) rather than comparing objects.
    """
    def __init__(self, value=None):
        if value is not None:
            self.value = value
    def __add__(self, other):
        return BinOp(self, _make_expr(other), "+")
    def __sub__(self, other):
        return BinOp(self, _make_expr(other), "-")
    def __mul__(self, other):
        return BinOp(self, _make_expr(other), "*")
    def __truediv__(self, other):
        return BinOp(self, _make_expr(other), "/")
    def __lt__(self, other):
        return BinOp(self, _make_expr(other), "<")
    def __le__(self, other):
        return BinOp(self, _make_expr(other), "<=")
    def __eq__(self, other):
        return BinOp(self, _make_expr(other), "=")
    def __ne__(self, other):
        return BinOp(self, _make_expr(other), "<>")
    def __gt__(self, other):
        return BinOp(self, _make_expr(other), ">")
    def __ge__(self, other):
        return BinOp(self, _make_expr(other), ">=")
    def __and__(self, other):
        return BinOp(self, _make_expr(other), "&")
    def get_formula(self, workbook, row, col):
        """Return the Excel formula string ("=...") for this expression."""
        return "=%s" % self._strip(self.resolve(workbook, row, col))
    @property
    def value(self):
        """Set a calculated value for this Expression.
        Used when writing formulas using XlsxWriter to give cells
        an initial value when the sheet is loaded without being calculated.
        Defaults to 0 when no value has been set; nested Expressions are
        followed to their underlying value.
        """
        try:
            if isinstance(self.__value, Expression):
                return self.__value.value
            return self.__value
        except AttributeError:
            return 0
    @property
    def has_value(self):
        """return True if value has been set"""
        try:
            if isinstance(self.__value, Expression):
                return self.__value.has_value
            return True
        except AttributeError:
            return False
    @value.setter
    def value(self, value):
        self.__value = value
    @staticmethod
    def _strip(x):
        # Strip off the outer parentheses if they match.
        # NOTE(review): the greedy pattern assumes the whole string is
        # wrapped in one pair (as BinOp.resolve produces); it would mangle
        # input like "(a)+(b)" — presumed unreachable, confirm with BinOp.
        return re.sub(r"^\((.*)\)$", r"\1", x)
    def resolve(self, workbook, row, col):
        """Resolve to a formula fragment; implemented by subclasses.

        Signature fixed to (workbook, row, col) to match get_formula's
        call and every subclass implementation.
        """
        raise NotImplementedError("Expression.resolve")
class Cell(Expression):
    """
    Reference to a cell in a table.
    :param col: Column label this refers to.
    :param row: Row label this refers to, or None to use the current row.
    :param row_offset: Offset from the row, used when resolving.
    :param table: Name of table the column is in, if not in the same table this expression is in.
    Use "%s!%s" % (worksheet.name, table.name) if refering to a table in another worksheet
    :param col_fixed: If True when converted to an address the column will be fixed.
    :param row_fixed: If True when converted to an address the row will be fixed.
    """
    def __init__(self, col, row=None, row_offset=0, table=None, col_fixed=None, row_fixed=None, **kwargs):
        # col_fixed/row_fixed default to None (not False) so resolve() can
        # distinguish "unset" from an explicit choice.
        super(Cell, self).__init__(**kwargs)
        self.__col = col
        self.__row = row
        self.__row_offset = row_offset
        self.__table = table
        self.__col_fixed = col_fixed
        self.__row_fixed = row_fixed
    def resolve(self, workbook, row, col):
        """Return the absolute cell address string for this reference.

        `row`/`col` are the position of the cell the formula is being
        written into; `row` is used when no row label was given.
        """
        table, worksheet = workbook.get_table(self.__table)
        top, left = worksheet.get_table_pos(table.name)
        col_offset = table.get_column_offset(self.__col)
        # if the row has been given use fixed references in the formula unless they've been set explicitly
        if self.__row is not None:
            row = table.get_row_offset(self.__row)
            row_fixed = self.__row_fixed if self.__row_fixed is not None else True
            col_fixed = self.__col_fixed if self.__col_fixed is not None else True
        else:
            # otherwise use un-fixed addresses, unless set explicitly
            row_fixed = self.__row_fixed if self.__row_fixed is not None else False
            col_fixed = self.__col_fixed if self.__col_fixed is not None else False
        # _to_addr presumably builds a "Sheet!$A$1"-style address — it is
        # defined elsewhere in this module.
        return _to_addr(worksheet.name,
                        top + row + self.__row_offset,
                        left + col_offset,
                        row_fixed=row_fixed,
                        col_fixed=col_fixed)
class Column(Expression):
    """
    Reference to a column in a table, resolved to a "Sheet!start:end"
    range covering the column's cells.
    :param col: Column label this refers to.
    :param include_header: True if this expression should include the column header.
    :param table: Name of table the column is in, if not in the same table this expression is in.
    Use "%s!%s" % (worksheet.name, table.name) if refering to a table in another worksheet
    :param col_fixed: If True when converted to an address the column will be fixed.
    :param row_fixed: If True when converted to an address the row will be fixed.
    """
    def __init__(self, col, include_header=False, table=None, col_fixed=True, row_fixed=True, **kwargs):
        super(Column, self).__init__(**kwargs)
        self.__col = col
        self.__include_header = include_header
        self.__table = table
        self.__col_fixed = col_fixed
        self.__row_fixed = row_fixed
    def resolve(self, workbook, row, col):
        """Build the worksheet-qualified range string for this column."""
        table, worksheet = workbook.get_table(self.__table)
        first_row, first_col = worksheet.get_table_pos(table.name)
        column_idx = first_col + table.get_column_offset(self.__col)
        # Skip the header rows unless the header is explicitly included.
        start_row = first_row if self.__include_header else first_row + table.header_height
        end_row = first_row + table.height - 1
        start_addr = _to_addr(None, start_row, column_idx,
                              row_fixed=self.__row_fixed,
                              col_fixed=self.__col_fixed)
        end_addr = _to_addr(None, end_row, column_idx,
                            row_fixed=self.__row_fixed,
                            col_fixed=self.__col_fixed)
        return "'%s'!%s:%s" % (worksheet.name, start_addr, end_addr)
class Index(Expression):
    """
    Reference to a table's index, resolved to a "Sheet!start:end" range
    covering the index column's cells.
    :param include_header: True if this expression should include the index header.
    :param table: Name of table that owns the index, if not the table this expression is in.
    Use "%s!%s" % (worksheet.name, table.name) if refering to a table in another worksheet
    :param col_fixed: If True when converted to an address the column will be fixed.
    :param row_fixed: If True when converted to an address the row will be fixed.
    """
    def __init__(self, include_header=False, table=None, col_fixed=True, row_fixed=True, **kwargs):
        super(Index, self).__init__(**kwargs)
        self.__include_header = include_header
        self.__table = table
        self.__col_fixed = col_fixed
        self.__row_fixed = row_fixed
    def resolve(self, workbook, row, col):
        """Build the worksheet-qualified range string for the index column."""
        table, worksheet = workbook.get_table(self.__table)
        first_row, first_col = worksheet.get_table_pos(table.name)
        index_col = first_col + table.get_index_offset()
        # Skip the header rows unless the header is explicitly included.
        start_row = first_row if self.__include_header else first_row + table.header_height
        end_row = first_row + table.height - 1
        start_addr = _to_addr(None, start_row, index_col,
                              row_fixed=self.__row_fixed,
                              col_fixed=self.__col_fixed)
        end_addr = _to_addr(None, end_row, index_col,
                            row_fixed=self.__row_fixed,
                            col_fixed=self.__col_fixed)
        return "'%s'!%s:%s" % (worksheet.name, start_addr, end_addr)
class Range(Expression):
"""
Reference to a range in a table.
:param left_col: Left most column label this refers to.
:param right_col: Right most column label this refers to.
:param top_row: Top most row label, or None to select from the top of the table.
:param bottom_row: Bottom most row label, or None to select to the bottom of the table.
:param include_header: Include table header in the range.
:param table: Name of table the column is in, if not in the same table this expression is in.
Use "%s!%s" % (worksheet.name, table.name) if refering to a table in another worksheet
:param col_fixed: If True when converted to an address the column will be fixed.
:param row_fixed: If True when converted to an address the row will be fixed.
"""
def __init__(self,
left_col,
right_col,
top_row=None,
bottom_row=None,
include_header=True,
table=None,
col_fixed=True,
row_fixed=True,
**kwargs):
super(Range, self).__init__(**kwargs)
self.__left_col = left_col
self.__right_col = right_col
self.__top = top_row
self.__bottom = bottom_row
self.__include_header = include_header
self.__table = table
self.__col_fixed = col_fixed
self.__row_fixed = row_fixed
def resolve(self, workbook, row, col):
table, worksheet = workbook.get_table(self.__table)
top, left = worksheet.get_table_pos(table.name)
left_col_offset = table.get_column_offset(self.__left_col)
right_col_offset = table.get_column_offset(self.__right_col)
if self.__top is None:
top_row_offset = 0 if self.__include_header else table.header_height
else:
top_row_offset = table.get_row_offset(self.__top)
if self.__bottom is None:
bottom_row_offset = table.height - 1
else:
bottom_row_offset = table.get_row_offset(self.__bottom)
return "'%s'!%s:%s" % (
worksheet.name,
_to_addr(None, top + top_row_offset, left + left_col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed),
_to_addr(None, top + bottom_row_offset, left + right_col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed))
class Formula(Expression):
"""
Formula expression.
E.g. to create a formula like "=SUMPRODUCT(a, b)" where a and b
are columns in a table you would do::
formula = Formula("SUMPRODUCT", Column("col_a"), Column("col_b"))
:param name: Name of Excel function, eg "SUMPRODUCT".
:param args: Expressions to use as arguments to the function.
"""
def __init__(self, name, *args, **kwargs):
super(Formula, self).__init__(**kwargs)
self.__name = name
self.__args = args
def resolve(self, workbook, row, col):
def to_arg(x):
if x is None:
return ""
return self._strip(_make_expr(x).resolve(workbook, row, col))
args = [to_arg(x) for x in self.__args]
return "%s(%s)" % (self.__name, ",".join(args))
class ArrayExpression(Expression):
"""
Wraps an expression in an array formula (ie. surrounds it with {})
:param xltable.Expression expr: Expression to be wrapped
"""
def __init__(self, expr):
Expression.__init__(self, expr)
self.__expr = expr
def resolve(self, workbook, row, col):
return self.__expr.resolve(workbook, row, col)
def get_formula(self, workbook, row, col):
return "{%s}" % self.__expr.get_formula(workbook, row, col).strip("{}")
class BinOp(Expression):
"""
Internal use - composite expression combining two expression with a binary operator.
"""
__operators = {
"+": operator.add,
"-": operator.sub,
"*": operator.mul,
"/": operator.truediv,
">": operator.gt,
"<": operator.lt,
"<=": operator.le,
">=": operator.ge,
"!=": operator.ne,
"=": operator.eq,
"&": operator.and_,
"|": operator.or_,
}
def __init__(self, lhs, rhs, op, **kwargs):
super(BinOp, self).__init__(**kwargs)
self.__lhs = lhs
self.__rhs = rhs
self.__op = op
if lhs.has_value and rhs.has_value:
self.value = self.__operators[op](lhs.value, rhs.value)
def resolve(self, workbook, row, col):
return "(%s%s%s)" % (
self.__lhs.resolve(workbook, row, col),
self.__op,
self.__rhs.resolve(workbook, row, col))
class ConstExpr(Expression):
"""
Internal use - expression for wrapping constants.
"""
def __init__(self, value, **kwargs):
super(ConstExpr, self).__init__(**kwargs)
self.value = value
self.__value = value
def resolve(self, workbook, row, col):
if isinstance(self.__value, str):
return '"%s"' % self.__value
if isinstance(self.__value, bool):
return "TRUE" if self.__value else "FALSE"
return str(self.__value)
def _to_addr(worksheet, row, col, row_fixed=False, col_fixed=False):
"""converts a (0,0) based coordinate to an excel address"""
addr = ""
A = ord('A')
col += 1
while col > 0:
addr = chr(A + ((col - 1) % 26)) + addr
col = (col - 1) // 26
prefix = ("'%s'!" % worksheet) if worksheet else ""
col_modifier = "$" if col_fixed else ""
row_modifier = "$" if row_fixed else ""
return prefix + "%s%s%s%d" % (col_modifier, addr, row_modifier, row+1)
def _make_expr(x):
if isinstance(x, Expression):
return x
return ConstExpr(x)
| 36.67374 | 106 | 0.607913 | import operator
import re
class Expression(object):
def __init__(self, value=None):
if value is not None:
self.value = value
def __add__(self, other):
return BinOp(self, _make_expr(other), "+")
def __sub__(self, other):
return BinOp(self, _make_expr(other), "-")
def __mul__(self, other):
return BinOp(self, _make_expr(other), "*")
def __truediv__(self, other):
return BinOp(self, _make_expr(other), "/")
def __lt__(self, other):
return BinOp(self, _make_expr(other), "<")
def __le__(self, other):
return BinOp(self, _make_expr(other), "<=")
def __eq__(self, other):
return BinOp(self, _make_expr(other), "=")
def __ne__(self, other):
return BinOp(self, _make_expr(other), "<>")
def __gt__(self, other):
return BinOp(self, _make_expr(other), ">")
def __ge__(self, other):
return BinOp(self, _make_expr(other), ">=")
def __and__(self, other):
return BinOp(self, _make_expr(other), "&")
def get_formula(self, workbook, row, col):
return "=%s" % self._strip(self.resolve(workbook, row, col))
@property
def value(self):
try:
if isinstance(self.__value, Expression):
return self.__value.value
return self.__value
except AttributeError:
return 0
@property
def has_value(self):
try:
if isinstance(self.__value, Expression):
return self.__value.has_value
return True
except AttributeError:
return False
@value.setter
def value(self, value):
self.__value = value
@staticmethod
def _strip(x):
return re.sub("^\((.*)\)$", r"\1", x)
def resolve(self, workbook, worksheet, col, row):
raise NotImplementedError("Expression.resolve")
class Cell(Expression):
def __init__(self, col, row=None, row_offset=0, table=None, col_fixed=None, row_fixed=None, **kwargs):
super(Cell, self).__init__(**kwargs)
self.__col = col
self.__row = row
self.__row_offset = row_offset
self.__table = table
self.__col_fixed = col_fixed
self.__row_fixed = row_fixed
def resolve(self, workbook, row, col):
table, worksheet = workbook.get_table(self.__table)
top, left = worksheet.get_table_pos(table.name)
col_offset = table.get_column_offset(self.__col)
if self.__row is not None:
row = table.get_row_offset(self.__row)
row_fixed = self.__row_fixed if self.__row_fixed is not None else True
col_fixed = self.__col_fixed if self.__col_fixed is not None else True
else:
# otherwise use un-fixed addresses, unless set explicitly
row_fixed = self.__row_fixed if self.__row_fixed is not None else False
col_fixed = self.__col_fixed if self.__col_fixed is not None else False
return _to_addr(worksheet.name,
top + row + self.__row_offset,
left + col_offset,
row_fixed=row_fixed,
col_fixed=col_fixed)
class Column(Expression):
def __init__(self, col, include_header=False, table=None, col_fixed=True, row_fixed=True, **kwargs):
super(Column, self).__init__(**kwargs)
self.__col = col
self.__include_header = include_header
self.__table = table
self.__col_fixed = col_fixed
self.__row_fixed = row_fixed
def resolve(self, workbook, row, col):
table, worksheet = workbook.get_table(self.__table)
top, left = worksheet.get_table_pos(table.name)
col_offset = table.get_column_offset(self.__col)
row_offset = 0 if self.__include_header else table.header_height
return "'%s'!%s:%s" % (
worksheet.name,
_to_addr(None, top + row_offset, left + col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed),
_to_addr(None, top + table.height - 1, left + col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed))
class Index(Expression):
def __init__(self, include_header=False, table=None, col_fixed=True, row_fixed=True, **kwargs):
super(Index, self).__init__(**kwargs)
self.__include_header = include_header
self.__table = table
self.__col_fixed = col_fixed
self.__row_fixed = row_fixed
def resolve(self, workbook, row, col):
table, worksheet = workbook.get_table(self.__table)
top, left = worksheet.get_table_pos(table.name)
col_offset = table.get_index_offset()
row_offset = 0 if self.__include_header else table.header_height
return "'%s'!%s:%s" % (
worksheet.name,
_to_addr(None, top + row_offset, left + col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed),
_to_addr(None, top + table.height - 1, left + col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed))
class Range(Expression):
def __init__(self,
left_col,
right_col,
top_row=None,
bottom_row=None,
include_header=True,
table=None,
col_fixed=True,
row_fixed=True,
**kwargs):
super(Range, self).__init__(**kwargs)
self.__left_col = left_col
self.__right_col = right_col
self.__top = top_row
self.__bottom = bottom_row
self.__include_header = include_header
self.__table = table
self.__col_fixed = col_fixed
self.__row_fixed = row_fixed
def resolve(self, workbook, row, col):
table, worksheet = workbook.get_table(self.__table)
top, left = worksheet.get_table_pos(table.name)
left_col_offset = table.get_column_offset(self.__left_col)
right_col_offset = table.get_column_offset(self.__right_col)
if self.__top is None:
top_row_offset = 0 if self.__include_header else table.header_height
else:
top_row_offset = table.get_row_offset(self.__top)
if self.__bottom is None:
bottom_row_offset = table.height - 1
else:
bottom_row_offset = table.get_row_offset(self.__bottom)
return "'%s'!%s:%s" % (
worksheet.name,
_to_addr(None, top + top_row_offset, left + left_col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed),
_to_addr(None, top + bottom_row_offset, left + right_col_offset,
row_fixed=self.__row_fixed,
col_fixed=self.__col_fixed))
class Formula(Expression):
def __init__(self, name, *args, **kwargs):
super(Formula, self).__init__(**kwargs)
self.__name = name
self.__args = args
def resolve(self, workbook, row, col):
def to_arg(x):
if x is None:
return ""
return self._strip(_make_expr(x).resolve(workbook, row, col))
args = [to_arg(x) for x in self.__args]
return "%s(%s)" % (self.__name, ",".join(args))
class ArrayExpression(Expression):
def __init__(self, expr):
Expression.__init__(self, expr)
self.__expr = expr
def resolve(self, workbook, row, col):
return self.__expr.resolve(workbook, row, col)
def get_formula(self, workbook, row, col):
return "{%s}" % self.__expr.get_formula(workbook, row, col).strip("{}")
class BinOp(Expression):
__operators = {
"+": operator.add,
"-": operator.sub,
"*": operator.mul,
"/": operator.truediv,
">": operator.gt,
"<": operator.lt,
"<=": operator.le,
">=": operator.ge,
"!=": operator.ne,
"=": operator.eq,
"&": operator.and_,
"|": operator.or_,
}
def __init__(self, lhs, rhs, op, **kwargs):
super(BinOp, self).__init__(**kwargs)
self.__lhs = lhs
self.__rhs = rhs
self.__op = op
if lhs.has_value and rhs.has_value:
self.value = self.__operators[op](lhs.value, rhs.value)
def resolve(self, workbook, row, col):
return "(%s%s%s)" % (
self.__lhs.resolve(workbook, row, col),
self.__op,
self.__rhs.resolve(workbook, row, col))
class ConstExpr(Expression):
def __init__(self, value, **kwargs):
super(ConstExpr, self).__init__(**kwargs)
self.value = value
self.__value = value
def resolve(self, workbook, row, col):
if isinstance(self.__value, str):
return '"%s"' % self.__value
if isinstance(self.__value, bool):
return "TRUE" if self.__value else "FALSE"
return str(self.__value)
def _to_addr(worksheet, row, col, row_fixed=False, col_fixed=False):
addr = ""
A = ord('A')
col += 1
while col > 0:
addr = chr(A + ((col - 1) % 26)) + addr
col = (col - 1) // 26
prefix = ("'%s'!" % worksheet) if worksheet else ""
col_modifier = "$" if col_fixed else ""
row_modifier = "$" if row_fixed else ""
return prefix + "%s%s%s%d" % (col_modifier, addr, row_modifier, row+1)
def _make_expr(x):
if isinstance(x, Expression):
return x
return ConstExpr(x)
| true | true |
f71f4dd1d3b032910ffb279d50397befdfd25e03 | 4,066 | py | Python | benchmark/startQiskit_noisy2042.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_noisy2042.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_noisy2042.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=4
# total number=36
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.cx(input_qubit[0],input_qubit[3]) # number=13
prog.cx(input_qubit[0],input_qubit[3]) # number=17
prog.x(input_qubit[3]) # number=18
prog.cx(input_qubit[0],input_qubit[3]) # number=19
prog.cx(input_qubit[0],input_qubit[3]) # number=15
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=3
prog.h(input_qubit[3]) # number=4
prog.y(input_qubit[3]) # number=12
prog.h(input_qubit[0]) # number=5
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1]) # number=6
prog.h(input_qubit[2]) # number=7
prog.cx(input_qubit[0],input_qubit[3]) # number=27
prog.x(input_qubit[3]) # number=28
prog.h(input_qubit[3]) # number=30
prog.cz(input_qubit[0],input_qubit[3]) # number=31
prog.h(input_qubit[3]) # number=32
prog.cx(input_qubit[3],input_qubit[0]) # number=20
prog.h(input_qubit[0]) # number=33
prog.cz(input_qubit[3],input_qubit[0]) # number=34
prog.h(input_qubit[0]) # number=35
prog.z(input_qubit[3]) # number=24
prog.cx(input_qubit[3],input_qubit[0]) # number=25
prog.cx(input_qubit[3],input_qubit[0]) # number=22
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=9
prog.y(input_qubit[2]) # number=10
prog.y(input_qubit[2]) # number=11
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = FakeVigo()
sample_shot =8000
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy2042.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 34.457627 | 140 | 0.65396 |
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.cx(input_qubit[0],input_qubit[3])
prog.cx(input_qubit[0],input_qubit[3])
prog.x(input_qubit[3])
prog.cx(input_qubit[0],input_qubit[3])
prog.cx(input_qubit[0],input_qubit[3])
prog.h(input_qubit[1])
prog.h(input_qubit[2])
prog.h(input_qubit[3])
prog.y(input_qubit[3])
prog.h(input_qubit[0])
oracle = build_oracle(n-1, f)
prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
prog.h(input_qubit[1])
prog.h(input_qubit[2])
prog.cx(input_qubit[0],input_qubit[3])
prog.x(input_qubit[3])
prog.h(input_qubit[3])
prog.cz(input_qubit[0],input_qubit[3])
prog.h(input_qubit[3])
prog.cx(input_qubit[3],input_qubit[0])
prog.h(input_qubit[0])
prog.cz(input_qubit[3],input_qubit[0])
prog.h(input_qubit[0])
prog.z(input_qubit[3])
prog.cx(input_qubit[3],input_qubit[0])
prog.cx(input_qubit[3],input_qubit[0])
prog.h(input_qubit[3])
prog.h(input_qubit[0])
prog.y(input_qubit[2])
prog.y(input_qubit[2])
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
a = "111"
b = "0"
f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
prog = make_circuit(4,f)
backend = FakeVigo()
sample_shot =8000
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy2042.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.__len__(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| true | true |
f71f4e31ae0227fac2aaa54777be8a7905234464 | 4,056 | py | Python | code/edgesense copy/python/tutorial.py | albertocottica/microfoundations-community-management | d18e902a431213ed8a464ce92424e9a078f8f9e6 | [
"MIT"
] | 2 | 2020-04-08T20:47:42.000Z | 2020-08-24T08:29:42.000Z | code/edgesense copy/python/tutorial.py | albertocottica/microfoundations-community-management | d18e902a431213ed8a464ce92424e9a078f8f9e6 | [
"MIT"
] | null | null | null | code/edgesense copy/python/tutorial.py | albertocottica/microfoundations-community-management | d18e902a431213ed8a464ce92424e9a078f8f9e6 | [
"MIT"
] | 1 | 2020-05-10T15:06:24.000Z | 2020-05-10T15:06:24.000Z | # This program rearranges raw Egderyders data and builds two lists of dicts, userlist and ciommentslist, containing
# of the data needed to buildm graphs. These objects are then saved into files.
import os, sys
import json
import csv
from datetime import datetime
import time
import logging
import re
from edgesense.utils.logger_initializer import initialize_logger
from edgesense.utils.resource import mkdir
def parse_options(argv):
import getopt
basepath = '.'
timestamp = datetime.now()
tag = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
filename = tag+".csv"
try:
opts, args = getopt.getopt(argv,"s:f:",["source=","file="])
except getopt.GetoptError:
print 'tutorial.py -s <source dir> -f <output filename>'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'tutorial.py -s <source dir> -f <output filename>'
sys.exit()
elif opt in ("-s", "--source"):
basepath = arg
elif opt in ("-f", "--filename"):
filename = arg
destination_path = os.path.abspath(os.path.join(basepath, "output"))
mkdir(destination_path)
outuput_filename = os.path.join(destination_path, filename)
source_path = os.path.abspath(basepath)
logging.info("parsing files %(s)s to %(f)s" % {'s': source_path, 'f': outuput_filename})
return (source_path,outuput_filename)
def main(argv):
initialize_logger('./log')
source_path, outuput_filename = parse_options(argv)
logging.info("Tutorial result processing - started")
all_files = [ f for f in os.listdir(source_path) if os.path.isfile(os.path.join(source_path,f)) ]
runs = {}
timestamp = datetime.now()
base_run_id = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
fake_run_id = 1
for filename in all_files:
logging.info("Tutorial result processing - loading:"+os.path.join(source_path,filename))
f = open(os.path.join(source_path,filename), 'r')
try:
parsed = json.load(f)
if parsed.has_key('run_id'):
run_id = parsed['run_id']
else:
run_id = base_run_id+'--'+str(fake_run_id)
fake_run_id += 1
if not runs.has_key(run_id):
runs[run_id] = {}
run_obj = runs[run_id]
run_obj['run_id'] = run_id
if parsed.has_key('base'):
run_obj['base'] = parsed['base']
m = re.search('(\d\d\d\d)-(\d\d)-(\d\d)-\d\d-\d\d-\d\d$', parsed['base'])
if m:
run_obj['date'] = m.group(1)+"-"+m.group(2)+"-"+m.group(3)
if parsed.has_key('comments'):
run_obj['comments'] = parsed['comments'].encode('utf-8').strip()
# collect the tutorial answer results
if parsed.has_key('answers'):
for a in parsed['answers']:
run_obj[a['step']] = a['success']
# collect the tutorial survey results
if parsed.has_key('surveys'):
for a in parsed['surveys']:
run_obj[a['step']] = a['value']
except:
logging.info("Tutorial result processing - error parsing:"+os.path.join(source_path,filename))
# save the runs to a CSV file
logging.info("Tutorial result processing - Writing:"+outuput_filename)
headers = [ 'run_id','base', 'date', \
'betweenness_bin', 'relationship_percentage', \
'posts_percentage', 'comments_share', \
'modularity_increase', 'survey-1', \
'survey-2', 'survey-3', 'survey-4', \
'survey-5', 'comments']
with open(outuput_filename, 'wb') as f:
w = csv.DictWriter(f, headers)
w.writeheader()
w.writerows(runs.values())
logging.info("Tutorial result processing - Completed")
if __name__ == "__main__":
main(sys.argv[1:])
| 35.578947 | 116 | 0.568787 |
import os, sys
import json
import csv
from datetime import datetime
import time
import logging
import re
from edgesense.utils.logger_initializer import initialize_logger
from edgesense.utils.resource import mkdir
def parse_options(argv):
import getopt
basepath = '.'
timestamp = datetime.now()
tag = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
filename = tag+".csv"
try:
opts, args = getopt.getopt(argv,"s:f:",["source=","file="])
except getopt.GetoptError:
print 'tutorial.py -s <source dir> -f <output filename>'
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print 'tutorial.py -s <source dir> -f <output filename>'
sys.exit()
elif opt in ("-s", "--source"):
basepath = arg
elif opt in ("-f", "--filename"):
filename = arg
destination_path = os.path.abspath(os.path.join(basepath, "output"))
mkdir(destination_path)
outuput_filename = os.path.join(destination_path, filename)
source_path = os.path.abspath(basepath)
logging.info("parsing files %(s)s to %(f)s" % {'s': source_path, 'f': outuput_filename})
return (source_path,outuput_filename)
def main(argv):
initialize_logger('./log')
source_path, outuput_filename = parse_options(argv)
logging.info("Tutorial result processing - started")
all_files = [ f for f in os.listdir(source_path) if os.path.isfile(os.path.join(source_path,f)) ]
runs = {}
timestamp = datetime.now()
base_run_id = timestamp.strftime('%Y-%m-%d-%H-%M-%S')
fake_run_id = 1
for filename in all_files:
logging.info("Tutorial result processing - loading:"+os.path.join(source_path,filename))
f = open(os.path.join(source_path,filename), 'r')
try:
parsed = json.load(f)
if parsed.has_key('run_id'):
run_id = parsed['run_id']
else:
run_id = base_run_id+'--'+str(fake_run_id)
fake_run_id += 1
if not runs.has_key(run_id):
runs[run_id] = {}
run_obj = runs[run_id]
run_obj['run_id'] = run_id
if parsed.has_key('base'):
run_obj['base'] = parsed['base']
m = re.search('(\d\d\d\d)-(\d\d)-(\d\d)-\d\d-\d\d-\d\d$', parsed['base'])
if m:
run_obj['date'] = m.group(1)+"-"+m.group(2)+"-"+m.group(3)
if parsed.has_key('comments'):
run_obj['comments'] = parsed['comments'].encode('utf-8').strip()
if parsed.has_key('answers'):
for a in parsed['answers']:
run_obj[a['step']] = a['success']
if parsed.has_key('surveys'):
for a in parsed['surveys']:
run_obj[a['step']] = a['value']
except:
logging.info("Tutorial result processing - error parsing:"+os.path.join(source_path,filename))
logging.info("Tutorial result processing - Writing:"+outuput_filename)
headers = [ 'run_id','base', 'date', \
'betweenness_bin', 'relationship_percentage', \
'posts_percentage', 'comments_share', \
'modularity_increase', 'survey-1', \
'survey-2', 'survey-3', 'survey-4', \
'survey-5', 'comments']
with open(outuput_filename, 'wb') as f:
w = csv.DictWriter(f, headers)
w.writeheader()
w.writerows(runs.values())
logging.info("Tutorial result processing - Completed")
if __name__ == "__main__":
main(sys.argv[1:])
| false | true |
f71f513fecec1a24f3bc3562ef0e4939b4598d59 | 604 | py | Python | discord_styler/__init__.py | miaowware/discord-styled-text | 9e02375b0ba947628bf7a7c853efc433f74d9373 | [
"BSD-3-Clause"
] | 1 | 2022-01-23T23:26:53.000Z | 2022-01-23T23:26:53.000Z | discord_styler/__init__.py | miaowware/discord-styled-text | 9e02375b0ba947628bf7a7c853efc433f74d9373 | [
"BSD-3-Clause"
] | 3 | 2021-08-28T01:46:36.000Z | 2021-09-07T02:59:03.000Z | discord_styler/__init__.py | miaowware/discord-styled-text | 9e02375b0ba947628bf7a7c853efc433f74d9373 | [
"BSD-3-Clause"
] | null | null | null | """
discord-styled-text
---
A small library to style text for Discord without having to remember any syntax
Copyright 2021 classabbyamp, 0x5c
Released under the terms of the BSD 3-Clause license.
"""
from .__info__ import __version__
from .styler import StyledText, Italic, Bold, Underline, Strikethrough, InlineCode, Spoiler, BlockQuote
from .styler import CodeBlock
from .styler import TitledURL, NonEmbeddingURL
from .styler import MentionABC, UserMention, RoleMention, ChannelMention
from .styler import TimeStyle, TimeStamp
from .escape import escape_markdown, escape_mentions, escape_everything
| 33.555556 | 103 | 0.816225 |
from .__info__ import __version__
from .styler import StyledText, Italic, Bold, Underline, Strikethrough, InlineCode, Spoiler, BlockQuote
from .styler import CodeBlock
from .styler import TitledURL, NonEmbeddingURL
from .styler import MentionABC, UserMention, RoleMention, ChannelMention
from .styler import TimeStyle, TimeStamp
from .escape import escape_markdown, escape_mentions, escape_everything
| true | true |
f71f51b989d608434d95424eaab6a007063a211a | 27,424 | py | Python | Multiagent/pacman.py | zengziyunthomas/Artifical-Intelligence | 4862a65bc8743e89b3c92d94eeca973f8b1851f3 | [
"MIT"
] | 1 | 2022-01-07T08:03:55.000Z | 2022-01-07T08:03:55.000Z | Multiagent/pacman.py | zengziyunthomas/Artifical-Intelligence | 4862a65bc8743e89b3c92d94eeca973f8b1851f3 | [
"MIT"
] | null | null | null | Multiagent/pacman.py | zengziyunthomas/Artifical-Intelligence | 4862a65bc8743e89b3c92d94eeca973f8b1851f3 | [
"MIT"
] | null | null | null | # pacman.py
# ---------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel (pabbeel@cs.berkeley.edu).
"""
Pacman.py holds the logic for the classic pacman game along with the main
code to run a game. This file is divided into three sections:
(i) Your interface to the pacman world:
Pacman is a complex environment. You probably don't want to
          read through all of the code we wrote to make the game run
correctly. This section contains the parts of the code
that you will need to understand in order to complete the
project. There is also some code in game.py that you should
understand.
(ii) The hidden secrets of pacman:
This section contains all of the logic code that the pacman
environment uses to decide who can move where, who dies when
things collide, etc. You shouldn't need to read this section
of code, but you can if you want.
(iii) Framework to start a game:
The final section contains the code for reading the command
you use to set up the game, then starting up a new game, along with
linking in all the external parts (agent functions, graphics).
Check this section out to see all the options available to you.
To play your first game, type 'python pacman.py' from the command line.
The keys are 'a', 's', 'd', and 'w' to move (or arrow keys). Have fun!
"""
from game import GameStateData
from game import Game
from game import Directions
from game import Actions
from util import nearestPoint
from util import manhattanDistance
import util
import layout
import sys
import types
import time
import random
import os
###################################################
# YOUR INTERFACE TO THE PACMAN WORLD: A GameState #
###################################################
class GameState:
    """
    A GameState specifies the full game state, including the food, capsules,
    agent configurations and score changes.
    GameStates are used by the Game object to capture the actual state of the game and
    can be used by agents to reason about the game.
    Much of the information in a GameState is stored in a GameStateData object. We
    strongly suggest that you access that data via the accessor methods below rather
    than referring to the GameStateData object directly.
    Note that in classic Pacman, Pacman is always agent 0.
    """
    ####################################################
    # Accessor methods: use these to access state data #
    ####################################################
    # static variable keeps track of which states have had getLegalActions called
    explored = set()
    def getAndResetExplored():
        # Return a snapshot of the explored-state set and clear it
        # (used by autograding code to check which states were expanded).
        tmp = GameState.explored.copy()
        GameState.explored = set()
        return tmp
    getAndResetExplored = staticmethod(getAndResetExplored)
    def getLegalActions(self, agentIndex=0):
        """
        Returns the legal actions for the agent specified.
        """
        # GameState.explored.add(self)
        if self.isWin() or self.isLose():
            return []
        if agentIndex == 0: # Pacman is moving
            return PacmanRules.getLegalActions(self)
        else:
            return GhostRules.getLegalActions(self, agentIndex)
    def getNextState(self, agentIndex, action):
        """
        Returns the child state after the specified agent takes the action.

        Raises Exception if called on a terminal (win/lose) state.
        """
        # Check that children exist
        if self.isWin() or self.isLose():
            raise Exception('Can\'t generate a child of a terminal state.')
        # Copy current state
        state = GameState(self)
        # Let agent's logic deal with its action's effects on the board
        if agentIndex == 0: # Pacman is moving
            state.data._eaten = [False for i in range(state.getNumAgents())]
            PacmanRules.applyAction(state, action)
        else: # A ghost is moving
            GhostRules.applyAction(state, action, agentIndex)
        # Time passes
        if agentIndex == 0:
            state.data.scoreChange += -TIME_PENALTY # Penalty for waiting around
        else:
            GhostRules.decrementTimer(state.data.agentStates[agentIndex])
        # Resolve multi-agent effects
        GhostRules.checkDeath(state, agentIndex)
        # Book keeping
        state.data._agentMoved = agentIndex
        state.data.score += state.data.scoreChange
        # Record both parent and child as explored (see getAndResetExplored).
        GameState.explored.add(self)
        GameState.explored.add(state)
        return state
    def getLegalPacmanActions(self):
        # Convenience wrapper: legal actions for agent 0 (Pacman).
        return self.getLegalActions(0)
    def getPacmanNextState(self, action):
        """
        Generates the child state after the specified pacman move
        """
        return self.getNextState(0, action)
    def getPacmanState(self):
        """
        Returns an AgentState object for pacman (in game.py)
        state.pos gives the current position
        state.direction gives the travel vector
        """
        # Returns a copy so callers cannot mutate the stored state.
        return self.data.agentStates[0].copy()
    def getPacmanPosition(self):
        # (x, y) position of agent 0 (Pacman).
        return self.data.agentStates[0].getPosition()
    def getGhostStates(self):
        # AgentState objects for all ghosts (agents 1..N-1).
        return self.data.agentStates[1:]
    def getGhostState(self, agentIndex):
        # Agent 0 is Pacman, so a ghost index must be in 1..numAgents-1.
        if agentIndex == 0 or agentIndex >= self.getNumAgents():
            raise Exception("Invalid index passed to getGhostState")
        return self.data.agentStates[agentIndex]
    def getGhostPosition(self, agentIndex):
        if agentIndex == 0:
            raise Exception("Pacman's index passed to getGhostPosition")
        return self.data.agentStates[agentIndex].getPosition()
    def getGhostPositions(self):
        return [s.getPosition() for s in self.getGhostStates()]
    def getNumAgents(self):
        return len(self.data.agentStates)
    def getScore(self):
        return float(self.data.score)
    def getCapsules(self):
        """
        Returns a list of positions (x,y) of the remaining capsules.
        """
        return self.data.capsules
    def getNumFood(self):
        return self.data.food.count()
    def getFood(self):
        """
        Returns a Grid of boolean food indicator variables.
        Grids can be accessed via list notation, so to check
        if there is food at (x,y), just call
        currentFood = state.getFood()
        if currentFood[x][y] == True: ...
        """
        return self.data.food
    def getWalls(self):
        """
        Returns a Grid of boolean wall indicator variables.
        Grids can be accessed via list notation, so to check
        if there is a wall at (x,y), just call
        walls = state.getWalls()
        if walls[x][y] == True: ...
        """
        return self.data.layout.walls
    def hasFood(self, x, y):
        return self.data.food[x][y]
    def hasWall(self, x, y):
        return self.data.layout.walls[x][y]
    def isLose(self):
        return self.data._lose
    def isWin(self):
        return self.data._win
    #############################################
    # Helper methods: #
    # You shouldn't need to call these directly #
    #############################################
    def __init__(self, prevState=None):
        """
        Generates a new state by copying information from its predecessor.
        """
        if prevState != None: # Copy the predecessor's data (NOT the initial state)
            self.data = GameStateData(prevState.data)
        else:
            self.data = GameStateData() # Fresh state; call initialize() to populate it
    def deepCopy(self):
        state = GameState(self)
        state.data = self.data.deepCopy()
        return state
    def __eq__(self, other):
        """
        Allows two states to be compared.
        """
        return hasattr(other, 'data') and self.data == other.data
    def __hash__(self):
        """
        Allows states to be keys of dictionaries.
        """
        return hash(self.data)
    def __str__(self):
        return str(self.data)
    def initialize(self, layout, numGhostAgents=1000):
        """
        Creates an initial game state from a layout array (see layout.py).
        """
        self.data.initialize(layout, numGhostAgents)
############################################################################
# THE HIDDEN SECRETS OF PACMAN #
# #
# You shouldn't need to look through the code in this section of the file. #
############################################################################
# Game-balance constants used by the rules classes below.
SCARED_TIME = 40 # Moves ghosts are scared
COLLISION_TOLERANCE = 0.7 # How close ghosts must be to Pacman to kill
TIME_PENALTY = 1 # Number of points lost each round
class ClassicGameRules:
    """
    Manages the control flow of a classic Pacman game: constructs new
    games and decides when and how a game ends.
    """
    def __init__(self, timeout=30):
        # Time budget (seconds) handed out by the getMax*/getMove* accessors.
        self.timeout = timeout
    def newGame(self, layout, pacmanAgent, ghostAgents, display, quiet=False, catchExceptions=False):
        """Build and return a fresh Game for the given layout and agents."""
        startState = GameState()
        startState.initialize(layout, len(ghostAgents))
        # Only as many ghosts as the layout has starting positions for.
        roster = [pacmanAgent] + ghostAgents[:layout.getNumGhosts()]
        game = Game(roster, display, self, catchExceptions=catchExceptions)
        game.state = startState
        self.initialState = startState.deepCopy()
        self.quiet = quiet
        return game
    def process(self, state, game):
        """
        Checks to see whether it is time to end the game.
        """
        if state.isWin():
            self.win(state, game)
        if state.isLose():
            self.lose(state, game)
    def win(self, state, game):
        # Announce the result unless running quietly, then stop the game.
        if not self.quiet:
            print("Pacman emerges victorious! Score: %d" % state.data.score)
        game.gameOver = True
    def lose(self, state, game):
        if not self.quiet:
            print("Pacman died! Score: %d" % state.data.score)
        game.gameOver = True
    def getProgress(self, game):
        # Fraction of the initial food still remaining.
        remaining = game.state.getNumFood()
        return float(remaining) / self.initialState.getNumFood()
    def agentCrash(self, game, agentIndex):
        # Agent 0 is Pacman; everyone else is a ghost.
        print("Pacman crashed" if agentIndex == 0 else "A ghost crashed")
    def getMaxTotalTime(self, agentIndex):
        return self.timeout
    def getMaxStartupTime(self, agentIndex):
        return self.timeout
    def getMoveWarningTime(self, agentIndex):
        return self.timeout
    def getMoveTimeout(self, agentIndex):
        return self.timeout
    def getMaxTimeWarnings(self, agentIndex):
        return 0
class PacmanRules:
    """
    These functions govern how pacman interacts with his environment under
    the classic game rules.
    """
    # Distance Pacman travels per move.
    PACMAN_SPEED = 1
    def getLegalActions(state):
        """
        Returns a list of possible actions.
        """
        return Actions.getPossibleActions(state.getPacmanState().configuration, state.data.layout.walls)
    getLegalActions = staticmethod(getLegalActions)
    def applyAction(state, action):
        """
        Edits the state to reflect the results of the action.

        Raises Exception if the action is not legal in `state`.
        """
        legal = PacmanRules.getLegalActions(state)
        if action not in legal:
            raise Exception("Illegal action " + str(action))
        pacmanState = state.data.agentStates[0]
        # Update Configuration
        vector = Actions.directionToVector(action, PacmanRules.PACMAN_SPEED)
        pacmanState.configuration = pacmanState.configuration.getNextState(
            vector)
        # Eat
        next = pacmanState.configuration.getPosition() # NOTE: shadows the builtin next()
        nearest = nearestPoint(next)
        # Only consume when Pacman is close enough to a grid cell center.
        if manhattanDistance(nearest, next) <= 0.5:
            # Remove food
            PacmanRules.consume(nearest, state)
    applyAction = staticmethod(applyAction)
    def consume(position, state):
        # Eat any food or capsule at `position`, updating score and win state.
        x, y = position
        # Eat food
        if state.data.food[x][y]:
            state.data.scoreChange += 10
            # Copy-on-write: copy the grid before mutating so parent states are unaffected.
            state.data.food = state.data.food.copy()
            state.data.food[x][y] = False
            state.data._foodEaten = position
            # TODO: cache numFood?
            numFood = state.getNumFood()
            if numFood == 0 and not state.data._lose:
                state.data.scoreChange += 500
                state.data._win = True
        # Eat capsule
        if(position in state.getCapsules()):
            state.data.capsules.remove(position)
            state.data._capsuleEaten = position
            # Reset all ghosts' scared timers
            for index in range(1, len(state.data.agentStates)):
                state.data.agentStates[index].scaredTimer = SCARED_TIME
    consume = staticmethod(consume)
class GhostRules:
    """
    These functions dictate how ghosts interact with their environment.
    """
    # Distance a (non-scared) ghost travels per move.
    GHOST_SPEED = 1.0
    def getLegalActions(state, ghostIndex):
        """
        Ghosts cannot stop, and cannot turn around unless they
        reach a dead end, but can turn 90 degrees at intersections.
        """
        conf = state.getGhostState(ghostIndex).configuration
        possibleActions = Actions.getPossibleActions(
            conf, state.data.layout.walls)
        reverse = Actions.reverseDirection(conf.direction)
        if Directions.STOP in possibleActions:
            possibleActions.remove(Directions.STOP)
        # Forbid reversing unless it is the only move (dead end).
        if reverse in possibleActions and len(possibleActions) > 1:
            possibleActions.remove(reverse)
        return possibleActions
    getLegalActions = staticmethod(getLegalActions)
    def applyAction(state, action, ghostIndex):
        # Move ghost `ghostIndex`; scared ghosts travel at half speed.
        legal = GhostRules.getLegalActions(state, ghostIndex)
        if action not in legal:
            raise Exception("Illegal ghost action " + str(action))
        ghostState = state.data.agentStates[ghostIndex]
        speed = GhostRules.GHOST_SPEED
        if ghostState.scaredTimer > 0:
            speed /= 2.0
        vector = Actions.directionToVector(action, speed)
        ghostState.configuration = ghostState.configuration.getNextState(
            vector)
    applyAction = staticmethod(applyAction)
    def decrementTimer(ghostState):
        # Count down the scared timer; snap the ghost back onto the grid
        # center when the timer is about to expire.
        timer = ghostState.scaredTimer
        if timer == 1:
            ghostState.configuration.pos = nearestPoint(
                ghostState.configuration.pos)
        ghostState.scaredTimer = max(0, timer - 1)
    decrementTimer = staticmethod(decrementTimer)
    def checkDeath(state, agentIndex):
        # Resolve collisions between Pacman and ghosts after agent
        # `agentIndex` has moved.
        pacmanPosition = state.getPacmanPosition()
        if agentIndex == 0: # Pacman just moved; Anyone can kill him
            for index in range(1, len(state.data.agentStates)):
                ghostState = state.data.agentStates[index]
                ghostPosition = ghostState.configuration.getPosition()
                if GhostRules.canKill(pacmanPosition, ghostPosition):
                    GhostRules.collide(state, ghostState, index)
        else:
            # A ghost moved; only that ghost can collide with Pacman.
            ghostState = state.data.agentStates[agentIndex]
            ghostPosition = ghostState.configuration.getPosition()
            if GhostRules.canKill(pacmanPosition, ghostPosition):
                GhostRules.collide(state, ghostState, agentIndex)
    checkDeath = staticmethod(checkDeath)
    def collide(state, ghostState, agentIndex):
        # Scared ghost: Pacman eats it (+200, ghost respawns).
        # Otherwise: Pacman dies (-500) unless the game was already won.
        if ghostState.scaredTimer > 0:
            state.data.scoreChange += 200
            GhostRules.placeGhost(state, ghostState)
            ghostState.scaredTimer = 0
            # Added for first-person
            state.data._eaten[agentIndex] = True
        else:
            if not state.data._win:
                state.data.scoreChange -= 500
                state.data._lose = True
    collide = staticmethod(collide)
    def canKill(pacmanPosition, ghostPosition):
        # Collision occurs within COLLISION_TOLERANCE Manhattan distance.
        return manhattanDistance(ghostPosition, pacmanPosition) <= COLLISION_TOLERANCE
    canKill = staticmethod(canKill)
    def placeGhost(state, ghostState):
        # Return the ghost to its starting configuration (respawn).
        ghostState.configuration = ghostState.start
    placeGhost = staticmethod(placeGhost)
#############################
# FRAMEWORK TO START A GAME #
#############################
def default(text):
    """Append the optparse default-value hint to an option help string.

    Renamed parameter from `str` to avoid shadowing the builtin.
    """
    return text + ' [Default: %default]'
def parseAgentArgs(text):
    """Parse the -a/--agentArgs option string into a dict.

    "opt1=val1,opt2,opt3=val3" -> {'opt1': 'val1', 'opt2': 1, 'opt3': 'val3'}

    Values stay strings; a bare key (no '=') maps to the integer 1.
    Returns {} when `text` is None. Renamed parameter from `str` to
    avoid shadowing the builtin.
    """
    if text is None:
        return {}
    opts = {}
    for piece in text.split(','):
        if '=' in piece:
            # split once so values may themselves contain '=' (the
            # original split() raised ValueError on e.g. "k=v=w").
            key, val = piece.split('=', 1)
        else:
            key, val = piece, 1
        opts[key] = val
    return opts
def readCommand(argv):
    """
    Processes the command used to run pacman from the command line.

    Args:
        argv: list of command-line argument strings (typically sys.argv[1:]).

    Returns:
        A dict of keyword arguments suitable for runGames(**args).

    Side effects:
        May fix the global random seed (-f). With --replay, replays the
        recorded game and exits the process instead of returning.
    """
    from optparse import OptionParser
    usageStr = """
    USAGE: python pacman.py <options>
    EXAMPLES: (1) python pacman.py
    - starts an interactive game
    (2) python pacman.py --layout smallClassic --zoom 2
    OR python pacman.py -l smallClassic -z 2
    - starts an interactive game on a smaller board, zoomed in
    """
    parser = OptionParser(usageStr)
    parser.add_option('-n', '--numGames', dest='numGames', type='int',
                      help=default('the number of GAMES to play'), metavar='GAMES', default=1)
    parser.add_option('-l', '--layout', dest='layout',
                      help=default(
                          'the LAYOUT_FILE from which to load the map layout'),
                      metavar='LAYOUT_FILE', default='mediumClassic')
    parser.add_option('-p', '--pacman', dest='pacman',
                      help=default(
                          'the agent TYPE in the pacmanAgents module to use'),
                      metavar='TYPE', default='KeyboardAgent')
    parser.add_option('-t', '--textGraphics', action='store_true', dest='textGraphics',
                      help='Display output as text only', default=False)
    parser.add_option('-q', '--quietTextGraphics', action='store_true', dest='quietGraphics',
                      help='Generate minimal output and no graphics', default=False)
    parser.add_option('-g', '--ghosts', dest='ghost',
                      help=default(
                          'the ghost agent TYPE in the ghostAgents module to use'),
                      metavar='TYPE', default='RandomGhost')
    parser.add_option('-k', '--numghosts', type='int', dest='numGhosts',
                      help=default('The maximum number of ghosts to use'), default=4)
    parser.add_option('-z', '--zoom', type='float', dest='zoom',
                      help=default('Zoom the size of the graphics window'), default=1.0)
    parser.add_option('-f', '--fixRandomSeed', action='store_true', dest='fixRandomSeed',
                      help='Fixes the random seed to always play the same game', default=False)
    parser.add_option('-r', '--recordActions', action='store_true', dest='record',
                      help='Writes game histories to a file (named by the time they were played)', default=False)
    parser.add_option('--replay', dest='gameToReplay',
                      help='A recorded game file (pickle) to replay', default=None)
    parser.add_option('-a', '--agentArgs', dest='agentArgs',
                      help='Comma separated values sent to agent. e.g. "opt1=val1,opt2,opt3=val3"')
    parser.add_option('-x', '--numTraining', dest='numTraining', type='int',
                      help=default('How many episodes are training (suppresses output)'), default=0)
    parser.add_option('--frameTime', dest='frameTime', type='float',
                      help=default('Time to delay between frames; <0 means keyboard'), default=0.1)
    parser.add_option('-c', '--catchExceptions', action='store_true', dest='catchExceptions',
                      help='Turns on exception handling and timeouts during games', default=False)
    parser.add_option('--timeout', dest='timeout', type='int',
                      help=default('Maximum length of time an agent can spend computing in a single game'), default=30)
    options, otherjunk = parser.parse_args(argv)
    if len(otherjunk) != 0:
        raise Exception('Command line input not understood: ' + str(otherjunk))
    args = dict()
    # Fix the random seed for reproducible games
    if options.fixRandomSeed:
        random.seed('cs188')
    # Choose a layout
    args['layout'] = layout.getLayout(options.layout)
    if args['layout'] is None:
        raise Exception("The layout " + options.layout + " cannot be found")
    # Choose a Pacman agent; keyboard agents are only valid with graphics
    noKeyboard = options.gameToReplay is None and (
        options.textGraphics or options.quietGraphics)
    pacmanType = loadAgent(options.pacman, noKeyboard)
    agentOpts = parseAgentArgs(options.agentArgs)
    if options.numTraining > 0:
        args['numTraining'] = options.numTraining
        if 'numTraining' not in agentOpts:
            agentOpts['numTraining'] = options.numTraining
    pacman = pacmanType(**agentOpts)  # Instantiate Pacman with agentArgs
    args['pacman'] = pacman
    # Don't display training games
    if 'numTrain' in agentOpts:
        options.numQuiet = int(agentOpts['numTrain'])
        options.numIgnore = int(agentOpts['numTrain'])
    # Choose a ghost agent
    ghostType = loadAgent(options.ghost, noKeyboard)
    args['ghosts'] = [ghostType(i + 1) for i in range(options.numGhosts)]
    # Choose a display format
    if options.quietGraphics:
        import textDisplay
        args['display'] = textDisplay.NullGraphics()
    elif options.textGraphics:
        import textDisplay
        textDisplay.SLEEP_TIME = options.frameTime
        args['display'] = textDisplay.PacmanGraphics()
    else:
        import graphicsDisplay
        args['display'] = graphicsDisplay.PacmanGraphics(
            options.zoom, frameTime=options.frameTime)
    args['numGames'] = options.numGames
    args['record'] = options.record
    args['catchExceptions'] = options.catchExceptions
    args['timeout'] = options.timeout
    # Special case: recorded games don't use the runGames method or args structure
    if options.gameToReplay is not None:
        print('Replaying recorded game %s.' % options.gameToReplay)
        import pickle
        # BUG FIX: pickles are binary data; the file must be opened in
        # binary mode ('rb') or pickle.load fails under Python 3. The
        # with-statement also replaces the manual try/finally close.
        with open(options.gameToReplay, 'rb') as f:
            recorded = pickle.load(f)
        recorded['display'] = args['display']
        replayGame(**recorded)
        sys.exit(0)
    return args
def loadAgent(pacman, nographics):
    """Locate and return the agent class named `pacman`.

    Searches every directory on $PYTHONPATH (plus the current directory)
    for modules whose filename ends in 'gents.py' and returns the first
    attribute matching `pacman`. Raises Exception if the agent cannot be
    found, or if a keyboard agent is requested without graphics.
    """
    pythonPathStr = os.path.expandvars("$PYTHONPATH")
    # $PYTHONPATH uses ';' as a separator on Windows and ':' elsewhere.
    separator = ';' if ';' in pythonPathStr else ':'
    searchDirs = pythonPathStr.split(separator)
    searchDirs.append('.')
    for moduleDir in searchDirs:
        if not os.path.isdir(moduleDir):
            continue
        candidates = [name for name in os.listdir(moduleDir)
                      if name.endswith('gents.py')]
        for modulename in candidates:
            try:
                module = __import__(modulename[:-3])
            except ImportError:
                continue
            if pacman not in dir(module):
                continue
            if nographics and modulename == 'keyboardAgents.py':
                raise Exception(
                    'Using the keyboard requires graphics (not text display)')
            return getattr(module, pacman)
    raise Exception('The agent ' + pacman +
                    ' is not specified in any *Agents.py.')
def replayGame(layout, actions, display):
    """Step through a recorded list of (agentIndex, action) moves on `display`."""
    import pacmanAgents
    import ghostAgents
    rules = ClassicGameRules()
    # Rebuild the original roster: a greedy Pacman plus one random ghost
    # per layout start position.
    ghosts = [ghostAgents.RandomGhost(i + 1)
              for i in range(layout.getNumGhosts())]
    agents = [pacmanAgents.GreedyAgent()] + ghosts
    game = rules.newGame(layout, agents[0], agents[1:], display)
    state = game.state
    display.initialize(state.data)
    for action in actions:
        # Advance the game by one recorded move and redraw.
        state = state.getNextState(*action)
        display.update(state.data)
        # Allow for game specific conditions (winning, losing, etc.)
        rules.process(state, game)
    display.finish()
def runGames(layout, pacman, ghosts, display, numGames, record, numTraining=0, catchExceptions=False, timeout=30):
    """
    Play `numGames` games and return the list of non-training Game objects.

    The first `numTraining` games run quietly (no graphics or output).
    When `record` is true, each game's move history is pickled to a
    timestamped 'recorded-game-*' file. Prints summary statistics when
    at least one non-training game was played.
    """
    import __main__
    __main__.__dict__['_display'] = display
    rules = ClassicGameRules(timeout)
    games = []
    for i in range(numGames):
        beQuiet = i < numTraining
        if beQuiet:
            # Suppress output and graphics during training games
            import textDisplay
            gameDisplay = textDisplay.NullGraphics()
            rules.quiet = True
        else:
            gameDisplay = display
            rules.quiet = False
        game = rules.newGame(layout, pacman, ghosts,
                             gameDisplay, beQuiet, catchExceptions)
        game.run()
        if not beQuiet:
            games.append(game)
        if record:
            import time
            import pickle
            fname = ('recorded-game-%d' % (i + 1)) + \
                '-'.join([str(t) for t in time.localtime()[1:6]])
            # BUG FIX: the Python 2 builtin file() no longer exists, and
            # pickle requires a binary-mode handle in Python 3; the
            # with-statement also guarantees the file is closed on error.
            with open(fname, 'wb') as f:
                components = {'layout': layout, 'actions': game.moveHistory}
                pickle.dump(components, f)
    if (numGames - numTraining) > 0:
        scores = [game.state.getScore() for game in games]
        wins = [game.state.isWin() for game in games]
        winRate = wins.count(True) / float(len(wins))
        print('Average Score:', sum(scores) / float(len(scores)))
        print('Scores: ', ', '.join([str(score) for score in scores]))
        print('Win Rate: %d/%d (%.2f)' %
              (wins.count(True), len(wins), winRate))
        print('Record: ', ', '.join(
            [['Loss', 'Win'][int(w)] for w in wins]))
    return games
if __name__ == '__main__':
    """
    The main function called when pacman.py is run
    from the command line:
    > python pacman.py
    See the usage string for more details.
    > python pacman.py --help
    """
    # NOTE: the triple-quoted string above is a no-op expression kept as
    # documentation; it is not a real docstring.
    args = readCommand(sys.argv[1:]) # Get game components based on input
    runGames(**args)
    # import cProfile
    # cProfile.run("runGames( **args )")
    pass
| 37.109608 | 120 | 0.592656 |
from game import GameStateData
from game import Game
from game import Directions
from game import Actions
from util import nearestPoint
from util import manhattanDistance
import util
import layout
import sys
import types
import time
import random
import os
imer = staticmethod(decrementTimer)
    def checkDeath(state, agentIndex):
        """Resolve Pacman/ghost collisions after agent `agentIndex` has moved."""
        pacmanPosition = state.getPacmanPosition()
        if agentIndex == 0: # Pacman just moved; Anyone can kill him
            for index in range(1, len(state.data.agentStates)):
                ghostState = state.data.agentStates[index]
                ghostPosition = ghostState.configuration.getPosition()
                if GhostRules.canKill(pacmanPosition, ghostPosition):
                    GhostRules.collide(state, ghostState, index)
        else: # A ghost moved; only that ghost can collide with Pacman
            ghostState = state.data.agentStates[agentIndex]
            ghostPosition = ghostState.configuration.getPosition()
            if GhostRules.canKill(pacmanPosition, ghostPosition):
                GhostRules.collide(state, ghostState, agentIndex)
    checkDeath = staticmethod(checkDeath)
    def collide(state, ghostState, agentIndex):
        """Score and apply the outcome of a Pacman/ghost collision."""
        if ghostState.scaredTimer > 0:
            # Scared ghost is eaten: award points and send it home.
            state.data.scoreChange += 200
            GhostRules.placeGhost(state, ghostState)
            ghostState.scaredTimer = 0
            # Added for first-person
            state.data._eaten[agentIndex] = True
        else:
            if not state.data._win:
                # Pacman is killed: penalty and the game is lost.
                state.data.scoreChange -= 500
                state.data._lose = True
    collide = staticmethod(collide)
    def canKill(pacmanPosition, ghostPosition):
        # Collision occurs within COLLISION_TOLERANCE Manhattan distance.
        return manhattanDistance(ghostPosition, pacmanPosition) <= COLLISION_TOLERANCE
    canKill = staticmethod(canKill)
    def placeGhost(state, ghostState):
        # Return the ghost to its starting configuration (respawn).
        ghostState.configuration = ghostState.start
    placeGhost = staticmethod(placeGhost)
#############################
# FRAMEWORK TO START A GAME #
#############################
def default(text):
    """Append the optparse default-value hint to an option help string.

    Renamed parameter from `str` to avoid shadowing the builtin.
    """
    return text + ' [Default: %default]'
def parseAgentArgs(text):
    """Parse the -a/--agentArgs option string into a dict.

    "opt1=val1,opt2,opt3=val3" -> {'opt1': 'val1', 'opt2': 1, 'opt3': 'val3'}

    Values stay strings; a bare key (no '=') maps to the integer 1.
    Returns {} when `text` is None. Renamed parameter from `str` to
    avoid shadowing the builtin.
    """
    if text is None:
        return {}
    opts = {}
    for piece in text.split(','):
        if '=' in piece:
            # split once so values may themselves contain '=' (the
            # original split() raised ValueError on e.g. "k=v=w").
            key, val = piece.split('=', 1)
        else:
            key, val = piece, 1
        opts[key] = val
    return opts
def readCommand(argv):
    """
    Processes the command used to run pacman from the command line.

    Args:
        argv: list of command-line argument strings (typically sys.argv[1:]).

    Returns:
        A dict of keyword arguments suitable for runGames(**args).

    Side effects:
        May fix the global random seed (-f). With --replay, replays the
        recorded game and exits the process instead of returning.
    """
    from optparse import OptionParser
    usageStr = """
    USAGE: python pacman.py <options>
    EXAMPLES: (1) python pacman.py
    - starts an interactive game
    (2) python pacman.py --layout smallClassic --zoom 2
    OR python pacman.py -l smallClassic -z 2
    - starts an interactive game on a smaller board, zoomed in
    """
    parser = OptionParser(usageStr)
    parser.add_option('-n', '--numGames', dest='numGames', type='int',
                      help=default('the number of GAMES to play'), metavar='GAMES', default=1)
    parser.add_option('-l', '--layout', dest='layout',
                      help=default(
                          'the LAYOUT_FILE from which to load the map layout'),
                      metavar='LAYOUT_FILE', default='mediumClassic')
    parser.add_option('-p', '--pacman', dest='pacman',
                      help=default(
                          'the agent TYPE in the pacmanAgents module to use'),
                      metavar='TYPE', default='KeyboardAgent')
    parser.add_option('-t', '--textGraphics', action='store_true', dest='textGraphics',
                      help='Display output as text only', default=False)
    parser.add_option('-q', '--quietTextGraphics', action='store_true', dest='quietGraphics',
                      help='Generate minimal output and no graphics', default=False)
    parser.add_option('-g', '--ghosts', dest='ghost',
                      help=default(
                          'the ghost agent TYPE in the ghostAgents module to use'),
                      metavar='TYPE', default='RandomGhost')
    parser.add_option('-k', '--numghosts', type='int', dest='numGhosts',
                      help=default('The maximum number of ghosts to use'), default=4)
    parser.add_option('-z', '--zoom', type='float', dest='zoom',
                      help=default('Zoom the size of the graphics window'), default=1.0)
    parser.add_option('-f', '--fixRandomSeed', action='store_true', dest='fixRandomSeed',
                      help='Fixes the random seed to always play the same game', default=False)
    parser.add_option('-r', '--recordActions', action='store_true', dest='record',
                      help='Writes game histories to a file (named by the time they were played)', default=False)
    parser.add_option('--replay', dest='gameToReplay',
                      help='A recorded game file (pickle) to replay', default=None)
    parser.add_option('-a', '--agentArgs', dest='agentArgs',
                      help='Comma separated values sent to agent. e.g. "opt1=val1,opt2,opt3=val3"')
    parser.add_option('-x', '--numTraining', dest='numTraining', type='int',
                      help=default('How many episodes are training (suppresses output)'), default=0)
    parser.add_option('--frameTime', dest='frameTime', type='float',
                      help=default('Time to delay between frames; <0 means keyboard'), default=0.1)
    parser.add_option('-c', '--catchExceptions', action='store_true', dest='catchExceptions',
                      help='Turns on exception handling and timeouts during games', default=False)
    parser.add_option('--timeout', dest='timeout', type='int',
                      help=default('Maximum length of time an agent can spend computing in a single game'), default=30)
    options, otherjunk = parser.parse_args(argv)
    if len(otherjunk) != 0:
        raise Exception('Command line input not understood: ' + str(otherjunk))
    args = dict()
    # Fix the random seed for reproducible games
    if options.fixRandomSeed:
        random.seed('cs188')
    # Choose a layout
    args['layout'] = layout.getLayout(options.layout)
    if args['layout'] is None:
        raise Exception("The layout " + options.layout + " cannot be found")
    # Choose a Pacman agent; keyboard agents are only valid with graphics
    noKeyboard = options.gameToReplay is None and (
        options.textGraphics or options.quietGraphics)
    pacmanType = loadAgent(options.pacman, noKeyboard)
    agentOpts = parseAgentArgs(options.agentArgs)
    if options.numTraining > 0:
        args['numTraining'] = options.numTraining
        if 'numTraining' not in agentOpts:
            agentOpts['numTraining'] = options.numTraining
    pacman = pacmanType(**agentOpts)  # Instantiate Pacman with agentArgs
    args['pacman'] = pacman
    # Don't display training games
    if 'numTrain' in agentOpts:
        options.numQuiet = int(agentOpts['numTrain'])
        options.numIgnore = int(agentOpts['numTrain'])
    # Choose a ghost agent
    ghostType = loadAgent(options.ghost, noKeyboard)
    args['ghosts'] = [ghostType(i + 1) for i in range(options.numGhosts)]
    # Choose a display format
    if options.quietGraphics:
        import textDisplay
        args['display'] = textDisplay.NullGraphics()
    elif options.textGraphics:
        import textDisplay
        textDisplay.SLEEP_TIME = options.frameTime
        args['display'] = textDisplay.PacmanGraphics()
    else:
        import graphicsDisplay
        args['display'] = graphicsDisplay.PacmanGraphics(
            options.zoom, frameTime=options.frameTime)
    args['numGames'] = options.numGames
    args['record'] = options.record
    args['catchExceptions'] = options.catchExceptions
    args['timeout'] = options.timeout
    # Special case: recorded games don't use the runGames method or args structure
    if options.gameToReplay is not None:
        print('Replaying recorded game %s.' % options.gameToReplay)
        import pickle
        # BUG FIX: pickles are binary data; the file must be opened in
        # binary mode ('rb') or pickle.load fails under Python 3. The
        # with-statement also replaces the manual try/finally close.
        with open(options.gameToReplay, 'rb') as f:
            recorded = pickle.load(f)
        recorded['display'] = args['display']
        replayGame(**recorded)
        sys.exit(0)
    return args
def loadAgent(pacman, nographics):
    """Locate and return the agent class named `pacman`.

    Searches every directory on $PYTHONPATH (plus the current directory)
    for modules whose filename ends in 'gents.py' and returns the first
    attribute matching `pacman`. Raises Exception if the agent cannot be
    found, or if a keyboard agent is requested without graphics.
    """
    pythonPathStr = os.path.expandvars("$PYTHONPATH")
    # $PYTHONPATH uses ';' as a separator on Windows and ':' elsewhere.
    separator = ';' if ';' in pythonPathStr else ':'
    searchDirs = pythonPathStr.split(separator)
    searchDirs.append('.')
    for moduleDir in searchDirs:
        if not os.path.isdir(moduleDir):
            continue
        candidates = [name for name in os.listdir(moduleDir)
                      if name.endswith('gents.py')]
        for modulename in candidates:
            try:
                module = __import__(modulename[:-3])
            except ImportError:
                continue
            if pacman not in dir(module):
                continue
            if nographics and modulename == 'keyboardAgents.py':
                raise Exception(
                    'Using the keyboard requires graphics (not text display)')
            return getattr(module, pacman)
    raise Exception('The agent ' + pacman +
                    ' is not specified in any *Agents.py.')
def replayGame(layout, actions, display):
    """Step through a recorded list of (agentIndex, action) moves on `display`."""
    import pacmanAgents
    import ghostAgents
    rules = ClassicGameRules()
    # Rebuild the original roster: a greedy Pacman plus one random ghost
    # per layout start position.
    ghosts = [ghostAgents.RandomGhost(i + 1)
              for i in range(layout.getNumGhosts())]
    agents = [pacmanAgents.GreedyAgent()] + ghosts
    game = rules.newGame(layout, agents[0], agents[1:], display)
    state = game.state
    display.initialize(state.data)
    for action in actions:
        # Advance the game by one recorded move and redraw.
        state = state.getNextState(*action)
        display.update(state.data)
        # Allow for game specific conditions (winning, losing, etc.)
        rules.process(state, game)
    display.finish()
def runGames(layout, pacman, ghosts, display, numGames, record, numTraining=0, catchExceptions=False, timeout=30):
    """
    Play `numGames` games and return the list of non-training Game objects.

    The first `numTraining` games run quietly (no graphics or output).
    When `record` is true, each game's move history is pickled to a
    timestamped 'recorded-game-*' file. Prints summary statistics when
    at least one non-training game was played.
    """
    import __main__
    __main__.__dict__['_display'] = display
    rules = ClassicGameRules(timeout)
    games = []
    for i in range(numGames):
        beQuiet = i < numTraining
        if beQuiet:
            # Suppress output and graphics during training games
            import textDisplay
            gameDisplay = textDisplay.NullGraphics()
            rules.quiet = True
        else:
            gameDisplay = display
            rules.quiet = False
        game = rules.newGame(layout, pacman, ghosts,
                             gameDisplay, beQuiet, catchExceptions)
        game.run()
        if not beQuiet:
            games.append(game)
        if record:
            import time
            import pickle
            fname = ('recorded-game-%d' % (i + 1)) + \
                '-'.join([str(t) for t in time.localtime()[1:6]])
            # BUG FIX: the Python 2 builtin file() no longer exists, and
            # pickle requires a binary-mode handle in Python 3; the
            # with-statement also guarantees the file is closed on error.
            with open(fname, 'wb') as f:
                components = {'layout': layout, 'actions': game.moveHistory}
                pickle.dump(components, f)
    if (numGames - numTraining) > 0:
        scores = [game.state.getScore() for game in games]
        wins = [game.state.isWin() for game in games]
        winRate = wins.count(True) / float(len(wins))
        print('Average Score:', sum(scores) / float(len(scores)))
        print('Scores: ', ', '.join([str(score) for score in scores]))
        print('Win Rate: %d/%d (%.2f)' %
              (wins.count(True), len(wins), winRate))
        print('Record: ', ', '.join(
            [['Loss', 'Win'][int(w)] for w in wins]))
    return games
if __name__ == '__main__':
    # Parse command-line options into game components, then play.
    args = readCommand(sys.argv[1:]) # Get game components based on input
    runGames(**args)
    # import cProfile
    # cProfile.run("runGames( **args )")
    pass
| true | true |
f71f51cf95ba54a5f6398ad0ae300442232506f4 | 3,554 | py | Python | examples/Yellow_Sea/make_YELLOW_grd_v1.py | bilgetutak/pyroms | 3b0550f26f4ac181b7812e14a7167cd1ca0797f0 | [
"BSD-3-Clause"
] | 75 | 2016-04-05T07:15:57.000Z | 2022-03-04T22:49:54.000Z | examples/Yellow_Sea/make_YELLOW_grd_v1.py | hadfieldnz/pyroms-mgh | cd0fe39075825f97a7caf64e2c4c5a19f23302fd | [
"BSD-3-Clause"
] | 27 | 2017-02-26T04:27:49.000Z | 2021-12-01T17:26:56.000Z | examples/Yellow_Sea/make_YELLOW_grd_v1.py | hadfieldnz/pyroms-mgh | cd0fe39075825f97a7caf64e2c4c5a19f23302fd | [
"BSD-3-Clause"
] | 56 | 2016-05-11T06:19:14.000Z | 2022-03-22T19:04:17.000Z | import os
from pyroms import _iso
import numpy as np
from mpl_toolkits.basemap import Basemap, shiftgrid
from scipy.interpolate import griddata
import matplotlib.colors as colors
from scipy.signal import medfilt2d
import netCDF4
import pyroms
from bathy_smoother import *
# Grid dimension (Lm x Mm interior rho points; +3 for the vertex grid below)
Lm = 140
Mm = 120
# Corner coordinates of the grid (lon, lat), counter-clockwise.
lon0=117.5 ; lat0 = 41.
lon1=117.5 ; lat1 = 34.5
lon2 = 127. ; lat2 = 34.5
lon3 = 127. ; lat3 = 41.
# NOTE: 'map' is a Basemap projection instance and shadows the builtin map().
map = Basemap(projection='lcc', lat_0=35., lat_1=30., lat_2=40, lon_0 =123, \
        width=2000000, height=2000000, resolution='i')
lonp = np.array([lon0, lon1, lon2, lon3])
latp = np.array([lat0, lat1, lat2, lat3])
# beta = 1 at each corner marks it as a +90 degree turn for gridgen.
beta = np.array([1, 1, 1, 1])
#generate the new grid
# Do this if you aren't going to move the grid corners interactively.
hgrd = pyroms.grid.Gridgen(lonp, latp, beta, (Mm+3, Lm+3), proj=map)
# Do this if you are going to use the Boundary Interactor
#map.drawcoastlines()
#xp, yp = map(lonp, latp)
#bry = pyroms.hgrid.BoundaryInteractor(xp, yp, beta, shp=(Mm+3,Lm+3), proj=map)
#hgrd=bry.grd
# Calling the Basemap instance with inverse=True converts projection x/y
# back to geographic lon/lat (this is NOT the builtin map()).
lonv, latv = list(map(hgrd.x_vert, hgrd.y_vert, inverse=True))
hgrd = pyroms.grid.CGrid_geo(lonv, latv, map)
# generate the mask
#for verts in map.coastsegs:
# hgrd.mask_polygon(verts)
# alternate version from johan.navarro.padron
for xx,yy in map.coastpolygons:
    # Build an (N, 2) polygon vertex array and mask the cells inside it.
    xa = np.array(xx, np.float32)
    ya = np.array(yy,np.float32)
    vv = np.zeros((xa.shape[0],2))
    vv[:, 0] = xa
    vv[:, 1] = ya
    hgrd.mask_polygon(vv,mask_value=0)
# Edit the land mask interactively.
#pyroms.grid.edit_mask_mesh(hgrd, proj=map)
#edit_mask_mesh_ij is a faster version using imshow... but no map projection.
coast = pyroms.utility.get_coast_from_map(map)
pyroms.grid.edit_mask_mesh_ij(hgrd, coast=coast)
#### Use the following to interpolate from etopo2 bathymetry.
# generate the bathy
# read in topo data (on a regular lat/lon grid)
# this topo come with basemap so you should have it on your laptop.
# just update datadir with the appropriate path
# you can get this data from matplotlib svn with
# svn co https://matplotlib.svn.sourceforge.net/svnroot/matplotlib/trunk/htdocs/screenshots/data/"
datadir = 'data/'
topo = np.loadtxt(os.path.join(datadir, 'etopo20data.gz'))
lons = np.loadtxt(os.path.join(datadir, 'etopo20lons.gz'))
lats = np.loadtxt(os.path.join(datadir, 'etopo20lats.gz'))
# depth positive (etopo stores elevation, so negate to get depth)
topo = -topo
# fix minimum depth
hmin = 5
topo = np.where(topo < hmin, hmin, topo)
# interpolate new bathymetry onto the curvilinear rho points
lon, lat = np.meshgrid(lons, lats)
h = griddata((lon.flat,lat.flat),topo.flat,(hgrd.lon_rho,hgrd.lat_rho), method='linear')
# insure that depth is always deeper than hmin
h = np.where(h < hmin, hmin, h)
# set depth to hmin where masked
idx = np.where(hgrd.mask_rho == 0)
h[idx] = hmin
# save raw bathymetry (stored unsmoothed in the grid file as hraw)
hraw = h.copy()
# check bathymetry roughness
RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho)
print('Max Roughness value is: ', RoughMat.max())
# smooth the raw bathy using the direct iterative method from Martinho and Batteen (2006)
rx0_max = 0.35
h = bathy_smoothing.smoothing_Positive_rx0(hgrd.mask_rho, h, rx0_max)
# check bathymetry roughness again
RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho)
print('Max Roughness value is: ', RoughMat.max())
# vertical coordinate (s-coordinate stretching parameters)
theta_b = 2
theta_s = 7.0
Tcline = 50
N = 30
vgrd = pyroms.vgrid.s_coordinate_4(h, theta_b, theta_s, Tcline, N, hraw=hraw)
# ROMS grid
grd_name = 'YELLOW'
grd = pyroms.grid.ROMS_Grid(grd_name, hgrd, vgrd)
# write grid to netcdf file
pyroms.grid.write_ROMS_grid(grd, filename='YELLOW_grd_v1.nc')
| 29.865546 | 98 | 0.728475 | import os
from pyroms import _iso
import numpy as np
from mpl_toolkits.basemap import Basemap, shiftgrid
from scipy.interpolate import griddata
import matplotlib.colors as colors
from scipy.signal import medfilt2d
import netCDF4
import pyroms
from bathy_smoother import *
Lm = 140
Mm = 120
lon0=117.5 ; lat0 = 41.
lon1=117.5 ; lat1 = 34.5
lon2 = 127. ; lat2 = 34.5
lon3 = 127. ; lat3 = 41.
map = Basemap(projection='lcc', lat_0=35., lat_1=30., lat_2=40, lon_0 =123, \
width=2000000, height=2000000, resolution='i')
lonp = np.array([lon0, lon1, lon2, lon3])
latp = np.array([lat0, lat1, lat2, lat3])
beta = np.array([1, 1, 1, 1])
hgrd = pyroms.grid.Gridgen(lonp, latp, beta, (Mm+3, Lm+3), proj=map)
# Do this if you are going to use the Boundary Interactor
#map.drawcoastlines()
#xp, yp = map(lonp, latp)
#bry = pyroms.hgrid.BoundaryInteractor(xp, yp, beta, shp=(Mm+3,Lm+3), proj=map)
#hgrd=bry.grd
lonv, latv = list(map(hgrd.x_vert, hgrd.y_vert, inverse=True))
hgrd = pyroms.grid.CGrid_geo(lonv, latv, map)
# generate the mask
#for verts in map.coastsegs:
# hgrd.mask_polygon(verts)
# alternate version from johan.navarro.padron
for xx,yy in map.coastpolygons:
xa = np.array(xx, np.float32)
ya = np.array(yy,np.float32)
vv = np.zeros((xa.shape[0],2))
vv[:, 0] = xa
vv[:, 1] = ya
hgrd.mask_polygon(vv,mask_value=0)
# Edit the land mask interactively.
#pyroms.grid.edit_mask_mesh(hgrd, proj=map)
#edit_mask_mesh_ij is a faster version using imshow... but no map projection.
coast = pyroms.utility.get_coast_from_map(map)
pyroms.grid.edit_mask_mesh_ij(hgrd, coast=coast)
#### Use the following to interpolate from etopo2 bathymetry.
# generate the bathy
# read in topo data (on a regular lat/lon grid)
# this topo come with basemap so you should have it on your laptop.
# just update datadir with the appropriate path
# you can get this data from matplolib svn with
# svn co https://matplotlib.svn.sourceforge.net/svnroot/matplotlib/trunk/htdocs/screenshots/data/"
datadir = 'data/'
topo = np.loadtxt(os.path.join(datadir, 'etopo20data.gz'))
lons = np.loadtxt(os.path.join(datadir, 'etopo20lons.gz'))
lats = np.loadtxt(os.path.join(datadir, 'etopo20lats.gz'))
# depth positive
topo = -topo
# fix minimum depth
hmin = 5
topo = np.where(topo < hmin, hmin, topo)
# interpolate new bathymetry
lon, lat = np.meshgrid(lons, lats)
h = griddata((lon.flat,lat.flat),topo.flat,(hgrd.lon_rho,hgrd.lat_rho), method='linear')
# insure that depth is always deeper than hmin
h = np.where(h < hmin, hmin, h)
# set depth to hmin where masked
idx = np.where(hgrd.mask_rho == 0)
h[idx] = hmin
# save raw bathymetry
hraw = h.copy()
# check bathymetry roughness
RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho)
print('Max Roughness value is: ', RoughMat.max())
# smooth the raw bathy using the direct iterative method from Martinho and Batteen (2006)
rx0_max = 0.35
h = bathy_smoothing.smoothing_Positive_rx0(hgrd.mask_rho, h, rx0_max)
# check bathymetry roughness again
RoughMat = bathy_tools.RoughnessMatrix(h, hgrd.mask_rho)
print('Max Roughness value is: ', RoughMat.max())
# vertical coordinate
theta_b = 2
theta_s = 7.0
Tcline = 50
N = 30
vgrd = pyroms.vgrid.s_coordinate_4(h, theta_b, theta_s, Tcline, N, hraw=hraw)
# ROMS grid
grd_name = 'YELLOW'
grd = pyroms.grid.ROMS_Grid(grd_name, hgrd, vgrd)
# write grid to netcdf file
pyroms.grid.write_ROMS_grid(grd, filename='YELLOW_grd_v1.nc')
| true | true |
f71f51f2b02de08ee56c301cd81086d983759417 | 4,082 | py | Python | tests/config/test_config_provider.py | sturmianseq/thundra-agent-python | 4cee02d790eb7b8e4dea4e2e9dcd1f67533b1c56 | [
"Apache-2.0"
] | 22 | 2018-03-05T20:02:46.000Z | 2021-04-09T12:00:18.000Z | tests/config/test_config_provider.py | sturmianseq/thundra-agent-python | 4cee02d790eb7b8e4dea4e2e9dcd1f67533b1c56 | [
"Apache-2.0"
] | 13 | 2018-03-26T07:57:57.000Z | 2021-06-29T14:22:52.000Z | tests/config/test_config_provider.py | thundra-io/thundra-agent-python | 448e18c17d8730c381b2e2a773782cf80c5a7cfb | [
"Apache-2.0"
] | 3 | 2021-08-07T14:19:23.000Z | 2021-12-08T15:35:40.000Z | import os
import pytest
from thundra.config.config_provider import ConfigProvider
@pytest.fixture()
def config_options():
    """Nested options dict mirroring the user-facing config layout.

    Covers keys directly under the ``config`` root, under the ``lambda``
    sub-tree, and under the already-prefixed ``thundra.agent`` sub-tree, so
    tests can verify ConfigProvider's prefix-normalisation rules.
    """
    agent_tree = {
        'my': {'key3': 'my-value3'},
        'lambda': {'my': {'key4': 'my-value4'}},
    }
    return {
        'config': {
            'my': {'key': 'my-value'},
            'lambda': {'my': {'key2': 'my-value2'}},
            'thundra': {'agent': agent_tree},
        }
    }
@pytest.fixture()
def options_with_different_type():
    """Options whose leaf values are of mixed Python types (str, bool, int).

    Used to check that ConfigProvider preserves proper value types instead
    of flattening everything to strings.
    """
    agent_settings = {}
    agent_settings['application'] = {'className': 'TEST'}
    agent_settings['debug'] = {'enable': True}
    agent_settings['lambda'] = {'debugger.broker.port': 444}
    return {'config': {'thundra': {'agent': agent_settings}}}
def test_config_from_environment_variable(monkeypatch):
    """Env vars surface under lower-case dotted keys, not their raw names."""
    env_vars = {
        'THUNDRA_AGENT_TEST_KEY': 'test_value',
        'THUNDRA_AGENT_LAMBDA_TEST_KEY2': 'test_value2',
    }
    for name, value in env_vars.items():
        monkeypatch.setitem(os.environ, name, value)
    ConfigProvider.__init__()
    # Values were captured at init time; removing them now must not matter.
    for name in env_vars:
        monkeypatch.delitem(os.environ, name)
    assert ConfigProvider.get('thundra.agent.test.key') == 'test_value'
    assert ConfigProvider.get('thundra.agent.lambda.test.key2') == 'test_value2'
    # The raw environment-variable spellings are not valid lookup keys.
    assert ConfigProvider.get('THUNDRA_AGENT_TEST_KEY') is None
    assert ConfigProvider.get('THUNDRA_AGENT_LAMBDA_TEST_KEY2') is None
def test_config_from_options(config_options):
    """Every options entry is reachable under the normalised thundra.agent prefix."""
    ConfigProvider.__init__(options=config_options)
    expected = {
        'thundra.agent.my.key': 'my-value',
        'thundra.agent.lambda.my.key2': 'my-value2',
        'thundra.agent.my.key3': 'my-value3',
        'thundra.agent.lambda.my.key4': 'my-value4',
        'thundra.agent.my.key2': 'my-value2',
        'thundra.agent.my.key4': 'my-value4',
    }
    for key, value in expected.items():
        assert ConfigProvider.get(key) == value
    # A key that exists in no options sub-tree resolves to None.
    assert ConfigProvider.get('thundra.agent.my.key5') is None
def test_config_environment_variable_override_options(monkeypatch, config_options):
    """Environment variables take precedence over the programmatic options dict."""
    monkeypatch.setitem(os.environ, 'THUNDRA_AGENT_MY_KEY', 'my_value_from_env')
    monkeypatch.setitem(os.environ, 'THUNDRA_AGENT_LAMBDA_MY_KEY2', 'my_value_from_env2')
    ConfigProvider.__init__(options=config_options)
    # Both keys also exist in config_options, but the env values must win.
    assert ConfigProvider.get('thundra.agent.my.key') == 'my_value_from_env'
    assert ConfigProvider.get('thundra.agent.lambda.my.key2') == 'my_value_from_env2'
    assert ConfigProvider.get('thundra.agent.my.key2') == 'my_value_from_env2'
def test_config_variable_correct_type(monkeypatch, options_with_different_type):
    """Config values come back with proper Python types (int, bool, str)."""
    # Lower-case env-var names are accepted as well as upper-case ones.
    monkeypatch.setitem(os.environ, 'thundra_agent_lambda_debugger_port', '3000')
    monkeypatch.setitem(os.environ, 'thundra_agent_trace_integrations_aws_dynamodb_traceInjection_enable', 'true')
    ConfigProvider.__init__(options=options_with_different_type)
    # Env-var strings '3000'/'true' are coerced to int/bool on lookup.
    assert ConfigProvider.get('thundra.agent.lambda.debugger.port') == 3000
    assert ConfigProvider.get('thundra.agent.trace.integrations.aws.dynamodb.traceinjection.enable') is True
    # Option-supplied values keep their original Python types.
    assert ConfigProvider.get('thundra.agent.lambda.debugger.broker.port') == 444
    assert ConfigProvider.get('thundra.agent.application.classname') == 'TEST'
    assert ConfigProvider.get('thundra.agent.debug.enable') is True
def test_config_correct_default_value():
    """Built-in defaults apply, and an explicit fallback overrides them."""
    ConfigProvider.__init__()
    # Built-in default for debug.enable is False...
    debug_enabled = ConfigProvider.get('thundra.agent.debug.enable')
    assert debug_enabled is False
    # ...unless the caller supplies an explicit fallback value.
    assert ConfigProvider.get('thundra.agent.debug.enable', True) is True
    assert ConfigProvider.get('thundra.agent.lambda.debugger.logs.enable') is False
| 34.888889 | 114 | 0.629838 | import os
import pytest
from thundra.config.config_provider import ConfigProvider
@pytest.fixture()
def config_options():
return {
'config': {
'my': {
'key': 'my-value'
},
'lambda': {
'my': {
'key2': 'my-value2'
}
},
'thundra': {
'agent': {
'my': {
'key3': 'my-value3'
},
'lambda': {
'my': {
'key4': 'my-value4'
}
}
}
}
}
}
@pytest.fixture()
def options_with_different_type():
return {
'config': {
'thundra': {
'agent': {
'application': {
'className': 'TEST'
},
'debug': {
'enable': True
},
'lambda': {
'debugger.broker.port': 444
}
}
}
}
}
def test_config_from_environment_variable(monkeypatch):
monkeypatch.setitem(os.environ, 'THUNDRA_AGENT_TEST_KEY', 'test_value')
monkeypatch.setitem(os.environ, 'THUNDRA_AGENT_LAMBDA_TEST_KEY2', 'test_value2')
ConfigProvider.__init__()
monkeypatch.delitem(os.environ, 'THUNDRA_AGENT_TEST_KEY')
monkeypatch.delitem(os.environ, 'THUNDRA_AGENT_LAMBDA_TEST_KEY2')
assert ConfigProvider.get('thundra.agent.test.key') == 'test_value'
assert ConfigProvider.get('thundra.agent.lambda.test.key2') == 'test_value2'
assert ConfigProvider.get('THUNDRA_AGENT_TEST_KEY') is None
assert ConfigProvider.get('THUNDRA_AGENT_LAMBDA_TEST_KEY2') is None
def test_config_from_options(config_options):
ConfigProvider.__init__(options=config_options)
assert ConfigProvider.get('thundra.agent.my.key') == 'my-value'
assert ConfigProvider.get('thundra.agent.lambda.my.key2') == 'my-value2'
assert ConfigProvider.get('thundra.agent.my.key3') == 'my-value3'
assert ConfigProvider.get('thundra.agent.lambda.my.key4') == 'my-value4'
assert ConfigProvider.get('thundra.agent.my.key2') == 'my-value2'
assert ConfigProvider.get('thundra.agent.my.key4') == 'my-value4'
assert ConfigProvider.get('thundra.agent.my.key5') is None
def test_config_environment_variable_override_options(monkeypatch, config_options):
monkeypatch.setitem(os.environ, 'THUNDRA_AGENT_MY_KEY', 'my_value_from_env')
monkeypatch.setitem(os.environ, 'THUNDRA_AGENT_LAMBDA_MY_KEY2', 'my_value_from_env2')
ConfigProvider.__init__(options=config_options)
assert ConfigProvider.get('thundra.agent.my.key') == 'my_value_from_env'
assert ConfigProvider.get('thundra.agent.lambda.my.key2') == 'my_value_from_env2'
assert ConfigProvider.get('thundra.agent.my.key2') == 'my_value_from_env2'
def test_config_variable_correct_type(monkeypatch, options_with_different_type):
monkeypatch.setitem(os.environ, 'thundra_agent_lambda_debugger_port', '3000')
monkeypatch.setitem(os.environ, 'thundra_agent_trace_integrations_aws_dynamodb_traceInjection_enable', 'true')
ConfigProvider.__init__(options=options_with_different_type)
assert ConfigProvider.get('thundra.agent.lambda.debugger.port') == 3000
assert ConfigProvider.get('thundra.agent.trace.integrations.aws.dynamodb.traceinjection.enable') is True
assert ConfigProvider.get('thundra.agent.lambda.debugger.broker.port') == 444
assert ConfigProvider.get('thundra.agent.application.classname') == 'TEST'
assert ConfigProvider.get('thundra.agent.debug.enable') is True
def test_config_correct_default_value():
ConfigProvider.__init__()
assert ConfigProvider.get('thundra.agent.debug.enable') is False
assert ConfigProvider.get('thundra.agent.debug.enable', True) is True
assert ConfigProvider.get('thundra.agent.lambda.debugger.logs.enable') is False
| true | true |
f71f521b683a7942f71c9124e2203f4da258ee2b | 4,799 | py | Python | tests/test_optimalK.py | alinaselega/gap_statistic | 2b94c46b676eef839f7709441a89bdc5796b2d31 | [
"MIT",
"Unlicense"
] | 132 | 2016-11-01T07:08:21.000Z | 2022-03-30T13:41:31.000Z | tests/test_optimalK.py | alinaselega/gap_statistic | 2b94c46b676eef839f7709441a89bdc5796b2d31 | [
"MIT",
"Unlicense"
] | 37 | 2016-10-18T12:18:35.000Z | 2022-02-23T04:22:19.000Z | tests/test_optimalK.py | alinaselega/gap_statistic | 2b94c46b676eef839f7709441a89bdc5796b2d31 | [
"MIT",
"Unlicense"
] | 43 | 2017-01-08T18:35:45.000Z | 2022-02-17T14:07:20.000Z | # -*- coding: utf-8 -*-
import os
import pytest
import numpy as np
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans, MeanShift
from gap_statistic import OptimalK
def test_bad_init_config():
    """A user-supplied clusterer is rejected when the Rust backend is requested."""
    def _custom_clusterer(x, k):
        print("just testing")
    with pytest.raises(ValueError):
        # The clusterer is never invoked: construction itself must fail.
        OptimalK(parallel_backend="rust", clusterer=_custom_clusterer)
@pytest.mark.parametrize("ClusterModel", [KMeans, MeanShift])
def test_alternative_clusting_method(ClusterModel):
"""
Test that users can supply alternative clustering method as dep injection
"""
def clusterer(X: np.ndarray, k: int, another_test_arg):
"""
Function to wrap a sklearn model as a clusterer for OptimalK
First two arguments are always the data matrix, and k, and can supply
"""
m = ClusterModel()
m.fit(X)
assert another_test_arg == "test"
return m.cluster_centers_, m.predict(X)
optimalk = OptimalK(
n_jobs=-1,
parallel_backend="joblib",
clusterer=clusterer,
clusterer_kwargs={"another_test_arg": "test"},
)
X, y = make_blobs(n_samples=50, n_features=2, centers=3)
n_clusters = optimalk(X, n_refs=3, cluster_array=np.arange(1, 5))
assert isinstance(n_clusters, int)
@pytest.mark.parametrize(
    "parallel_backend, n_jobs, n_clusters",
    [
        pytest.param(
            "joblib", 1, 3, id="parallel_backend='joblib', n_jobs=1, n_clusters=3"
        ),
        pytest.param(None, 1, 3, id="parallel_backend=None, n_jobs=1, n_clusters=3"),
        # TODO: Add back this test param in rust side extension
        # pytest.param(
        #     "rust", 1, 3, id="parallel_backend='rust', n_jobs=1, n_clusters=3"
        # ),
    ],
)
def test_optimalk(parallel_backend, n_jobs, n_clusters):
    """
    Test core functionality of OptimalK using all backends.
    """
    # Create optimalK instance
    optimalK = OptimalK(parallel_backend=parallel_backend, n_jobs=n_jobs)
    # Create data with a known number of centers (3).
    X, y = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
    suggested_clusters = optimalK(X, n_refs=3, cluster_array=np.arange(1, 10))
    # NOTE(review): the third positional argument to np.allclose is rtol, so
    # "2" here is a 200% *relative* tolerance — presumably atol=2 ("off by
    # up to ~2 clusters") was the intent. TODO confirm before tightening.
    assert np.allclose(
        suggested_clusters, n_clusters, 2
    ), "Correct clusters is {}, OptimalK suggested {}".format(
        n_clusters, suggested_clusters
    )
@pytest.mark.skipif(
    "TEST_RUST_EXT" not in os.environ, reason="Rust extension not built."
)
def test_optimalk_rust_ext():
    """
    Test core functionality of OptimalK using the Rust backend
    (only runs when the TEST_RUST_EXT environment variable is set).
    """
    # Create optimalK instance
    optimalK = OptimalK(parallel_backend="rust", n_jobs=1)
    # Create data with a known number of centers (3).
    X, y = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
    suggested_clusters = optimalK(X, n_refs=3, cluster_array=np.arange(1, 10))
    # NOTE(review): as in test_optimalk, "2" is passed as rtol (200%
    # relative tolerance); atol=2 was probably intended — confirm.
    assert np.allclose(
        suggested_clusters, 3, 2
    ), "Correct clusters is {}, OptimalK suggested {}".format(3, suggested_clusters)
def test_optimalk_cluster_array_vs_data_sizes_error():
    """A cluster_array with more candidates than samples raises ValueError."""
    import numpy as np
    from gap_statistic import OptimalK
    optimal_k = OptimalK(parallel_backend=None, n_jobs=-1)
    # Only five samples, but candidate cluster counts range up to nine.
    samples, _ = make_blobs(n_samples=5, n_features=2, centers=3)
    with pytest.raises(ValueError) as excinfo:
        optimal_k(samples, cluster_array=np.arange(1, 10))
    assert "The number of suggested clusters to try" in str(excinfo.value)
def test_optimalk_cluster_array_values_error():
    """
    Test ValueError when cluster_array contains values less than 1.
    """
    # OptimalK is already imported at module level; the redundant
    # function-local `from gap_statistic import OptimalK` was removed.
    # Create optimalK instance
    optimalK = OptimalK(parallel_backend=None, n_jobs=-1)
    # Create data
    X, y = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
    # 0 and -1 are invalid cluster counts and must be rejected.
    with pytest.raises(ValueError) as excinfo:
        optimalK(X, cluster_array=[0, -1, 1, 2, 3])
    assert "cluster_array contains values less than 1" in str(excinfo.value)
def test_optimalk_cluster_array_empty_error():
    """An empty cluster_array is rejected with a descriptive ValueError."""
    from gap_statistic import OptimalK
    optimal_k = OptimalK(parallel_backend=None, n_jobs=-1)
    samples, _ = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
    with pytest.raises(ValueError) as excinfo:
        optimal_k(samples, cluster_array=[])
    assert "The supplied cluster_array has no values." in str(excinfo.value)
def test_dunders():
    """Smoke-test the implemented string-representation dunder methods."""
    from gap_statistic import OptimalK
    instance = OptimalK()
    # Each representation method must be callable without raising.
    for render in (instance.__str__, instance.__repr__, instance._repr_html_):
        render()
| 29.441718 | 87 | 0.681184 |
import os
import pytest
import numpy as np
from sklearn.datasets import make_blobs
from sklearn.cluster import KMeans, MeanShift
from gap_statistic import OptimalK
def test_bad_init_config():
with pytest.raises(ValueError):
OptimalK(parallel_backend="rust", clusterer=lambda x, k: print("just testing"))
@pytest.mark.parametrize("ClusterModel", [KMeans, MeanShift])
def test_alternative_clusting_method(ClusterModel):
def clusterer(X: np.ndarray, k: int, another_test_arg):
m = ClusterModel()
m.fit(X)
assert another_test_arg == "test"
return m.cluster_centers_, m.predict(X)
optimalk = OptimalK(
n_jobs=-1,
parallel_backend="joblib",
clusterer=clusterer,
clusterer_kwargs={"another_test_arg": "test"},
)
X, y = make_blobs(n_samples=50, n_features=2, centers=3)
n_clusters = optimalk(X, n_refs=3, cluster_array=np.arange(1, 5))
assert isinstance(n_clusters, int)
@pytest.mark.parametrize(
"parallel_backend, n_jobs, n_clusters",
[
pytest.param(
"joblib", 1, 3, id="parallel_backend='joblib', n_jobs=1, n_clusters=3"
),
pytest.param(None, 1, 3, id="parallel_backend=None, n_jobs=1, n_clusters=3"),
],
)
def test_optimalk(parallel_backend, n_jobs, n_clusters):
optimalK = OptimalK(parallel_backend=parallel_backend, n_jobs=n_jobs)
X, y = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
suggested_clusters = optimalK(X, n_refs=3, cluster_array=np.arange(1, 10))
assert np.allclose(
suggested_clusters, n_clusters, 2
), "Correct clusters is {}, OptimalK suggested {}".format(
n_clusters, suggested_clusters
)
@pytest.mark.skipif(
"TEST_RUST_EXT" not in os.environ, reason="Rust extension not built."
)
def test_optimalk_rust_ext():
optimalK = OptimalK(parallel_backend="rust", n_jobs=1)
X, y = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
suggested_clusters = optimalK(X, n_refs=3, cluster_array=np.arange(1, 10))
assert np.allclose(
suggested_clusters, 3, 2
), "Correct clusters is {}, OptimalK suggested {}".format(3, suggested_clusters)
def test_optimalk_cluster_array_vs_data_sizes_error():
import numpy as np
from gap_statistic import OptimalK
optimalK = OptimalK(parallel_backend=None, n_jobs=-1)
X, y = make_blobs(n_samples=5, n_features=2, centers=3)
with pytest.raises(ValueError) as excinfo:
optimalK(X, cluster_array=np.arange(1, 10))
assert "The number of suggested clusters to try" in str(excinfo.value)
def test_optimalk_cluster_array_values_error():
from gap_statistic import OptimalK
optimalK = OptimalK(parallel_backend=None, n_jobs=-1)
X, y = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
with pytest.raises(ValueError) as excinfo:
optimalK(X, cluster_array=[0, -1, 1, 2, 3])
assert "cluster_array contains values less than 1" in str(excinfo.value)
def test_optimalk_cluster_array_empty_error():
from gap_statistic import OptimalK
optimalK = OptimalK(parallel_backend=None, n_jobs=-1)
X, y = make_blobs(n_samples=int(1e3), n_features=2, centers=3)
with pytest.raises(ValueError) as excinfo:
optimalK(X, cluster_array=[])
assert "The supplied cluster_array has no values." in str(excinfo.value)
def test_dunders():
from gap_statistic import OptimalK
optimalK = OptimalK()
optimalK.__str__()
optimalK.__repr__()
optimalK._repr_html_()
| true | true |
f71f533ceeca3968a0d37a1a87b62202c911fd86 | 11,743 | py | Python | samples/openapi3/client/features/dynamic-servers/python/dynamic_servers/api/usage_api.py | JigarJoshi/openapi-generator | 785535b8d6881b358463994823abbda2b26ff42e | [
"Apache-2.0"
] | 1 | 2022-01-24T08:22:21.000Z | 2022-01-24T08:22:21.000Z | samples/openapi3/client/features/dynamic-servers/python/dynamic_servers/api/usage_api.py | JigarJoshi/openapi-generator | 785535b8d6881b358463994823abbda2b26ff42e | [
"Apache-2.0"
] | 4 | 2021-09-29T08:46:32.000Z | 2021-12-08T09:07:04.000Z | samples/openapi3/client/features/dynamic-servers/python/dynamic_servers/api/usage_api.py | JigarJoshi/openapi-generator | 785535b8d6881b358463994823abbda2b26ff42e | [
"Apache-2.0"
] | 1 | 2022-02-24T15:54:44.000Z | 2022-02-24T15:54:44.000Z | """
OpenAPI Extension with dynamic servers
This specification shows how to use dynamic servers. # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from dynamic_servers.api_client import ApiClient, Endpoint as _Endpoint
from dynamic_servers.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
class UsageApi(object):
    """NOTE: This class is auto generated by OpenAPI Generator
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    def __init__(self, api_client=None):
        # Each operation is described declaratively by an _Endpoint object;
        # the shared ApiClient performs the actual HTTP calls.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client
        # GET /custom -- declares three server templates (with variable
        # substitution and allowed enum values); callers select one of
        # them via the _host_index keyword.
        self.custom_server_endpoint = _Endpoint(
            settings={
                'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
                'auth': [],
                'endpoint_path': '/custom',
                'operation_id': 'custom_server',
                'http_method': 'GET',
                'servers': [
                    {
                        'url': "https://{server}.swagger.io:{port}/v2",
                        'description': "No description provided",
                        'variables': {
                            'server': {
                                'description': "No description provided",
                                'default_value': "custom-petstore",
                                'enum_values': [
                                    "custom-petstore",
                                    "custom-qa-petstore",
                                    "custom-dev-petstore"
                                ]
                            },
                            'port': {
                                'description': "No description provided",
                                'default_value': "8080",
                                'enum_values': [
                                    "80",
                                    "8080"
                                ]
                            }
                        }
                    },
                    {
                        'url': "https://localhost:8081/{version}",
                        'description': "The local custom server",
                        'variables': {
                            'version': {
                                'description': "No description provided",
                                'default_value': "v2",
                                'enum_values': [
                                    "v1",
                                    "v2",
                                    "v3"
                                ]
                            }
                        }
                    },
                    {
                        'url': "https://third.example.com/{prefix}",
                        'description': "The local custom server",
                        'variables': {
                            'prefix': {
                                'description': "No description provided",
                                'default_value': "custom",
                            }
                        }
                    },
                ]
            },
            # The /custom operation takes no parameters, so the maps below
            # are empty.
            params_map={
                'all': [
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                },
                'attribute_map': {
                },
                'location_map': {
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )
        # GET /default -- 'servers': None means the client's default server
        # configuration is used instead of per-operation overrides.
        self.default_server_endpoint = _Endpoint(
            settings={
                'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
                'auth': [],
                'endpoint_path': '/default',
                'operation_id': 'default_server',
                'http_method': 'GET',
                'servers': None,
            },
            params_map={
                'all': [
                ],
                'required': [],
                'nullable': [
                ],
                'enum': [
                ],
                'validation': [
                ]
            },
            root_map={
                'validations': {
                },
                'allowed_values': {
                },
                'openapi_types': {
                },
                'attribute_map': {
                },
                'location_map': {
                },
                'collection_format_map': {
                }
            },
            headers_map={
                'accept': [
                    'application/json'
                ],
                'content_type': [],
            },
            api_client=api_client
        )
def custom_server(
self,
**kwargs
):
"""Use custom server # noqa: E501
Use custom server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.custom_server(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
{str: (bool, date, datetime, dict, float, int, list, str, none_type)}
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.custom_server_endpoint.call_with_http_info(**kwargs)
def default_server(
self,
**kwargs
):
"""Use default server # noqa: E501
Use default server # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.default_server(async_req=True)
>>> result = thread.get()
Keyword Args:
_return_http_data_only (bool): response data without head status
code and headers. Default is True.
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done one the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done one the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
async_req (bool): execute request asynchronously
Returns:
{str: (bool, date, datetime, dict, float, int, list, str, none_type)}
If the method is called asynchronously, returns the request
thread.
"""
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.default_server_endpoint.call_with_http_info(**kwargs)
| 37.044164 | 106 | 0.471941 |
import re
import sys
from dynamic_servers.api_client import ApiClient, Endpoint as _Endpoint
from dynamic_servers.model_utils import (
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
class UsageApi(object):
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.custom_server_endpoint = _Endpoint(
settings={
'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
'auth': [],
'endpoint_path': '/custom',
'operation_id': 'custom_server',
'http_method': 'GET',
'servers': [
{
'url': "https://{server}.swagger.io:{port}/v2",
'description': "No description provided",
'variables': {
'server': {
'description': "No description provided",
'default_value': "custom-petstore",
'enum_values': [
"custom-petstore",
"custom-qa-petstore",
"custom-dev-petstore"
]
},
'port': {
'description': "No description provided",
'default_value': "8080",
'enum_values': [
"80",
"8080"
]
}
}
},
{
'url': "https://localhost:8081/{version}",
'description': "The local custom server",
'variables': {
'version': {
'description': "No description provided",
'default_value': "v2",
'enum_values': [
"v1",
"v2",
"v3"
]
}
}
},
{
'url': "https://third.example.com/{prefix}",
'description': "The local custom server",
'variables': {
'prefix': {
'description': "No description provided",
'default_value': "custom",
}
}
},
]
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.default_server_endpoint = _Endpoint(
settings={
'response_type': ({str: (bool, date, datetime, dict, float, int, list, str, none_type)},),
'auth': [],
'endpoint_path': '/default',
'operation_id': 'default_server',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
},
'attribute_map': {
},
'location_map': {
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
def custom_server(
self,
**kwargs
):
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.custom_server_endpoint.call_with_http_info(**kwargs)
def default_server(
self,
**kwargs
):
kwargs['async_req'] = kwargs.get(
'async_req', False
)
kwargs['_return_http_data_only'] = kwargs.get(
'_return_http_data_only', True
)
kwargs['_preload_content'] = kwargs.get(
'_preload_content', True
)
kwargs['_request_timeout'] = kwargs.get(
'_request_timeout', None
)
kwargs['_check_input_type'] = kwargs.get(
'_check_input_type', True
)
kwargs['_check_return_type'] = kwargs.get(
'_check_return_type', True
)
kwargs['_spec_property_naming'] = kwargs.get(
'_spec_property_naming', False
)
kwargs['_content_type'] = kwargs.get(
'_content_type')
kwargs['_host_index'] = kwargs.get('_host_index')
return self.default_server_endpoint.call_with_http_info(**kwargs)
| true | true |
f71f537b15779a901376e8b188dd9b2dd1be6031 | 1,525 | py | Python | scripts/cleanup_datasets/update_dataset_size.py | vimalkumarvelayudhan/galaxy | ea89dd8f149778b6c2f0f3f4a34c8b21f7033af7 | [
"CC-BY-3.0"
] | null | null | null | scripts/cleanup_datasets/update_dataset_size.py | vimalkumarvelayudhan/galaxy | ea89dd8f149778b6c2f0f3f4a34c8b21f7033af7 | [
"CC-BY-3.0"
] | null | null | null | scripts/cleanup_datasets/update_dataset_size.py | vimalkumarvelayudhan/galaxy | ea89dd8f149778b6c2f0f3f4a34c8b21f7033af7 | [
"CC-BY-3.0"
] | null | null | null | #!/usr/bin/env python
"""
Updates dataset.size column.
Remember to backup your database before running.
"""
import sys, os, ConfigParser
import galaxy.app
assert sys.version_info[:2] >= ( 2, 4 )
def usage(prog) :
    # Print the usage banner; `prog` is the invoked script path (argv[0]).
    # NOTE: Python 2 print statements — this script predates Python 3.
    print "usage: %s galaxy.ini" % prog
    print """
Updates the dataset.size column. Users are advised to backup the database before
running.
"""
def main():
if len(sys.argv) != 1 or sys.argv[1] == "-h" or sys.argv[1] == "--help" :
usage(sys.argv[0])
sys.exit()
ini_file = sys.argv.pop(1)
conf_parser = ConfigParser.ConfigParser( {'here':os.getcwd()} )
conf_parser.read( ini_file )
configuration = {}
for key, value in conf_parser.items( "app:main" ):
configuration[key] = value
app = galaxy.app.UniverseApplication( global_conf = ini_file, **configuration )
#Step through Datasets, determining size on disk for each.
print "Determining the size of each dataset..."
for row in app.model.Dataset.table.select().execute():
purged = app.model.Dataset.get( row.id ).purged
file_size = app.model.Dataset.get( row.id ).file_size
if file_size is None and not purged:
size_on_disk = app.model.Dataset.get( row.id ).get_size()
print "Updating Dataset.%d with file_size: %d" %( row.id, size_on_disk )
app.model.Dataset.table.update( app.model.Dataset.table.c.id == row.id ).execute( file_size=size_on_disk )
app.shutdown()
sys.exit(0)
if __name__ == "__main__":
main()
| 33.888889 | 118 | 0.649836 |
"""
Updates dataset.size column.
Remember to backup your database before running.
"""
import sys, os, ConfigParser
import galaxy.app
assert sys.version_info[:2] >= ( 2, 4 )
def usage(prog) :
print "usage: %s galaxy.ini" % prog
print """
Updates the dataset.size column. Users are advised to backup the database before
running.
"""
def main():
if len(sys.argv) != 1 or sys.argv[1] == "-h" or sys.argv[1] == "--help" :
usage(sys.argv[0])
sys.exit()
ini_file = sys.argv.pop(1)
conf_parser = ConfigParser.ConfigParser( {'here':os.getcwd()} )
conf_parser.read( ini_file )
configuration = {}
for key, value in conf_parser.items( "app:main" ):
configuration[key] = value
app = galaxy.app.UniverseApplication( global_conf = ini_file, **configuration )
print "Determining the size of each dataset..."
for row in app.model.Dataset.table.select().execute():
purged = app.model.Dataset.get( row.id ).purged
file_size = app.model.Dataset.get( row.id ).file_size
if file_size is None and not purged:
size_on_disk = app.model.Dataset.get( row.id ).get_size()
print "Updating Dataset.%d with file_size: %d" %( row.id, size_on_disk )
app.model.Dataset.table.update( app.model.Dataset.table.c.id == row.id ).execute( file_size=size_on_disk )
app.shutdown()
sys.exit(0)
if __name__ == "__main__":
main()
| false | true |
f71f5536fde4ae2ab6e7c0ba9feffb7cea1900eb | 37,453 | py | Python | src/base/android/jni_generator/jni_generator.py | jxjnjjn/chromium | 435c1d02fd1b99001dc9e1e831632c894523580d | [
"Apache-2.0"
] | 9 | 2018-09-21T05:36:12.000Z | 2021-11-15T15:14:36.000Z | base/android/jni_generator/jni_generator.py | devasia1000/chromium | 919a8a666862fb866a6bb7aa7f3ae8c0442b4828 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null | base/android/jni_generator/jni_generator.py | devasia1000/chromium | 919a8a666862fb866a6bb7aa7f3ae8c0442b4828 | [
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 3 | 2018-11-28T14:54:13.000Z | 2020-07-02T07:36:07.000Z | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Extracts native methods from a Java file and generates the JNI bindings.
If you change this, please run and update the tests."""
import collections
import errno
import optparse
import os
import re
import string
from string import Template
import subprocess
import sys
import textwrap
import zipfile
class ParseError(Exception):
  """Raised when the input file cannot be parsed into JNI bindings."""

  def __init__(self, description, *context_lines):
    Exception.__init__(self)
    self.description = description
    self.context_lines = context_lines

  def __str__(self):
    joined_context = '\n'.join(self.context_lines)
    return '***\nERROR: {0}\n\n{1}\n***'.format(self.description,
                                                joined_context)
class Param(object):
  """A single parameter of either a Java or a native method."""

  def __init__(self, **kwargs):
    # Both keys are required; a missing keyword raises KeyError, matching
    # the strictness of the original keyword-only construction.
    self.datatype, self.name = kwargs['datatype'], kwargs['name']
class NativeMethod(object):
  """Describes a C/C++ method that is called by Java code."""

  def __init__(self, **kwargs):
    self.static = kwargs['static']
    self.java_class_name = kwargs['java_class_name']
    self.return_type = kwargs['return_type']
    self.name = kwargs['name']
    self.params = kwargs['params']
    if self.params:
      assert type(self.params) is list
      assert type(self.params[0]) is Param
    first_param = self.params[0] if self.params else None
    if (first_param is not None and
        first_param.datatype == 'int' and
        first_param.name.startswith('native')):
      # A leading "int nativeFoo" parameter marks this as a method on a
      # native object whose pointer is passed in from Java; p0_type is the
      # C++ type that pointer is cast to (overridable via
      # @NativeClassQualifiedName).
      self.type = 'method'
      self.p0_type = (kwargs.get('native_class_name') or
                      first_param.name[len('native'):])
    else:
      self.type = 'function'
    self.method_id_var_name = kwargs.get('method_id_var_name', None)
class CalledByNative(object):
  """Describes a Java method exported to C/C++ via @CalledByNative."""

  def __init__(self, **kwargs):
    # Mandatory attributes; a missing keyword raises KeyError, as before.
    for attr in ('system_class', 'unchecked', 'static', 'java_class_name',
                 'return_type', 'name', 'params'):
      setattr(self, attr, kwargs[attr])
    # Optional attributes with defaults.
    self.method_id_var_name = kwargs.get('method_id_var_name', None)
    self.is_constructor = kwargs.get('is_constructor', False)
    # Derived: which JNIEnv Call*Method variant the generated stub uses, and
    # whether the call result needs a static_cast.
    self.env_call = GetEnvCall(
        self.is_constructor, self.static, self.return_type)
    self.static_cast = GetStaticCastForReturnType(self.return_type)
def JavaDataTypeToC(java_type):
  """Returns the JNI C type corresponding to the given Java type name."""
  pod_types = {
      'int': 'jint',
      'byte': 'jbyte',
      'char': 'jchar',
      'short': 'jshort',
      'boolean': 'jboolean',
      'long': 'jlong',
      'double': 'jdouble',
      'float': 'jfloat',
  }
  well_known_types = {
      'void': 'void',
      'String': 'jstring',
      'java/lang/String': 'jstring',
      'Class': 'jclass',
      'java/lang/Class': 'jclass',
  }
  if java_type in pod_types:
    return pod_types[java_type]
  if java_type in well_known_types:
    return well_known_types[java_type]
  if java_type.endswith('[]'):
    # Arrays of primitives map to the matching j<type>Array; everything
    # else (object arrays, nested arrays) is a jobjectArray.
    element_type = java_type[:-2]
    if element_type in pod_types:
      return pod_types[element_type] + 'Array'
    return 'jobjectArray'
  return 'jobject'
class JniParams(object):
  # Mutable class-level state, populated once per input file by
  # SetFullyQualifiedClass() and ExtractImportsAndInnerClasses(); JavaToJni()
  # consults it to resolve unqualified type names.
  _imports = []
  _fully_qualified_class = ''
  _package = ''
  _inner_classes = []

  @staticmethod
  def SetFullyQualifiedClass(fully_qualified_class):
    # Stores the JNI-style descriptor ('L' prefix, '/' separators) of the
    # class being processed, plus its package path for fallback resolution.
    JniParams._fully_qualified_class = 'L' + fully_qualified_class
    JniParams._package = '/'.join(fully_qualified_class.split('/')[:-1])

  @staticmethod
  def ExtractImportsAndInnerClasses(contents):
    # Records every "import x.y.Z;" as 'Lx/y/Z', and every class/interface
    # declared in |contents| (other than the outer class itself) as an inner
    # class 'LOuter$Inner'. Newlines are stripped first so multi-line
    # declarations match.
    contents = contents.replace('\n', '')
    re_import = re.compile(r'import.*?(?P<class>\S*?);')
    for match in re.finditer(re_import, contents):
      JniParams._imports += ['L' + match.group('class').replace('.', '/')]

    re_inner = re.compile(r'(class|interface)\s+?(?P<name>\w+?)\W')
    for match in re.finditer(re_inner, contents):
      inner = match.group('name')
      if not JniParams._fully_qualified_class.endswith(inner):
        JniParams._inner_classes += [JniParams._fully_qualified_class + '$' +
                                     inner]

  @staticmethod
  def JavaToJni(param):
    """Converts a java param into a JNI signature type."""
    # Resolution order is significant: primitives, already-qualified names,
    # well-known java.lang types / this class / its inner classes, imports,
    # inner classes of imported classes, and finally the current package.
    pod_param_map = {
        'int': 'I',
        'boolean': 'Z',
        'char': 'C',
        'short': 'S',
        'long': 'J',
        'double': 'D',
        'float': 'F',
        'byte': 'B',
        'void': 'V',
    }
    object_param_list = [
        'Ljava/lang/Boolean',
        'Ljava/lang/Integer',
        'Ljava/lang/Long',
        'Ljava/lang/Object',
        'Ljava/lang/String',
        'Ljava/lang/Class',
    ]
    prefix = ''
    # Array? Each trailing '[]' becomes a leading '[' in the signature.
    while param[-2:] == '[]':
      prefix += '['
      param = param[:-2]
    # Generic? Type arguments are erased from the JNI signature.
    if '<' in param:
      param = param[:param.index('<')]
    if param in pod_param_map:
      return prefix + pod_param_map[param]
    if '/' in param:
      # Coming from javap, use the fully qualified param directly.
      return prefix + 'L' + param + ';'
    for qualified_name in (object_param_list +
                           [JniParams._fully_qualified_class] +
                           JniParams._inner_classes):
      if (qualified_name.endswith('/' + param) or
          qualified_name.endswith('$' + param.replace('.', '$')) or
          qualified_name == 'L' + param):
        return prefix + qualified_name + ';'

    # Is it from an import? (e.g. referencing Class from import pkg.Class;
    # note that referencing an inner class Inner from import pkg.Class.Inner
    # is not supported).
    for qualified_name in JniParams._imports:
      if qualified_name.endswith('/' + param):
        # Ensure it's not an inner class. Heuristic: a capitalized
        # second-to-last path component means the import names Outer.Inner.
        components = qualified_name.split('/')
        if len(components) > 2 and components[-2][0].isupper():
          raise SyntaxError('Inner class (%s) can not be imported '
                            'and used by JNI (%s). Please import the outer '
                            'class and use Outer.Inner instead.' %
                            (qualified_name, param))
        return prefix + qualified_name + ';'

    # Is it an inner class from an outer class import? (e.g. referencing
    # Class.Inner from import pkg.Class).
    if '.' in param:
      components = param.split('.')
      outer = '/'.join(components[:-1])
      inner = components[-1]
      for qualified_name in JniParams._imports:
        if qualified_name.endswith('/' + outer):
          return prefix + qualified_name + '$' + inner + ';'

    # Type not found, falling back to same package as this class.
    return prefix + 'L' + JniParams._package + '/' + param + ';'

  @staticmethod
  def Signature(params, returns, wrap):
    """Returns the JNI signature for the given datatypes.

    With wrap=True the signature is emitted as one quoted fragment per line
    (suitable for C string-literal concatenation in generated code);
    otherwise it is a single quoted string.
    """
    items = ['(']
    items += [JniParams.JavaToJni(param.datatype) for param in params]
    items += [')']
    items += [JniParams.JavaToJni(returns)]
    if wrap:
      return '\n' + '\n'.join(['"' + item + '"' for item in items])
    else:
      return '"' + ''.join(items) + '"'

  @staticmethod
  def Parse(params):
    """Parses a raw Java parameter list string into a list of Param objects.

    'final' modifiers are dropped; a parameter without a name gets a
    synthetic one ('p0', 'p1', ...).
    """
    if not params:
      return []
    ret = []
    for p in [p.strip() for p in params.split(',')]:
      items = p.split(' ')
      if 'final' in items:
        items.remove('final')
      param = Param(
          datatype=items[0],
          name=(items[1] if len(items) > 1 else 'p%s' % len(ret)),
      )
      ret += [param]
    return ret
def ExtractJNINamespace(contents):
  """Returns the C++ namespace from an @JNINamespace("...") annotation.

  Returns the empty string when the annotation is absent; when it occurs
  more than once, the first occurrence wins.
  """
  matches = re.findall(r'.*?@JNINamespace\("(.*?)"\)', contents)
  return matches[0] if matches else ''
def ExtractFullyQualifiedJavaClassName(java_file_name, contents):
  """Derives the fully-qualified class name (slash form) for a .java file.

  Combines the package declaration found in |contents| with the file's base
  name. Raises SyntaxError when no package line is present.
  """
  packages = re.findall('.*?package (.*?);', contents)
  if not packages:
    raise SyntaxError('Unable to find "package" line in %s' % java_file_name)
  simple_name = os.path.splitext(os.path.basename(java_file_name))[0]
  return packages[0].replace('.', '/') + '/' + simple_name
def ExtractNatives(contents):
  """Returns a list of NativeMethod objects for each 'native' declaration.

  Newlines are stripped so multi-line declarations match. Recognizes the
  optional @NativeClassQualifiedName and @NativeCall annotations preceding
  the declaration.
  """
  contents = contents.replace('\n', '')
  natives = []
  re_native = re.compile(r'(@NativeClassQualifiedName'
                         '\(\"(?P<native_class_name>.*?)\"\))?\s*'
                         '(@NativeCall(\(\"(?P<java_class_name>.*?)\"\)))?\s*'
                         '(?P<qualifiers>\w+\s\w+|\w+|\s+)\s*?native '
                         '(?P<return_type>\S*?) '
                         '(?P<name>\w+?)\((?P<params>.*?)\);')
  for match in re.finditer(re_native, contents):
    native = NativeMethod(
        static='static' in match.group('qualifiers'),
        java_class_name=match.group('java_class_name'),
        native_class_name=match.group('native_class_name'),
        # NOTE(review): str.replace removes *every* occurrence of 'native'
        # in the method name, not just the leading prefix — confirm no
        # method name legitimately contains 'native' elsewhere.
        return_type=match.group('return_type'),
        name=match.group('name').replace('native', ''),
        params=JniParams.Parse(match.group('params')))
    natives += [native]
  return natives
def GetStaticCastForReturnType(return_type):
  """Returns the jtype the call result must be static_cast to, or None.

  String returns and primitive arrays need a cast from the generic jobject
  returned by CallObjectMethod to their concrete JNI type; any other array
  type is cast to jobjectArray; scalar types need no cast.
  """
  type_map = { 'String' : 'jstring',
               'java/lang/String' : 'jstring',
               'boolean[]': 'jbooleanArray',
               'byte[]': 'jbyteArray',
               'char[]': 'jcharArray',
               'short[]': 'jshortArray',
               'int[]': 'jintArray',
               'long[]': 'jlongArray',
               # Bug fix: float[] was missing, so float-array returns were
               # cast to jobjectArray and the generated C++ did not compile.
               'float[]': 'jfloatArray',
               'double[]': 'jdoubleArray' }
  ret = type_map.get(return_type, None)
  if ret:
    return ret
  if return_type.endswith('[]'):
    return 'jobjectArray'
  return None
def GetEnvCall(is_constructor, is_static, return_type):
  """Returns the JNIEnv method name used to invoke a Java method.

  Constructors use NewObject; otherwise the name is built from the return
  type ('Object' for anything not in the primitive map) plus an optional
  'Static' prefix.
  """
  if is_constructor:
    return 'NewObject'
  primitive_calls = {
      'boolean': 'Boolean',
      'byte': 'Byte',
      'char': 'Char',
      'short': 'Short',
      'int': 'Int',
      'long': 'Long',
      'float': 'Float',
      'void': 'Void',
      'double': 'Double',
      'Object': 'Object',
  }
  suffix = primitive_calls.get(return_type, 'Object')
  qualifier = 'Static' if is_static else ''
  return 'Call' + qualifier + suffix + 'Method'
def GetMangledParam(datatype):
  """Returns a short mangled identifier for a JNI signature type.

  Short (<= 2 char) signatures are kept as-is with '[' mapped to 'A'.
  Longer signatures drop their first character and keep only array markers
  (as 'A'), uppercase letters, and the (uppercased) character following a
  '/' or an 'L'.
  """
  if len(datatype) <= 2:
    return datatype.replace('[', 'A')
  mangled = []
  for prev_char, cur_char in zip(datatype, datatype[1:]):
    if cur_char == '[':
      mangled.append('A')
    elif cur_char.isupper() or prev_char in '/L':
      mangled.append(cur_char.upper())
  return ''.join(mangled)
def GetMangledMethodName(name, params, return_type):
  """Returns a mangled method name for the given signature.

  The returned name can be used as a C identifier and will be unique for
  all valid overloads of the same method.

  Args:
    name: string.
    params: list of Param.
    return_type: string.

  Returns:
    A mangled name.
  """
  # The return type comes first, followed by each parameter type, all
  # reduced to short mangled fragments and joined with underscores.
  datatypes = [return_type] + [param.datatype for param in params]
  mangled_name = name + '_'.join(
      GetMangledParam(JniParams.JavaToJni(datatype)) for datatype in datatypes)
  assert re.match(r'[0-9a-zA-Z_]+', mangled_name)
  return mangled_name
def MangleCalledByNatives(called_by_natives):
  """Assigns a unique method_id_var_name to every CalledByNative entry.

  Overloaded methods (same class and name) get a signature-mangled name;
  unique methods keep their plain name. Entries are mutated in place and
  the (same) list is returned.
  """
  # First pass: count (class, name) pairs to detect overloads.
  overload_counts = collections.defaultdict(int)
  for cbn in called_by_natives:
    overload_counts[(cbn.java_class_name, cbn.name)] += 1
  # Second pass: mangle only where the plain name would collide.
  for cbn in called_by_natives:
    if overload_counts[(cbn.java_class_name, cbn.name)] > 1:
      cbn.method_id_var_name = GetMangledMethodName(
          cbn.name, cbn.params, cbn.return_type)
    else:
      cbn.method_id_var_name = cbn.name
  return called_by_natives
# Regex to match the JNI return types that should be included in a
# ScopedJavaLocalRef.
RE_SCOPED_JNI_RETURN_TYPES = re.compile('jobject|jclass|jstring|.*Array')

# Regex to match a string like "@CalledByNative public void foo(int bar)".
# Groups: optional "Unchecked" suffix, optional inner-class annotation value,
# modifiers, return type, method name, and the raw parameter list.
RE_CALLED_BY_NATIVE = re.compile(
    '@CalledByNative(?P<Unchecked>(Unchecked)*?)(?:\("(?P<annotation>.*)"\))?'
    '\s+(?P<prefix>[\w ]*?)'
    '\s*(?P<return_type>\S+?)'
    '\s+(?P<name>\w+)'
    '\s*\((?P<params>[^\)]*)\)')
def ExtractCalledByNatives(contents):
  """Parses all methods annotated with @CalledByNative.

  Args:
    contents: the contents of the java file.

  Returns:
    A list of dict with information about the annotated methods.
    TODO(bulach): return a CalledByNative object.

  Raises:
    ParseError: if unable to parse.
  """
  called_by_natives = []
  for match in re.finditer(RE_CALLED_BY_NATIVE, contents):
    called_by_natives += [CalledByNative(
        system_class=False,
        unchecked='Unchecked' in match.group('Unchecked'),
        static='static' in match.group('prefix'),
        java_class_name=match.group('annotation') or '',
        return_type=match.group('return_type'),
        name=match.group('name'),
        params=JniParams.Parse(match.group('params')))]
  # Check for any @CalledByNative occurrences that weren't matched: strip
  # everything that did match, then look for leftover annotations, reporting
  # the offending line plus the one after it for context.
  unmatched_lines = re.sub(RE_CALLED_BY_NATIVE, '', contents).split('\n')
  for line1, line2 in zip(unmatched_lines, unmatched_lines[1:]):
    if '@CalledByNative' in line1:
      raise ParseError('could not parse @CalledByNative method signature',
                       line1, line2)
  # Overloads get mangled method_id_var_names before the list is returned.
  return MangleCalledByNatives(called_by_natives)
class JNIFromJavaP(object):
  """Uses 'javap' to parse a .class file and generate the JNI header file."""

  def __init__(self, contents, namespace):
    # |contents| is javap output split into lines; line 1 is expected to
    # hold the class/interface declaration — TODO confirm this holds for
    # the javap version in use.
    self.contents = contents
    self.namespace = namespace
    self.fully_qualified_class = re.match(
        '.*?(class|interface) (?P<class_name>.*?)( |{)',
        contents[1]).group('class_name')
    self.fully_qualified_class = self.fully_qualified_class.replace('.', '/')
    JniParams.SetFullyQualifiedClass(self.fully_qualified_class)
    self.java_class_name = self.fully_qualified_class.split('/')[-1]
    # Default namespace when none was passed on the command line.
    if not self.namespace:
      self.namespace = 'JNI_' + self.java_class_name
    # Every remaining javap line that looks like a method declaration
    # becomes a system-class CalledByNative entry.
    re_method = re.compile('(?P<prefix>.*?)(?P<return_type>\S+?) (?P<name>\w+?)'
                           '\((?P<params>.*?)\)')
    self.called_by_natives = []
    for content in contents[2:]:
      match = re.match(re_method, content)
      if not match:
        continue
      self.called_by_natives += [CalledByNative(
          system_class=True,
          unchecked=False,
          static='static' in match.group('prefix'),
          java_class_name='',
          return_type=match.group('return_type').replace('.', '/'),
          name=match.group('name'),
          params=JniParams.Parse(match.group('params').replace('.', '/')))]
    # Public constructors are emitted as pseudo-methods named 'Constructor'
    # returning the class itself.
    re_constructor = re.compile('.*? public ' +
                                self.fully_qualified_class.replace('/', '.') +
                                '\((?P<params>.*?)\)')
    for content in contents[2:]:
      match = re.match(re_constructor, content)
      if not match:
        continue
      self.called_by_natives += [CalledByNative(
          system_class=True,
          unchecked=False,
          static=False,
          java_class_name='',
          return_type=self.fully_qualified_class,
          name='Constructor',
          params=JniParams.Parse(match.group('params').replace('.', '/')),
          is_constructor=True)]
    self.called_by_natives = MangleCalledByNatives(self.called_by_natives)
    # .class inputs never declare natives, hence the empty natives list.
    self.inl_header_file_generator = InlHeaderFileGenerator(
        self.namespace, self.fully_qualified_class, [], self.called_by_natives)

  def GetContent(self):
    # Returns the generated JNI header text.
    return self.inl_header_file_generator.GetContent()

  @staticmethod
  def CreateFromClass(class_file, namespace):
    # Runs 'javap' on the .class file and parses its stdout. NOTE(review):
    # javap's exit status and stderr are not checked; a failure yields empty
    # output and a downstream parse error.
    class_name = os.path.splitext(os.path.basename(class_file))[0]
    p = subprocess.Popen(args=['javap', class_name],
                         cwd=os.path.dirname(class_file),
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, _ = p.communicate()
    jni_from_javap = JNIFromJavaP(stdout.split('\n'), namespace)
    return jni_from_javap
class JNIFromJavaSource(object):
  """Uses the given java source file to generate the JNI header file."""

  def __init__(self, contents, fully_qualified_class):
    contents = self._RemoveComments(contents)
    JniParams.SetFullyQualifiedClass(fully_qualified_class)
    JniParams.ExtractImportsAndInnerClasses(contents)
    jni_namespace = ExtractJNINamespace(contents)
    natives = ExtractNatives(contents)
    called_by_natives = ExtractCalledByNatives(contents)
    # A Java file with neither natives nor @CalledByNative methods has no
    # business being fed to the generator — fail loudly.
    if len(natives) == 0 and len(called_by_natives) == 0:
      raise SyntaxError('Unable to find any JNI methods for %s.' %
                        fully_qualified_class)
    inl_header_file_generator = InlHeaderFileGenerator(
        jni_namespace, fully_qualified_class, natives, called_by_natives)
    self.content = inl_header_file_generator.GetContent()

  def _RemoveComments(self, contents):
    # We need to support both inline and block comments, and we need to handle
    # strings that contain '//' or '/*'. Rather than trying to do all that with
    # regexps, we just pipe the contents through the C preprocessor. We tell cpp
    # the file has already been preprocessed, so it just removes comments and
    # doesn't try to parse #include, #pragma etc.
    #
    # TODO(husky): This is a bit hacky. It would be cleaner to use a real Java
    # parser. Maybe we could ditch JNIFromJavaSource and just always use
    # JNIFromJavaP; or maybe we could rewrite this script in Java and use APT.
    # http://code.google.com/p/chromium/issues/detail?id=138941
    p = subprocess.Popen(args=['cpp', '-fpreprocessed'],
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, _ = p.communicate(contents)
    return stdout

  def GetContent(self):
    # Returns the generated JNI header text computed in __init__.
    return self.content

  @staticmethod
  def CreateFromFile(java_file_name):
    # Python 2 'file' builtin; the handle is not explicitly closed and is
    # reclaimed by the GC.
    contents = file(java_file_name).read()
    fully_qualified_class = ExtractFullyQualifiedJavaClassName(java_file_name,
                                                               contents)
    return JNIFromJavaSource(contents, fully_qualified_class)
class InlHeaderFileGenerator(object):
  """Generates an inline header file for JNI integration."""

  def __init__(self, namespace, fully_qualified_class, natives,
               called_by_natives):
    self.namespace = namespace
    self.fully_qualified_class = fully_qualified_class
    self.class_name = self.fully_qualified_class.split('/')[-1]
    self.natives = natives
    self.called_by_natives = called_by_natives
    self.header_guard = fully_qualified_class.replace('/', '_') + '_JNI'

  def GetContent(self):
    """Returns the content of the JNI binding file."""
    template = Template("""\
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// This file is autogenerated by
//     ${SCRIPT_NAME}
// For
//     ${FULLY_QUALIFIED_CLASS}

#ifndef ${HEADER_GUARD}
#define ${HEADER_GUARD}

#include <jni.h>

#include "base/android/jni_android.h"
#include "base/android/scoped_java_ref.h"
#include "base/basictypes.h"
#include "base/logging.h"

using base::android::ScopedJavaLocalRef;

// Step 1: forward declarations.
namespace {
$CLASS_PATH_DEFINITIONS
}  // namespace

$OPEN_NAMESPACE
$FORWARD_DECLARATIONS

// Step 2: method stubs.
$METHOD_STUBS

// Step 3: RegisterNatives.

static bool RegisterNativesImpl(JNIEnv* env) {
$REGISTER_NATIVES_IMPL

  return true;
}

$CLOSE_NAMESPACE
#endif  // ${HEADER_GUARD}
""")
    # NOTE(review): assumes this script's absolute path contains a 'base'
    # component; index() raises ValueError otherwise.
    script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
    base_index = script_components.index('base')
    script_name = os.sep.join(script_components[base_index:])
    values = {
        'SCRIPT_NAME': script_name,
        'FULLY_QUALIFIED_CLASS': self.fully_qualified_class,
        'CLASS_PATH_DEFINITIONS': self.GetClassPathDefinitionsString(),
        'FORWARD_DECLARATIONS': self.GetForwardDeclarationsString(),
        'METHOD_STUBS': self.GetMethodStubsString(),
        'OPEN_NAMESPACE': self.GetOpenNamespaceString(),
        'REGISTER_NATIVES_IMPL': self.GetRegisterNativesImplString(),
        'CLOSE_NAMESPACE': self.GetCloseNamespaceString(),
        'HEADER_GUARD': self.header_guard,
    }
    return WrapOutput(template.substitute(values))

  def GetClassPathDefinitionsString(self):
    """Returns the anonymous-namespace class-path constant definitions."""
    ret = []
    ret += [self.GetClassPathDefinitions()]
    return '\n'.join(ret)

  def GetForwardDeclarationsString(self):
    """Returns forward declarations for natives implemented as functions."""
    ret = []
    for native in self.natives:
      if native.type != 'method':
        ret += [self.GetForwardDeclaration(native)]
    return '\n'.join(ret)

  def GetMethodStubsString(self):
    """Returns the stubs for both native methods and @CalledByNative."""
    ret = []
    for native in self.natives:
      if native.type == 'method':
        ret += [self.GetNativeMethodStub(native)]
    for called_by_native in self.called_by_natives:
      ret += [self.GetCalledByNativeMethodStub(called_by_native)]
    return '\n'.join(ret)

  def GetKMethodsString(self, clazz):
    """Returns the JNINativeMethod table entries for the given class."""
    ret = []
    for native in self.natives:
      if (native.java_class_name == clazz or
          (not native.java_class_name and clazz == self.class_name)):
        ret += [self.GetKMethodArrayEntry(native)]
    return '\n'.join(ret)

  def GetRegisterNativesImplString(self):
    """Returns the implementation for RegisterNatives."""
    template = Template("""\
  static const JNINativeMethod kMethods${JAVA_CLASS}[] = {
${KMETHODS}
  };
  const int kMethods${JAVA_CLASS}Size = arraysize(kMethods${JAVA_CLASS});

  if (env->RegisterNatives(g_${JAVA_CLASS}_clazz,
                           kMethods${JAVA_CLASS},
                           kMethods${JAVA_CLASS}Size) < 0) {
    LOG(ERROR) << "RegisterNatives failed in " << __FILE__;
    return false;
  }
""")
    ret = [self.GetFindClasses()]
    all_classes = self.GetUniqueClasses(self.natives)
    all_classes[self.class_name] = self.fully_qualified_class
    for clazz in all_classes:
      kmethods = self.GetKMethodsString(clazz)
      if kmethods:
        values = {'JAVA_CLASS': clazz,
                  'KMETHODS': kmethods}
        ret += [template.substitute(values)]
    if not ret: return ''
    return '\n' + '\n'.join(ret)

  def GetOpenNamespaceString(self):
    """Returns 'namespace a { namespace b {' lines for a nested namespace."""
    if self.namespace:
      all_namespaces = ['namespace %s {' % ns
                        for ns in self.namespace.split('::')]
      return '\n'.join(all_namespaces)
    return ''

  def GetCloseNamespaceString(self):
    """Returns matching closing-brace lines, innermost namespace first."""
    if self.namespace:
      all_namespaces = ['}  // namespace %s' % ns
                        for ns in self.namespace.split('::')]
      all_namespaces.reverse()
      return '\n'.join(all_namespaces) + '\n'
    return ''

  def GetJNIFirstParam(self, native):
    """Returns the implicit first JNI parameter (jobject obj/jclass clazz)."""
    ret = []
    if native.type == 'method':
      ret = ['jobject obj']
    elif native.type == 'function':
      if native.static:
        ret = ['jclass clazz']
      else:
        ret = ['jobject obj']
    return ret

  def GetParamsInDeclaration(self, native):
    """Returns the params for the stub declaration.

    Args:
      native: the native dictionary describing the method.

    Returns:
      A string containing the params.
    """
    return ',\n    '.join(self.GetJNIFirstParam(native) +
                          [JavaDataTypeToC(param.datatype) + ' ' +
                           param.name
                           for param in native.params])

  def GetCalledByNativeParamsInDeclaration(self, called_by_native):
    """Returns the declaration parameter list for a @CalledByNative stub."""
    return ',\n    '.join([JavaDataTypeToC(param.datatype) + ' ' +
                           param.name
                           for param in called_by_native.params])

  def GetForwardDeclaration(self, native):
    """Returns the forward declaration of one function-type native."""
    template = Template("""
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS});
""")
    values = {'RETURN': JavaDataTypeToC(native.return_type),
              'NAME': native.name,
              'PARAMS': self.GetParamsInDeclaration(native)}
    return template.substitute(values)

  def GetNativeMethodStub(self, native):
    """Returns stubs for native methods."""
    template = Template("""\
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS_IN_DECLARATION}) {
  DCHECK(${PARAM0_NAME}) << "${NAME}";
  ${P0_TYPE}* native = reinterpret_cast<${P0_TYPE}*>(${PARAM0_NAME});
  return native->${NAME}(env, obj${PARAMS_IN_CALL})${POST_CALL};
}
""")
    params_for_call = ', '.join(p.name for p in native.params[1:])
    if params_for_call:
      params_for_call = ', ' + params_for_call

    return_type = JavaDataTypeToC(native.return_type)
    # Reference-typed returns are released from a ScopedJavaLocalRef.
    if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
      scoped_return_type = 'ScopedJavaLocalRef<' + return_type + '>'
      post_call = '.Release()'
    else:
      scoped_return_type = return_type
      post_call = ''
    values = {
        'RETURN': return_type,
        'SCOPED_RETURN': scoped_return_type,
        'NAME': native.name,
        'PARAMS_IN_DECLARATION': self.GetParamsInDeclaration(native),
        'PARAM0_NAME': native.params[0].name,
        'P0_TYPE': native.p0_type,
        'PARAMS_IN_CALL': params_for_call,
        'POST_CALL': post_call
    }
    return template.substitute(values)

  def GetCalledByNativeMethodStub(self, called_by_native):
    """Returns the C++ stub that calls back into a @CalledByNative method."""
    function_signature_template = Template("""\
static ${RETURN_TYPE} Java_${JAVA_CLASS}_${METHOD_ID_VAR_NAME}(\
JNIEnv* env${FIRST_PARAM_IN_DECLARATION}${PARAMS_IN_DECLARATION})""")
    function_header_template = Template("""\
${FUNCTION_SIGNATURE} {""")
    # System-class stubs may be unreferenced; mark them unused to avoid
    # compiler warnings.
    function_header_with_unused_template = Template("""\
${FUNCTION_SIGNATURE} __attribute__ ((unused));
${FUNCTION_SIGNATURE} {""")
    template = Template("""
static base::subtle::AtomicWord g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = 0;
${FUNCTION_HEADER}
  /* Must call RegisterNativesImpl()  */
  DCHECK(g_${JAVA_CLASS}_clazz);
  jmethodID method_id =
    ${GET_METHOD_ID_IMPL}
  ${RETURN_DECLARATION}
  ${PRE_CALL}env->${ENV_CALL}(${FIRST_PARAM_IN_CALL},
      method_id${PARAMS_IN_CALL})${POST_CALL};
  ${CHECK_EXCEPTION}
  ${RETURN_CLAUSE}
}""")
    # Static methods and constructors are invoked on the jclass; instance
    # methods take an explicit jobject.
    if called_by_native.static or called_by_native.is_constructor:
      first_param_in_declaration = ''
      first_param_in_call = ('g_%s_clazz' %
                             (called_by_native.java_class_name or
                              self.class_name))
    else:
      first_param_in_declaration = ', jobject obj'
      first_param_in_call = 'obj'
    params_in_declaration = self.GetCalledByNativeParamsInDeclaration(
        called_by_native)
    if params_in_declaration:
      params_in_declaration = ', ' + params_in_declaration
    params_for_call = ', '.join(param.name
                                for param in called_by_native.params)
    if params_for_call:
      params_for_call = ', ' + params_for_call
    pre_call = ''
    post_call = ''
    if called_by_native.static_cast:
      pre_call = 'static_cast<%s>(' % called_by_native.static_cast
      post_call = ')'
    check_exception = ''
    if not called_by_native.unchecked:
      check_exception = 'base::android::CheckException(env);'
    return_type = JavaDataTypeToC(called_by_native.return_type)
    return_declaration = ''
    return_clause = ''
    if return_type != 'void':
      pre_call = ' ' + pre_call
      return_declaration = return_type + ' ret ='
      # Reference-typed returns are wrapped in a ScopedJavaLocalRef.
      if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
        return_type = 'ScopedJavaLocalRef<' + return_type + '>'
        return_clause = 'return ' + return_type + '(env, ret);'
      else:
        return_clause = 'return ret;'
    values = {
        'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
        'METHOD': called_by_native.name,
        'RETURN_TYPE': return_type,
        'RETURN_DECLARATION': return_declaration,
        'RETURN_CLAUSE': return_clause,
        'FIRST_PARAM_IN_DECLARATION': first_param_in_declaration,
        'PARAMS_IN_DECLARATION': params_in_declaration,
        'STATIC': 'Static' if called_by_native.static else '',
        'PRE_CALL': pre_call,
        'POST_CALL': post_call,
        'ENV_CALL': called_by_native.env_call,
        'FIRST_PARAM_IN_CALL': first_param_in_call,
        'PARAMS_IN_CALL': params_for_call,
        'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
        'CHECK_EXCEPTION': check_exception,
        'GET_METHOD_ID_IMPL': self.GetMethodIDImpl(called_by_native)
    }
    values['FUNCTION_SIGNATURE'] = (
        function_signature_template.substitute(values))
    if called_by_native.system_class:
      values['FUNCTION_HEADER'] = (
          function_header_with_unused_template.substitute(values))
    else:
      values['FUNCTION_HEADER'] = function_header_template.substitute(values)
    return template.substitute(values)

  def GetKMethodArrayEntry(self, native):
    """Returns one JNINativeMethod table entry for the given native."""
    template = Template("""\
    { "native${NAME}", ${JNI_SIGNATURE}, reinterpret_cast<void*>(${NAME}) },""")
    values = {'NAME': native.name,
              'JNI_SIGNATURE': JniParams.Signature(native.params,
                                                   native.return_type,
                                                   True)}
    return template.substitute(values)

  def GetUniqueClasses(self, origin):
    """Returns {short class name: JNI class path} for this class and any
    inner classes referenced by the given natives/called-by-natives."""
    ret = {self.class_name: self.fully_qualified_class}
    for entry in origin:
      class_name = self.class_name
      jni_class_path = self.fully_qualified_class
      if entry.java_class_name:
        class_name = entry.java_class_name
        jni_class_path = self.fully_qualified_class + '$' + class_name
      ret[class_name] = jni_class_path
    return ret

  def GetClassPathDefinitions(self):
    """Returns the ClassPath constants."""
    ret = []
    template = Template("""\
const char k${JAVA_CLASS}ClassPath[] = "${JNI_CLASS_PATH}";""")
    native_classes = self.GetUniqueClasses(self.natives)
    called_by_native_classes = self.GetUniqueClasses(self.called_by_natives)
    all_classes = native_classes
    all_classes.update(called_by_native_classes)
    for clazz in all_classes:
      values = {
          'JAVA_CLASS': clazz,
          'JNI_CLASS_PATH': all_classes[clazz],
      }
      ret += [template.substitute(values)]
    # Blank separator line between the path constants and the jclass
    # declarations. Bug fix: this was previously "ret += ''", which extends
    # a list with an empty string, i.e. a silent no-op.
    ret += ['']
    # The jclass template is loop-invariant; build it once.
    jclass_template = Template("""\
// Leaking this jclass as we cannot use LazyInstance from some threads.
jclass g_${JAVA_CLASS}_clazz = NULL;""")
    for clazz in called_by_native_classes:
      ret += [jclass_template.substitute({'JAVA_CLASS': clazz})]
    return '\n'.join(ret)

  def GetFindClasses(self):
    """Returns the implementation of FindClass for all known classes."""
    template = Template("""\
  g_${JAVA_CLASS}_clazz = reinterpret_cast<jclass>(env->NewGlobalRef(
      base::android::GetClass(env, k${JAVA_CLASS}ClassPath).obj()));""")
    ret = []
    for clazz in self.GetUniqueClasses(self.called_by_natives):
      values = {'JAVA_CLASS': clazz}
      ret += [template.substitute(values)]
    return '\n'.join(ret)

  def GetMethodIDImpl(self, called_by_native):
    """Returns the implementation of GetMethodID."""
    template = Template("""\
  base::android::MethodID::LazyGet<
      base::android::MethodID::TYPE_${STATIC}>(
      env, g_${JAVA_CLASS}_clazz,
      "${JNI_NAME}",
      ${JNI_SIGNATURE},
      &g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME});
""")
    jni_name = called_by_native.name
    jni_return_type = called_by_native.return_type
    # Constructors are looked up as '<init>' with a void return.
    if called_by_native.is_constructor:
      jni_name = '<init>'
      jni_return_type = 'void'
    values = {
        'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
        'JNI_NAME': jni_name,
        'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
        'STATIC': 'STATIC' if called_by_native.static else 'INSTANCE',
        'JNI_SIGNATURE': JniParams.Signature(called_by_native.params,
                                             jni_return_type,
                                             True)
    }
    return template.substitute(values)
def WrapOutput(output):
  """Wraps lines of generated output at 80 columns.

  Lines shorter than 80 chars and preprocessor directives pass through with
  trailing whitespace stripped and runs of blank lines collapsed; longer
  lines are wrapped with a 4-space hanging indent, preserving a leading '//'
  on comment continuations. The result always ends with a newline.
  """
  wrapped_lines = []
  for line in output.splitlines():
    if len(line) < 80 or line.lstrip().startswith('#'):
      stripped = line.rstrip()
      # Collapse consecutive empty lines into a single one.
      if not wrapped_lines or wrapped_lines[-1] or stripped:
        wrapped_lines.append(stripped)
      continue
    leading_indent = ' ' * (len(line) - len(line.lstrip()))
    continuation_indent = leading_indent + ' ' * 4
    if line.startswith('//'):
      continuation_indent = '//' + continuation_indent
    wrapper = textwrap.TextWrapper(width=80,
                                   subsequent_indent=continuation_indent,
                                   break_long_words=False)
    wrapped_lines.extend(piece.rstrip() for piece in wrapper.wrap(line))
  wrapped_lines.append('')
  return '\n'.join(wrapped_lines)
def ExtractJarInputFile(jar_file, input_file, out_dir):
  """Extracts one input file from a jar and returns the extracted path.

  The file is written under |out_dir|, preserving its directory structure
  inside the jar (the same directory the generated JNI headers go to).

  Args:
    jar_file: path of the jar containing the file to extract.
    input_file: archive-relative path of the file to extract.
    out_dir: root directory to extract into.

  Returns:
    The path of the extracted file.
  """
  archive = zipfile.ZipFile(jar_file)
  destination_dir = os.path.join(out_dir, os.path.dirname(input_file))
  try:
    os.makedirs(destination_dir)
  except OSError as e:
    # The directory may already exist; any other failure is fatal.
    if e.errno != errno.EEXIST:
      raise
  extracted_file_name = os.path.join(destination_dir,
                                     os.path.basename(input_file))
  with open(extracted_file_name, 'w') as outfile:
    outfile.write(archive.read(input_file))
  return extracted_file_name
def GenerateJNIHeader(input_file, output_file, namespace, skip_if_same):
try:
if os.path.splitext(input_file)[1] == '.class':
jni_from_javap = JNIFromJavaP.CreateFromClass(input_file, namespace)
content = jni_from_javap.GetContent()
else:
jni_from_java_source = JNIFromJavaSource.CreateFromFile(input_file)
content = jni_from_java_source.GetContent()
except ParseError, e:
print e
sys.exit(1)
if output_file:
if not os.path.exists(os.path.dirname(os.path.abspath(output_file))):
os.makedirs(os.path.dirname(os.path.abspath(output_file)))
if skip_if_same and os.path.exists(output_file):
with file(output_file, 'r') as f:
existing_content = f.read()
if existing_content == content:
return
with file(output_file, 'w') as f:
f.write(content)
else:
print output
def main(argv):
  # Command-line driver: resolves the input file (possibly extracting it from
  # a jar), derives the output path, and invokes GenerateJNIHeader.
  usage = """usage: %prog [OPTIONS]
This script will parse the given java source code extracting the native
declarations and print the header file to stdout (or a file).
See SampleForTests.java for more details.
  """
  option_parser = optparse.OptionParser(usage=usage)
  option_parser.add_option('-j', dest='jar_file',
                           help='Extract the list of input files from'
                           ' a specified jar file.'
                           ' Uses javap to extract the methods from a'
                           ' pre-compiled class. --input should point'
                           ' to pre-compiled Java .class files.')
  option_parser.add_option('-n', dest='namespace',
                           help='Uses as a namespace in the generated header,'
                           ' instead of the javap class name.')
  option_parser.add_option('--input_file',
                           help='Single input file name. The output file name '
                           'will be derived from it. Must be used with '
                           '--output_dir.')
  option_parser.add_option('--output_dir',
                           help='The output directory. Must be used with '
                           '--input')
  option_parser.add_option('--optimize_generation', type="int",
                           default=0, help='Whether we should optimize JNI '
                           'generation by not regenerating files if they have '
                           'not changed.')
  options, args = option_parser.parse_args(argv)
  if options.jar_file:
    # With -j, --input_file names a member of the jar to extract first.
    input_file = ExtractJarInputFile(options.jar_file, options.input_file,
                                     options.output_dir)
  else:
    input_file = options.input_file
  output_file = None
  if options.output_dir:
    # Output is <output_dir>/<ClassName>_jni.h; stdout when no dir given.
    root_name = os.path.splitext(os.path.basename(input_file))[0]
    output_file = os.path.join(options.output_dir, root_name) + '_jni.h'
  GenerateJNIHeader(input_file, output_file, options.namespace,
                    options.optimize_generation)
if __name__ == '__main__':
  # Script entry point; the process exit status comes from main().
  sys.exit(main(sys.argv))
| 36.256534 | 80 | 0.637893 |
"""Extracts native methods from a Java file and generates the JNI bindings.
If you change this, please run and update the tests."""
import collections
import errno
import optparse
import os
import re
import string
from string import Template
import subprocess
import sys
import textwrap
import zipfile
class ParseError(Exception):
  """Exception thrown when we can't parse the input file."""

  def __init__(self, description, *context_lines):
    Exception.__init__(self)
    self.description = description
    self.context_lines = context_lines

  def __str__(self):
    # Render as a banner: description, blank line, then the offending lines.
    banner = ['***',
              'ERROR: %s' % self.description,
              '',
              '\n'.join(self.context_lines),
              '***']
    return '\n'.join(banner)
class Param(object):
  """Describes a param for a method, either java or native."""

  def __init__(self, **kwargs):
    # datatype is the java type name; name is the identifier in the signature.
    for attribute in ('datatype', 'name'):
      setattr(self, attribute, kwargs[attribute])
class NativeMethod(object):
  """Describes a C/C++ method that is called by Java code"""

  def __init__(self, **kwargs):
    # True when the java-side 'native' declaration is static.
    self.static = kwargs['static']
    # Optional inner-class name taken from a @NativeCall("...") annotation.
    self.java_class_name = kwargs['java_class_name']
    self.return_type = kwargs['return_type']
    self.name = kwargs['name']
    self.params = kwargs['params']
    if self.params:
      assert type(self.params) is list
      assert type(self.params[0]) is Param
    # A first int param named 'nativeFoo' marks a method bound to a C++
    # instance: the param carries the native pointer and 'Foo' is its type.
    if (self.params and
        self.params[0].datatype == 'int' and
        self.params[0].name.startswith('native')):
      self.type = 'method'
      self.p0_type = self.params[0].name[len('native'):]
      if kwargs.get('native_class_name'):
        # @NativeClassQualifiedName overrides the type derived from the name.
        self.p0_type = kwargs['native_class_name']
    else:
      # Plain free function; no native instance pointer involved.
      self.type = 'function'
    self.method_id_var_name = kwargs.get('method_id_var_name', None)
class CalledByNative(object):
  """Describes a java method exported to c/c++"""

  def __init__(self, **kwargs):
    # True for methods scraped from system classes via javap; their stubs are
    # marked __attribute__((unused)) in the generated header.
    self.system_class = kwargs['system_class']
    # True for @CalledByNativeUnchecked: skip CheckException after the call.
    self.unchecked = kwargs['unchecked']
    self.static = kwargs['static']
    self.java_class_name = kwargs['java_class_name']
    self.return_type = kwargs['return_type']
    self.name = kwargs['name']
    self.params = kwargs['params']
    self.method_id_var_name = kwargs.get('method_id_var_name', None)
    self.is_constructor = kwargs.get('is_constructor', False)
    # Derived: which JNIEnv Call*Method/NewObject to emit, and whether the
    # return value needs a static_cast in the generated stub.
    self.env_call = GetEnvCall(self.is_constructor, self.static,
                               self.return_type)
    self.static_cast = GetStaticCastForReturnType(self.return_type)
def JavaDataTypeToC(java_type):
  """Returns a C datatype for the given java type."""
  primitives = {
      'int': 'jint',
      'byte': 'jbyte',
      'char': 'jchar',
      'short': 'jshort',
      'boolean': 'jboolean',
      'long': 'jlong',
      'double': 'jdouble',
      'float': 'jfloat',
  }
  well_known = {
      'void': 'void',
      'String': 'jstring',
      'java/lang/String': 'jstring',
      'Class': 'jclass',
      'java/lang/Class': 'jclass',
  }
  if java_type in primitives:
    return primitives[java_type]
  if java_type in well_known:
    return well_known[java_type]
  if java_type.endswith('[]'):
    # Arrays of primitives get their dedicated j*Array type; anything
    # else degrades to jobjectArray.
    element_type = java_type[:-2]
    if element_type in primitives:
      return primitives[element_type] + 'Array'
    return 'jobjectArray'
  # Every remaining reference type is an opaque jobject.
  return 'jobject'
class JniParams(object):
  """Resolves java type names from the parsed file to JNI signature types.

  Holds per-file state: SetFullyQualifiedClass() and
  ExtractImportsAndInnerClasses() must be called for the file being
  processed before JavaToJni() can resolve unqualified class names.
  """
  # NOTE(review): class-level (shared) containers — state accumulates across
  # files processed in the same interpreter run.
  _imports = []                 # 'Lpkg/Class' entries from import statements.
  _fully_qualified_class = ''   # 'Lpkg/Outer' for the file being parsed.
  _package = ''                 # 'pkg/subpkg' of the file being parsed.
  _inner_classes = []           # 'Lpkg/Outer$Inner' declared in the file.

  @staticmethod
  def SetFullyQualifiedClass(fully_qualified_class):
    # fully_qualified_class is slash-separated, e.g. 'org/chromium/Foo'.
    JniParams._fully_qualified_class = 'L' + fully_qualified_class
    JniParams._package = '/'.join(fully_qualified_class.split('/')[:-1])

  @staticmethod
  def ExtractImportsAndInnerClasses(contents):
    # Records every 'import pkg.Class;' and every class/interface declared
    # inside this file (treated as inner classes of the outer class).
    contents = contents.replace('\n', '')
    re_import = re.compile(r'import.*?(?P<class>\S*?);')
    for match in re.finditer(re_import, contents):
      JniParams._imports += ['L' + match.group('class').replace('.', '/')]

    re_inner = re.compile(r'(class|interface)\s+?(?P<name>\w+?)\W')
    for match in re.finditer(re_inner, contents):
      inner = match.group('name')
      if not JniParams._fully_qualified_class.endswith(inner):
        JniParams._inner_classes += [JniParams._fully_qualified_class + '$' +
                                     inner]

  @staticmethod
  def JavaToJni(param):
    """Converts a java param into a JNI signature type."""
    pod_param_map = {
        'int': 'I',
        'boolean': 'Z',
        'char': 'C',
        'short': 'S',
        'long': 'J',
        'double': 'D',
        'float': 'F',
        'byte': 'B',
        'void': 'V',
    }
    object_param_list = [
        'Ljava/lang/Boolean',
        'Ljava/lang/Integer',
        'Ljava/lang/Long',
        'Ljava/lang/Object',
        'Ljava/lang/String',
        'Ljava/lang/Class',
    ]
    prefix = ''
    # Array?  Each trailing '[]' adds one '[' to the JNI signature.
    while param[-2:] == '[]':
      prefix += '['
      param = param[:-2]
    # Generic?  Drop the type arguments; JNI signatures are erased.
    if '<' in param:
      param = param[:param.index('<')]
    if param in pod_param_map:
      return prefix + pod_param_map[param]
    if '/' in param:
      # Coming from javap, use the fully qualified param directly.
      return prefix + 'L' + param + ';'
    # Try well-known boxed types, the file's own class, and its inner classes.
    for qualified_name in (object_param_list +
                           [JniParams._fully_qualified_class] +
                           JniParams._inner_classes):
      if (qualified_name.endswith('/' + param) or
          qualified_name.endswith('$' + param.replace('.', '$')) or
          qualified_name == 'L' + param):
        return prefix + qualified_name + ';'

    # Is it from an import? (e.g. referecing Class from import pkg.Class;
    # note that referencing an inner class Inner from import pkg.Class.Inner
    # is not supported).
    for qualified_name in JniParams._imports:
      if qualified_name.endswith('/' + param):
        # Ensure it's not an inner class.
        components = qualified_name.split('/')
        if len(components) > 2 and components[-2][0].isupper():
          raise SyntaxError('Inner class (%s) can not be imported '
                            'and used by JNI (%s). Please import the outer '
                            'class and use Outer.Inner instead.' %
                            (qualified_name, param))
        return prefix + qualified_name + ';'

    if '.' in param:
      # Dotted 'Outer.Inner' reference resolved via the import of Outer.
      components = param.split('.')
      outer = '/'.join(components[:-1])
      inner = components[-1]
      for qualified_name in JniParams._imports:
        if qualified_name.endswith('/' + outer):
          return prefix + qualified_name + '$' + inner + ';'
    # Fallback: assume the type lives in the same package as this class.
    return prefix + 'L' + JniParams._package + '/' + param + ';'

  @staticmethod
  def Signature(params, returns, wrap):
    """Returns the JNI signature for the given datatypes."""
    items = ['(']
    items += [JniParams.JavaToJni(param.datatype) for param in params]
    items += [')']
    items += [JniParams.JavaToJni(returns)]
    # wrap=True emits one quoted fragment per line for embedding in generated
    # C++ (adjacent string literals concatenate).
    if wrap:
      return '\n' + '\n'.join(['"' + item + '"' for item in items])
    else:
      return '"' + ''.join(items) + '"'

  @staticmethod
  def Parse(params):
    """Parses the params into a list of Param objects."""
    if not params:
      return []
    ret = []
    for p in [p.strip() for p in params.split(',')]:
      items = p.split(' ')
      # 'final' is a java modifier with no JNI meaning; discard it.
      if 'final' in items:
        items.remove('final')
      param = Param(
          datatype=items[0],
          # Unnamed params (javap output) get synthetic names p0, p1, ...
          name=(items[1] if len(items) > 1 else 'p%s' % len(ret)),
      )
      ret += [param]
    return ret
def ExtractJNINamespace(contents):
  """Returns the @JNINamespace annotation value, or '' when absent."""
  found = re.findall(re.compile('.*?@JNINamespace\("(.*?)"\)'), contents)
  return found[0] if found else ''
def ExtractFullyQualifiedJavaClassName(java_file_name, contents):
  """Derives pkg/path/ClassName from the package statement and file name."""
  found = re.findall(re.compile('.*?package (.*?);'), contents)
  if not found:
    raise SyntaxError('Unable to find "package" line in %s' % java_file_name)
  package_path = found[0].replace('.', '/')
  class_name = os.path.splitext(os.path.basename(java_file_name))[0]
  return package_path + '/' + class_name
def ExtractNatives(contents):
  """Returns a list of NativeMethod objects for the file's native methods."""
  # Flatten so a declaration split over several lines still matches.
  contents = contents.replace('\n', '')
  natives = []
  # Optional @NativeClassQualifiedName and @NativeCall annotations precede
  # the '<qualifiers> native <return> <name>(<params>);' declaration.
  re_native = re.compile(r'(@NativeClassQualifiedName'
                         '\(\"(?P<native_class_name>.*?)\"\))?\s*'
                         '(@NativeCall(\(\"(?P<java_class_name>.*?)\"\)))?\s*'
                         '(?P<qualifiers>\w+\s\w+|\w+|\s+)\s*?native '
                         '(?P<return_type>\S*?) '
                         '(?P<name>\w+?)\((?P<params>.*?)\);')
  for match in re.finditer(re_native, contents):
    native = NativeMethod(
        static='static' in match.group('qualifiers'),
        java_class_name=match.group('java_class_name'),
        native_class_name=match.group('native_class_name'),
        return_type=match.group('return_type'),
        # Strip the conventional 'native' prefix from the stub name.
        name=match.group('name').replace('native', ''),
        params=JniParams.Parse(match.group('params')))
    natives += [native]
  return natives
def GetStaticCastForReturnType(return_type):
  """Returns the concrete JNI type to static_cast the return value to.

  Returns None when no cast is needed.
  """
  direct_casts = {
      'String': 'jstring',
      'java/lang/String': 'jstring',
      'boolean[]': 'jbooleanArray',
      'byte[]': 'jbyteArray',
      'char[]': 'jcharArray',
      'short[]': 'jshortArray',
      'int[]': 'jintArray',
      'long[]': 'jlongArray',
      'double[]': 'jdoubleArray',
  }
  if return_type in direct_casts:
    return direct_casts[return_type]
  # Any remaining array type is an array of objects.
  if return_type.endswith('[]'):
    return 'jobjectArray'
  return None
def GetEnvCall(is_constructor, is_static, return_type):
  """Maps the types availabe via env->Call__Method."""
  # Constructors always go through NewObject.
  if is_constructor:
    return 'NewObject'
  type_to_name = {'boolean': 'Boolean',
                  'byte': 'Byte',
                  'char': 'Char',
                  'short': 'Short',
                  'int': 'Int',
                  'long': 'Long',
                  'float': 'Float',
                  'void': 'Void',
                  'double': 'Double',
                  'Object': 'Object'}
  # Anything that isn't a primitive is fetched as an Object.
  middle = type_to_name.get(return_type, 'Object')
  prefix = 'CallStatic' if is_static else 'Call'
  return prefix + middle + 'Method'
def GetMangledParam(datatype):
  """Returns a mangled identifier for the JNI signature type."""
  # Short signatures (primitives, one-dim primitive arrays) mangle directly.
  if len(datatype) <= 2:
    return datatype.replace('[', 'A')
  mangled = []
  previous = datatype[0]
  for current in datatype[1:]:
    if current == '[':
      mangled.append('A')
    elif current.isupper() or previous in '/L':
      # Keep uppercase letters and the letter following each '/' or 'L'.
      mangled.append(current.upper())
    previous = current
  return ''.join(mangled)
def GetMangledMethodName(name, params, return_type):
  """Returns a mangled method name for the given signature.

  The returned name can be used as a C identifier and will be unique for all
  valid overloads of the same method.

  Args:
    name: string.
    params: list of Param.
    return_type: string.

  Returns:
    A mangled name.
  """
  # Mangle the return type followed by every parameter type, '_'-joined.
  signature_datatypes = [return_type] + [param.datatype for param in params]
  mangled_parts = [GetMangledParam(JniParams.JavaToJni(datatype))
                   for datatype in signature_datatypes]
  mangled_name = name + '_'.join(mangled_parts)
  assert re.match(r'[0-9a-zA-Z_]+', mangled_name)
  return mangled_name
def MangleCalledByNatives(called_by_natives):
  """Mangles all the overloads from the call_by_natives list."""
  # Count how many times each (class, method) name pair occurs.
  overload_counts = collections.defaultdict(
      lambda: collections.defaultdict(lambda: 0))
  for entry in called_by_natives:
    overload_counts[entry.java_class_name][entry.name] += 1
  # Only overloaded names need the full signature-based mangling; unique
  # names keep the plain method name as their method-ID variable.
  for entry in called_by_natives:
    entry.method_id_var_name = entry.name
    if overload_counts[entry.java_class_name][entry.name] > 1:
      entry.method_id_var_name = GetMangledMethodName(entry.name,
                                                      entry.params,
                                                      entry.return_type)
  return called_by_natives
# JNI return types that carry a local reference; stubs returning these are
# wrapped in ScopedJavaLocalRef by the generator.
RE_SCOPED_JNI_RETURN_TYPES = re.compile('jobject|jclass|jstring|.*Array')

# Matches a @CalledByNative[Unchecked]("InnerClass") annotation followed by
# the java method signature: modifiers, return type, name, and params.
RE_CALLED_BY_NATIVE = re.compile(
    '@CalledByNative(?P<Unchecked>(Unchecked)*?)(?:\("(?P<annotation>.*)"\))?'
    '\s+(?P<prefix>[\w ]*?)'
    '\s*(?P<return_type>\S+?)'
    '\s+(?P<name>\w+)'
    '\s*\((?P<params>[^\)]*)\)')
def ExtractCalledByNatives(contents):
  """Parses all methods annotated with @CalledByNative.

  Args:
    contents: the contents of the java file.

  Returns:
    A list of CalledByNative objects (with mangled method-ID names) for the
    annotated methods.

  Raises:
    ParseError: if unable to parse.
  """
  called_by_natives = []
  for match in re.finditer(RE_CALLED_BY_NATIVE, contents):
    called_by_natives += [CalledByNative(
        system_class=False,
        unchecked='Unchecked' in match.group('Unchecked'),
        static='static' in match.group('prefix'),
        java_class_name=match.group('annotation') or '',
        return_type=match.group('return_type'),
        name=match.group('name'),
        params=JniParams.Parse(match.group('params')))]
  # Check for any @CalledByNative occurrences that the regex did not consume:
  # those indicate a signature this parser cannot handle.
  unmatched_lines = re.sub(RE_CALLED_BY_NATIVE, '', contents).split('\n')
  for line1, line2 in zip(unmatched_lines, unmatched_lines[1:]):
    if '@CalledByNative' in line1:
      raise ParseError('could not parse @CalledByNative method signature',
                       line1, line2)
  return MangleCalledByNatives(called_by_natives)
class JNIFromJavaP(object):
  """Uses 'javap' to parse a .class file and generate the JNI header file."""

  def __init__(self, contents, namespace):
    # contents: the lines of `javap <class>` output; the class declaration is
    # expected on contents[1] (after the javap banner line).
    self.contents = contents
    self.namespace = namespace
    self.fully_qualified_class = re.match(
        '.*?(class|interface) (?P<class_name>.*?)( |{)',
        contents[1]).group('class_name')
    self.fully_qualified_class = self.fully_qualified_class.replace('.', '/')
    JniParams.SetFullyQualifiedClass(self.fully_qualified_class)
    self.java_class_name = self.fully_qualified_class.split('/')[-1]
    if not self.namespace:
      self.namespace = 'JNI_' + self.java_class_name
    re_method = re.compile('(?P<prefix>.*?)(?P<return_type>\S+?) (?P<name>\w+?)'
                           '\((?P<params>.*?)\)')
    self.called_by_natives = []
    # Every method line in the javap dump becomes a CalledByNative stub.
    for content in contents[2:]:
      match = re.match(re_method, content)
      if not match:
        continue
      self.called_by_natives += [CalledByNative(
          system_class=True,
          unchecked=False,
          static='static' in match.group('prefix'),
          java_class_name='',
          return_type=match.group('return_type').replace('.', '/'),
          name=match.group('name'),
          params=JniParams.Parse(match.group('params').replace('.', '/')))]
    # Constructors are declared without a return type, so the loop above
    # misses them; scan again with a constructor-specific pattern.
    re_constructor = re.compile('.*? public ' +
                                self.fully_qualified_class.replace('/', '.') +
                                '\((?P<params>.*?)\)')
    for content in contents[2:]:
      match = re.match(re_constructor, content)
      if not match:
        continue
      self.called_by_natives += [CalledByNative(
          system_class=True,
          unchecked=False,
          static=False,
          java_class_name='',
          return_type=self.fully_qualified_class,
          name='Constructor',
          params=JniParams.Parse(match.group('params').replace('.', '/')),
          is_constructor=True)]
    self.called_by_natives = MangleCalledByNatives(self.called_by_natives)
    # javap-derived headers expose only CalledByNative stubs, no natives.
    self.inl_header_file_generator = InlHeaderFileGenerator(
        self.namespace, self.fully_qualified_class, [], self.called_by_natives)

  def GetContent(self):
    """Returns the generated JNI header text."""
    return self.inl_header_file_generator.GetContent()

  @staticmethod
  def CreateFromClass(class_file, namespace):
    # Runs javap in the .class file's directory and parses its stdout.
    class_name = os.path.splitext(os.path.basename(class_file))[0]
    p = subprocess.Popen(args=['javap', class_name],
                         cwd=os.path.dirname(class_file),
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, _ = p.communicate()
    jni_from_javap = JNIFromJavaP(stdout.split('\n'), namespace)
    return jni_from_javap
class JNIFromJavaSource(object):
  """Uses the given java source file to generate the JNI header file."""

  def __init__(self, contents, fully_qualified_class):
    contents = self._RemoveComments(contents)
    JniParams.SetFullyQualifiedClass(fully_qualified_class)
    JniParams.ExtractImportsAndInnerClasses(contents)
    jni_namespace = ExtractJNINamespace(contents)
    natives = ExtractNatives(contents)
    called_by_natives = ExtractCalledByNatives(contents)
    if len(natives) == 0 and len(called_by_natives) == 0:
      raise SyntaxError('Unable to find any JNI methods for %s.' %
                        fully_qualified_class)
    inl_header_file_generator = InlHeaderFileGenerator(
        jni_namespace, fully_qualified_class, natives, called_by_natives)
    self.content = inl_header_file_generator.GetContent()

  def _RemoveComments(self, contents):
    # We need to support both inline and block comments, and we need to handle
    # strings that contain '//' or '/*'. Rather than trying to do all that with
    # regexps, we just pipe the contents through the C preprocessor. We tell cpp
    # the file has already been preprocessed, so it just removes comments and
    # doesn't try to parse directives.
    p = subprocess.Popen(args=['cpp', '-fpreprocessed'],
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    stdout, _ = p.communicate(contents)
    return stdout

  def GetContent(self):
    """Returns the generated JNI header text."""
    return self.content

  @staticmethod
  def CreateFromFile(java_file_name):
    # NOTE(review): py2 file() builtin; the handle is not explicitly closed.
    contents = file(java_file_name).read()
    fully_qualified_class = ExtractFullyQualifiedJavaClassName(java_file_name,
                                                               contents)
    return JNIFromJavaSource(contents, fully_qualified_class)
class InlHeaderFileGenerator(object):
  """Generates an inline header file for JNI integration."""

  def __init__(self, namespace, fully_qualified_class, natives,
               called_by_natives):
    self.namespace = namespace
    self.fully_qualified_class = fully_qualified_class
    self.class_name = self.fully_qualified_class.split('/')[-1]
    self.natives = natives
    self.called_by_natives = called_by_natives
    # Include guard derived from the class path, e.g. org_chromium_Foo_JNI.
    self.header_guard = fully_qualified_class.replace('/', '_') + '_JNI'

  def GetContent(self):
    """Returns the content of the JNI binding file."""
    template = Template("""\
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// This file is autogenerated by
// ${SCRIPT_NAME}
// For
// ${FULLY_QUALIFIED_CLASS}
#ifndef ${HEADER_GUARD}
#define ${HEADER_GUARD}
#include <jni.h>
#include "base/android/jni_android.h"
#include "base/android/scoped_java_ref.h"
#include "base/basictypes.h"
#include "base/logging.h"
using base::android::ScopedJavaLocalRef;
// Step 1: forward declarations.
namespace {
$CLASS_PATH_DEFINITIONS
} // namespace
$OPEN_NAMESPACE
$FORWARD_DECLARATIONS
// Step 2: method stubs.
$METHOD_STUBS
// Step 3: RegisterNatives.
static bool RegisterNativesImpl(JNIEnv* env) {
$REGISTER_NATIVES_IMPL
return true;
}
$CLOSE_NAMESPACE
#endif  // ${HEADER_GUARD}
""")
    # The generated header credits this script by its path relative to the
    # 'base' directory (raises ValueError if not run from such a checkout).
    script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
    base_index = script_components.index('base')
    script_name = os.sep.join(script_components[base_index:])
    values = {
        'SCRIPT_NAME': script_name,
        'FULLY_QUALIFIED_CLASS': self.fully_qualified_class,
        'CLASS_PATH_DEFINITIONS': self.GetClassPathDefinitionsString(),
        'FORWARD_DECLARATIONS': self.GetForwardDeclarationsString(),
        'METHOD_STUBS': self.GetMethodStubsString(),
        'OPEN_NAMESPACE': self.GetOpenNamespaceString(),
        'REGISTER_NATIVES_IMPL': self.GetRegisterNativesImplString(),
        'CLOSE_NAMESPACE': self.GetCloseNamespaceString(),
        'HEADER_GUARD': self.header_guard,
    }
    return WrapOutput(template.substitute(values))

  def GetClassPathDefinitionsString(self):
    # Emits the kClassPath constants and the leaked jclass globals.
    ret = []
    ret += [self.GetClassPathDefinitions()]
    return '\n'.join(ret)

  def GetForwardDeclarationsString(self):
    # Free functions need forward declarations; instance-bound 'method'
    # natives are fully defined in GetNativeMethodStub instead.
    ret = []
    for native in self.natives:
      if native.type != 'method':
        ret += [self.GetForwardDeclaration(native)]
    return '\n'.join(ret)

  def GetMethodStubsString(self):
    # Stubs for natives bound to a C++ instance, then CalledByNative stubs.
    ret = []
    for native in self.natives:
      if native.type == 'method':
        ret += [self.GetNativeMethodStub(native)]
    for called_by_native in self.called_by_natives:
      ret += [self.GetCalledByNativeMethodStub(called_by_native)]
    return '\n'.join(ret)

  def GetKMethodsString(self, clazz):
    # kMethods entries for natives declared on the given (inner) class; an
    # empty java_class_name means the outer class itself.
    ret = []
    for native in self.natives:
      if (native.java_class_name == clazz or
          (not native.java_class_name and clazz == self.class_name)):
        ret += [self.GetKMethodArrayEntry(native)]
    return '\n'.join(ret)

  def GetRegisterNativesImplString(self):
    """Returns the implementation for RegisterNatives."""
    template = Template("""\
static const JNINativeMethod kMethods${JAVA_CLASS}[] = {
${KMETHODS}
};
const int kMethods${JAVA_CLASS}Size = arraysize(kMethods${JAVA_CLASS});
if (env->RegisterNatives(g_${JAVA_CLASS}_clazz,
kMethods${JAVA_CLASS},
kMethods${JAVA_CLASS}Size) < 0) {
LOG(ERROR) << "RegisterNatives failed in " << __FILE__;
return false;
}
""")
    ret = [self.GetFindClasses()]
    all_classes = self.GetUniqueClasses(self.natives)
    all_classes[self.class_name] = self.fully_qualified_class
    for clazz in all_classes:
      kmethods = self.GetKMethodsString(clazz)
      if kmethods:
        values = {'JAVA_CLASS': clazz,
                  'KMETHODS': kmethods}
        ret += [template.substitute(values)]
    # NOTE(review): ret always holds the FindClasses block, so this branch
    # appears unreachable.
    if not ret: return ''
    return '\n' + '\n'.join(ret)

  def GetOpenNamespaceString(self):
    # Supports nested namespaces given as 'outer::inner'.
    if self.namespace:
      all_namespaces = ['namespace %s {' % ns
                        for ns in self.namespace.split('::')]
      return '\n'.join(all_namespaces)
    return ''

  def GetCloseNamespaceString(self):
    # Closes namespaces in reverse order of opening.
    if self.namespace:
      all_namespaces = ['} // namespace %s' % ns
                        for ns in self.namespace.split('::')]
      all_namespaces.reverse()
      return '\n'.join(all_namespaces) + '\n'
    return ''

  def GetJNIFirstParam(self, native):
    # Instance-bound methods and non-static functions receive 'jobject obj';
    # static functions receive 'jclass clazz'.
    ret = []
    if native.type == 'method':
      ret = ['jobject obj']
    elif native.type == 'function':
      if native.static:
        ret = ['jclass clazz']
      else:
        ret = ['jobject obj']
    return ret

  def GetParamsInDeclaration(self, native):
    """Returns the params for the stub declaration.

    Args:
      native: the NativeMethod being emitted.

    Returns:
      A string containing the params.
    """
    return ',\n '.join(self.GetJNIFirstParam(native) +
                       [JavaDataTypeToC(param.datatype) + ' ' +
                        param.name
                        for param in native.params])

  def GetCalledByNativeParamsInDeclaration(self, called_by_native):
    # Same formatting as GetParamsInDeclaration, without the JNI first param.
    return ',\n '.join([JavaDataTypeToC(param.datatype) + ' ' +
                        param.name
                        for param in called_by_native.params])

  def GetForwardDeclaration(self, native):
    template = Template("""
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS});
""")
    values = {'RETURN': JavaDataTypeToC(native.return_type),
              'NAME': native.name,
              'PARAMS': self.GetParamsInDeclaration(native)}
    return template.substitute(values)

  def GetNativeMethodStub(self, native):
    """Returns stubs for native methods."""
    # The stub casts the first ('nativeFoo') param to a Foo* and forwards
    # the call to that instance.
    template = Template("""\
static ${RETURN} ${NAME}(JNIEnv* env, ${PARAMS_IN_DECLARATION}) {
DCHECK(${PARAM0_NAME}) << "${NAME}";
${P0_TYPE}* native = reinterpret_cast<${P0_TYPE}*>(${PARAM0_NAME});
return native->${NAME}(env, obj${PARAMS_IN_CALL})${POST_CALL};
}
""")
    params_for_call = ', '.join(p.name for p in native.params[1:])
    if params_for_call:
      params_for_call = ', ' + params_for_call
    return_type = JavaDataTypeToC(native.return_type)
    # Reference-returning stubs route through ScopedJavaLocalRef and
    # .Release() to hand the local reference back to the JVM.
    if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
      scoped_return_type = 'ScopedJavaLocalRef<' + return_type + '>'
      post_call = '.Release()'
    else:
      scoped_return_type = return_type
      post_call = ''
    values = {
        'RETURN': return_type,
        'SCOPED_RETURN': scoped_return_type,
        'NAME': native.name,
        'PARAMS_IN_DECLARATION': self.GetParamsInDeclaration(native),
        'PARAM0_NAME': native.params[0].name,
        'P0_TYPE': native.p0_type,
        'PARAMS_IN_CALL': params_for_call,
        'POST_CALL': post_call
    }
    return template.substitute(values)

  def GetCalledByNativeMethodStub(self, called_by_native):
    """Returns the C++ stub that invokes one @CalledByNative java method."""
    function_signature_template = Template("""\
static ${RETURN_TYPE} Java_${JAVA_CLASS}_${METHOD_ID_VAR_NAME}(\
JNIEnv* env${FIRST_PARAM_IN_DECLARATION}${PARAMS_IN_DECLARATION})""")
    function_header_template = Template("""\
${FUNCTION_SIGNATURE} {""")
    # System-class stubs may be unused by the includer; silence the warning.
    function_header_with_unused_template = Template("""\
${FUNCTION_SIGNATURE} __attribute__ ((unused));
${FUNCTION_SIGNATURE} {""")
    template = Template("""
static base::subtle::AtomicWord g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME} = 0;
${FUNCTION_HEADER}
/* Must call RegisterNativesImpl() */
DCHECK(g_${JAVA_CLASS}_clazz);
jmethodID method_id =
${GET_METHOD_ID_IMPL}
${RETURN_DECLARATION}
${PRE_CALL}env->${ENV_CALL}(${FIRST_PARAM_IN_CALL},
method_id${PARAMS_IN_CALL})${POST_CALL};
${CHECK_EXCEPTION}
${RETURN_CLAUSE}
}""")
    # Static methods and constructors are invoked on the class object.
    if called_by_native.static or called_by_native.is_constructor:
      first_param_in_declaration = ''
      first_param_in_call = ('g_%s_clazz' %
                             (called_by_native.java_class_name or
                              self.class_name))
    else:
      first_param_in_declaration = ', jobject obj'
      first_param_in_call = 'obj'
    params_in_declaration = self.GetCalledByNativeParamsInDeclaration(
        called_by_native)
    if params_in_declaration:
      params_in_declaration = ', ' + params_in_declaration
    params_for_call = ', '.join(param.name
                                for param in called_by_native.params)
    if params_for_call:
      params_for_call = ', ' + params_for_call
    pre_call = ''
    post_call = ''
    if called_by_native.static_cast:
      # Wrap the env call in a static_cast to the concrete JNI type.
      pre_call = 'static_cast<%s>(' % called_by_native.static_cast
      post_call = ')'
    check_exception = ''
    if not called_by_native.unchecked:
      check_exception = 'base::android::CheckException(env);'
    return_type = JavaDataTypeToC(called_by_native.return_type)
    return_declaration = ''
    return_clause = ''
    if return_type != 'void':
      pre_call = ' ' + pre_call
      return_declaration = return_type + ' ret ='
      # Reference returns are handed back wrapped in ScopedJavaLocalRef.
      if re.match(RE_SCOPED_JNI_RETURN_TYPES, return_type):
        return_type = 'ScopedJavaLocalRef<' + return_type + '>'
        return_clause = 'return ' + return_type + '(env, ret);'
      else:
        return_clause = 'return ret;'
    values = {
        'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
        'METHOD': called_by_native.name,
        'RETURN_TYPE': return_type,
        'RETURN_DECLARATION': return_declaration,
        'RETURN_CLAUSE': return_clause,
        'FIRST_PARAM_IN_DECLARATION': first_param_in_declaration,
        'PARAMS_IN_DECLARATION': params_in_declaration,
        'STATIC': 'Static' if called_by_native.static else '',
        'PRE_CALL': pre_call,
        'POST_CALL': post_call,
        'ENV_CALL': called_by_native.env_call,
        'FIRST_PARAM_IN_CALL': first_param_in_call,
        'PARAMS_IN_CALL': params_for_call,
        'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
        'CHECK_EXCEPTION': check_exception,
        'GET_METHOD_ID_IMPL': self.GetMethodIDImpl(called_by_native)
    }
    values['FUNCTION_SIGNATURE'] = (
        function_signature_template.substitute(values))
    if called_by_native.system_class:
      values['FUNCTION_HEADER'] = (
          function_header_with_unused_template.substitute(values))
    else:
      values['FUNCTION_HEADER'] = function_header_template.substitute(values)
    return template.substitute(values)

  def GetKMethodArrayEntry(self, native):
    # One JNINativeMethod table entry: java name, signature, C function.
    template = Template("""\
{ "native${NAME}", ${JNI_SIGNATURE}, reinterpret_cast<void*>(${NAME}) },""")
    values = {'NAME': native.name,
              'JNI_SIGNATURE': JniParams.Signature(native.params,
                                                   native.return_type,
                                                   True)}
    return template.substitute(values)

  def GetUniqueClasses(self, origin):
    # Maps short class name -> jni class path, always including the outer
    # class plus any inner classes referenced by the given methods.
    ret = {self.class_name: self.fully_qualified_class}
    for entry in origin:
      class_name = self.class_name
      jni_class_path = self.fully_qualified_class
      if entry.java_class_name:
        class_name = entry.java_class_name
        jni_class_path = self.fully_qualified_class + '$' + class_name
      ret[class_name] = jni_class_path
    return ret

  def GetClassPathDefinitions(self):
    """Returns the ClassPath constants."""
    ret = []
    template = Template("""\
const char k${JAVA_CLASS}ClassPath[] = "${JNI_CLASS_PATH}";""")
    native_classes = self.GetUniqueClasses(self.natives)
    called_by_native_classes = self.GetUniqueClasses(self.called_by_natives)
    all_classes = native_classes
    all_classes.update(called_by_native_classes)
    for clazz in all_classes:
      values = {
          'JAVA_CLASS': clazz,
          'JNI_CLASS_PATH': all_classes[clazz],
      }
      ret += [template.substitute(values)]
    # NOTE(review): "ret += ''" extends the list by iterating an empty
    # string, i.e. it is a no-op; "ret += ['']" (a blank separator line)
    # was probably intended.
    ret += ''
    for clazz in called_by_native_classes:
      template = Template("""\
// Leaking this jclass as we cannot use LazyInstance from some threads.
jclass g_${JAVA_CLASS}_clazz = NULL;""")
      values = {
          'JAVA_CLASS': clazz,
      }
      ret += [template.substitute(values)]
    return '\n'.join(ret)

  def GetFindClasses(self):
    """Returns the implementation of FindClass for all known classes."""
    template = Template("""\
g_${JAVA_CLASS}_clazz = reinterpret_cast<jclass>(env->NewGlobalRef(
base::android::GetClass(env, k${JAVA_CLASS}ClassPath).obj()));""")
    ret = []
    for clazz in self.GetUniqueClasses(self.called_by_natives):
      values = {'JAVA_CLASS': clazz}
      ret += [template.substitute(values)]
    return '\n'.join(ret)

  def GetMethodIDImpl(self, called_by_native):
    """Returns the implementation of GetMethodID."""
    template = Template("""\
base::android::MethodID::LazyGet<
base::android::MethodID::TYPE_${STATIC}>(
env, g_${JAVA_CLASS}_clazz,
"${JNI_NAME}",
${JNI_SIGNATURE},
&g_${JAVA_CLASS}_${METHOD_ID_VAR_NAME});
""")
    jni_name = called_by_native.name
    jni_return_type = called_by_native.return_type
    if called_by_native.is_constructor:
      # Constructors use the special JNI name '<init>' and a void return
      # type in the method-ID lookup signature.
      jni_name = '<init>'
      jni_return_type = 'void'
    values = {
        'JAVA_CLASS': called_by_native.java_class_name or self.class_name,
        'JNI_NAME': jni_name,
        'METHOD_ID_VAR_NAME': called_by_native.method_id_var_name,
        'STATIC': 'STATIC' if called_by_native.static else 'INSTANCE',
        'JNI_SIGNATURE': JniParams.Signature(called_by_native.params,
                                             jni_return_type,
                                             True)
    }
    return template.substitute(values)
def WrapOutput(output):
  """Wraps the generated header at 80 columns.

  Lines shorter than 80 chars and preprocessor directives pass through
  untouched; runs of blank lines are collapsed to a single blank line.
  """
  wrapped_lines = []
  for raw_line in output.splitlines():
    if len(raw_line) < 80 or raw_line.lstrip()[:1] == '#':
      trimmed = raw_line.rstrip()
      # Keep the line unless it would create a second consecutive blank.
      if not wrapped_lines or wrapped_lines[-1] != '' or trimmed != '':
        wrapped_lines.append(trimmed)
    else:
      indent_width = len(raw_line) - len(raw_line.lstrip())
      continuation_indent = ' ' * (indent_width + 4)
      if raw_line.startswith('//'):
        # Continuation of a C++ comment stays commented.
        continuation_indent = '//' + continuation_indent
      wrapper = textwrap.TextWrapper(width=80,
                                     subsequent_indent=continuation_indent,
                                     break_long_words=False)
      wrapped_lines.extend(piece.rstrip() for piece in wrapper.wrap(raw_line))
  # Generated files end with a trailing newline.
  wrapped_lines.append('')
  return '\n'.join(wrapped_lines)
def ExtractJarInputFile(jar_file, input_file, out_dir):
  """Extracts input file from jar and returns the filename.

  The input file is extracted to the same directory that the generated jni
  headers will be placed in.  This is passed as an argument to script.

  Args:
    jar_file: the jar file containing the input files to extract.
    input_file: the path, inside the jar, of the single file to extract.
    out_dir: the name of the directories to extract to.

  Returns:
    the name of extracted input file.
  """
  jar_file = zipfile.ZipFile(jar_file)

  # Mirror the file's in-jar directory structure under out_dir.
  out_dir = os.path.join(out_dir, os.path.dirname(input_file))
  try:
    os.makedirs(out_dir)
  except OSError as e:
    # The directory may already exist; any other makedirs failure is real.
    if e.errno != errno.EEXIST:
      raise
  extracted_file_name = os.path.join(out_dir, os.path.basename(input_file))
  with open(extracted_file_name, 'w') as outfile:
    outfile.write(jar_file.read(input_file))
  return extracted_file_name
def GenerateJNIHeader(input_file, output_file, namespace, skip_if_same):
try:
if os.path.splitext(input_file)[1] == '.class':
jni_from_javap = JNIFromJavaP.CreateFromClass(input_file, namespace)
content = jni_from_javap.GetContent()
else:
jni_from_java_source = JNIFromJavaSource.CreateFromFile(input_file)
content = jni_from_java_source.GetContent()
except ParseError, e:
print e
sys.exit(1)
if output_file:
if not os.path.exists(os.path.dirname(os.path.abspath(output_file))):
os.makedirs(os.path.dirname(os.path.abspath(output_file)))
if skip_if_same and os.path.exists(output_file):
with file(output_file, 'r') as f:
existing_content = f.read()
if existing_content == content:
return
with file(output_file, 'w') as f:
f.write(content)
else:
print output
def main(argv):
  """Command-line entry point: parses options and generates one JNI header."""
  usage = """usage: %prog [OPTIONS]
This script will parse the given java source code extracting the native
declarations and print the header file to stdout (or a file).
See SampleForTests.java for more details.
  """
  option_parser = optparse.OptionParser(usage=usage)
  option_parser.add_option('-j', dest='jar_file',
                           help='Extract the list of input files from'
                           ' a specified jar file.'
                           ' Uses javap to extract the methods from a'
                           ' pre-compiled class. --input should point'
                           ' to pre-compiled Java .class files.')
  option_parser.add_option('-n', dest='namespace',
                           help='Uses as a namespace in the generated header,'
                           ' instead of the javap class name.')
  option_parser.add_option('--input_file',
                           help='Single input file name. The output file name '
                           'will be derived from it. Must be used with '
                           '--output_dir.')
  option_parser.add_option('--output_dir',
                           help='The output directory. Must be used with '
                           '--input')
  option_parser.add_option('--optimize_generation', type="int",
                           default=0, help='Whether we should optimize JNI '
                           'generation by not regenerating files if they have '
                           'not changed.')
  options, args = option_parser.parse_args(argv)
  if options.jar_file:
    # The real input lives inside the jar; extract it under the output dir.
    input_file = ExtractJarInputFile(options.jar_file, options.input_file,
                                     options.output_dir)
  else:
    input_file = options.input_file
  output_file = None
  if options.output_dir:
    # Output name is derived from the input: <output_dir>/<basename>_jni.h
    root_name = os.path.splitext(os.path.basename(input_file))[0]
    output_file = os.path.join(options.output_dir, root_name) + '_jni.h'
  GenerateJNIHeader(input_file, output_file, options.namespace,
                    options.optimize_generation)
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit code (None -> 0).
  sys.exit(main(sys.argv))
| false | true |
f71f559efb8f3cc65c106ea9756849f94c18c509 | 1,170 | py | Python | test/test_main.py | LucaMarconato/phyper | 065f41dbdce93b95cd2f8a16ad72a1cf57826c66 | [
"MIT"
] | 1 | 2020-08-14T07:40:18.000Z | 2020-08-14T07:40:18.000Z | test/test_main.py | LucaMarconato/phyper | 065f41dbdce93b95cd2f8a16ad72a1cf57826c66 | [
"MIT"
] | null | null | null | test/test_main.py | LucaMarconato/phyper | 065f41dbdce93b95cd2f8a16ad72a1cf57826c66 | [
"MIT"
] | null | null | null | import phyper
from typing import List
from pprint import pprint
import pandas as pd
class NonKeys:
    """Training-run settings mixed into MyParser below.

    NOTE(review): the class name suggests these attributes do NOT take part
    in phyper's hyperparameter hashing -- confirm against phyper.Parser's
    handling of inherited attributes.
    """
    n_epochs = 11
    batch_size = 10
    resume_training = False
    another_non_key = True
class MyParser(phyper.Parser, NonKeys):
    """Example phyper parser; the class attributes below act as its
    hyperparameters. 'my_testa', 'ehi' and 'bbbbb' are registered later
    as the dependencies of the 'normalizer' resource.
    """
    my_testa: str = 1  # NOTE(review): annotated str but initialized to int 1 -- confirm intent
    ehi = None
    bbbbb = 32
    c = 'ehi'
# Directory where resources are stored under per-instance hashes.
hashed_resources_folder = 'hashed_resources'
my_parser = MyParser(hashed_resources_folder)
# The 'normalizer' resource's hash depends only on these three hyperparameters.
my_parser.register_new_resource(name='normalizer', dependencies=['my_testa', 'ehi', 'bbbbb'])
print(my_parser.get_hyperparameters())
print(my_parser.get_hashable_hyperparameters())
my_instance = my_parser.new_instance()
my_instance.get_instance_hash()  # NOTE(review): return value unused -- warm-up call?
print(my_instance.get_hyperparameters())
print(my_instance.get_hashable_hyperparameters())
print(my_instance.get_instance_hash())
print(my_instance.get_instance_hash('normalizer'))
# print(my_instance.get_instance_hash('c'))
print(my_instance.get_resources_path())
print(my_instance.get_resources_path('normalizer'))
# Build one instance per combination of the listed values
# (presumably the Cartesian product: 1 * 2 * 3 = 6 -- confirm against phyper).
d = {'n_epochs': [50], 'c': ['c0', 'c1'], 'my_testa': [1, 2, 3]}
instances: List[MyParser] = my_parser.get_instances_from_dictionary(d)
for instance in instances:
    print(instance.get_instance_hash())
| 27.857143 | 93 | 0.766667 | import phyper
from typing import List
from pprint import pprint
import pandas as pd
class NonKeys:
n_epochs = 11
batch_size = 10
resume_training = False
another_non_key = True
class MyParser(phyper.Parser, NonKeys):
my_testa: str = 1
ehi = None
bbbbb = 32
c = 'ehi'
hashed_resources_folder = 'hashed_resources'
my_parser = MyParser(hashed_resources_folder)
my_parser.register_new_resource(name='normalizer', dependencies=['my_testa', 'ehi', 'bbbbb'])
print(my_parser.get_hyperparameters())
print(my_parser.get_hashable_hyperparameters())
my_instance = my_parser.new_instance()
my_instance.get_instance_hash()
print(my_instance.get_hyperparameters())
print(my_instance.get_hashable_hyperparameters())
print(my_instance.get_instance_hash())
print(my_instance.get_instance_hash('normalizer'))
print(my_instance.get_resources_path())
print(my_instance.get_resources_path('normalizer'))
d = {'n_epochs': [50], 'c': ['c0', 'c1'], 'my_testa': [1, 2, 3]}
instances: List[MyParser] = my_parser.get_instances_from_dictionary(d)
for instance in instances:
print(instance.get_instance_hash())
| true | true |
f71f55c54252740d7984c8598467133969e771fe | 1,091 | py | Python | motion_primitives_py/motion_primitives_py/examples/dispersion_algorithm_animation.py | ljarin/dispersion_motion_planning | 1c16c95b70915e58e407c1a45aa4065877fbb3de | [
"BSD-3-Clause"
] | 1 | 2022-03-04T12:03:26.000Z | 2022-03-04T12:03:26.000Z | motion_primitives_py/motion_primitives_py/examples/dispersion_algorithm_animation.py | ljarin/dispersion_motion_planning | 1c16c95b70915e58e407c1a45aa4065877fbb3de | [
"BSD-3-Clause"
] | null | null | null | motion_primitives_py/motion_primitives_py/examples/dispersion_algorithm_animation.py | ljarin/dispersion_motion_planning | 1c16c95b70915e58e407c1a45aa4065877fbb3de | [
"BSD-3-Clause"
] | null | null | null | # %%
from motion_primitives_py import *
import numpy as np
import time
from pycallgraph import PyCallGraph, Config
from pycallgraph.output import GraphvizOutput
"""
Animate the evolution of the min. dispersion algorithm
"""
tiling = True
plot = False
animate = True
check_backwards_dispersion = False
mp_subclass_specific_data = {}
# %%
# define parameters
control_space_q = 2
num_dims = 2
max_state = [3.5, 2*np.pi]
motion_primitive_type = ReedsSheppMotionPrimitive
# resolution = [.51, .5]
num_dense_samples = 100
# # # %%
# motion_primitive_type = PolynomialMotionPrimitive
# control_space_q = 2
# num_dims = 2
# max_state = [3.51, 1.51, 10, 100]
# mp_subclass_specific_data = {'iterative_bvp_dt': .1, 'iterative_bvp_max_t': 5, 'rho': 10}
# num_dense_samples = 200
# %%
# build lattice
mpl = MotionPrimitiveLattice(control_space_q, num_dims, max_state, motion_primitive_type, tiling, False, mp_subclass_specific_data)
mpl.compute_min_dispersion_space(
num_output_pts=10, check_backwards_dispersion=check_backwards_dispersion, animate=animate, num_dense_samples=num_dense_samples)
| 27.974359 | 131 | 0.781852 |
from motion_primitives_py import *
import numpy as np
import time
from pycallgraph import PyCallGraph, Config
from pycallgraph.output import GraphvizOutput
tiling = True
plot = False
animate = True
check_backwards_dispersion = False
mp_subclass_specific_data = {}
control_space_q = 2
num_dims = 2
max_state = [3.5, 2*np.pi]
motion_primitive_type = ReedsSheppMotionPrimitive
num_dense_samples = 100
mpl = MotionPrimitiveLattice(control_space_q, num_dims, max_state, motion_primitive_type, tiling, False, mp_subclass_specific_data)
mpl.compute_min_dispersion_space(
num_output_pts=10, check_backwards_dispersion=check_backwards_dispersion, animate=animate, num_dense_samples=num_dense_samples)
| true | true |
f71f55f7dda83299229f1c6bd846bc4c7c0d3162 | 4,502 | py | Python | apps/beeswax/src/beeswax/hive_site.py | thinker0/hue | ee5aecc3db442e962584d3151c0f2eab397d6707 | [
"Apache-2.0"
] | null | null | null | apps/beeswax/src/beeswax/hive_site.py | thinker0/hue | ee5aecc3db442e962584d3151c0f2eab397d6707 | [
"Apache-2.0"
] | null | null | null | apps/beeswax/src/beeswax/hive_site.py | thinker0/hue | ee5aecc3db442e962584d3151c0f2eab397d6707 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Helper for reading hive-site.xml
"""
import errno
import logging
import os.path
import re
import socket
from desktop.lib import security_util
from hadoop import confparse
import beeswax.conf
LOG = logging.getLogger(__name__)

_HIVE_SITE_PATH = None          # Path to hive-site.xml
_HIVE_SITE_DICT = None          # A dictionary of name/value config options
_METASTORE_LOC_CACHE = None     # Cached result of get_metastore()

# Property names looked up in hive-site.xml.
_CNF_METASTORE_SASL = 'hive.metastore.sasl.enabled'
_CNF_METASTORE_URIS = 'hive.metastore.uris'
_CNF_METASTORE_KERBEROS_PRINCIPAL = 'hive.metastore.kerberos.principal'
_CNF_HIVESERVER2_KERBEROS_PRINCIPAL = 'hive.server2.authentication.kerberos.principal'
_CNF_HIVESERVER2_AUTHENTICATION = 'hive.server2.authentication'
_CNF_HIVESERVER2_IMPERSONATION = 'hive.server2.enable.doAs'

# Host is whatever up to the colon. Allow and ignore a trailing slash.
_THRIFT_URI_RE = re.compile("^thrift://([^:]+):(\d+)[/]?$")
class MalformedHiveSiteException(Exception):
    """Parsing error class used internally"""
    # NOTE(review): not raised anywhere in this module's visible code.
    pass
def reset():
    """Reset the cached conf"""
    # Clears both module-level caches so the next get_conf()/get_metastore()
    # call re-reads hive-site.xml from disk.
    global _HIVE_SITE_DICT
    global _METASTORE_LOC_CACHE
    _HIVE_SITE_DICT = None
    _METASTORE_LOC_CACHE = None
def get_conf():
    """get_conf() -> ConfParse object for hive-site.xml"""
    # Parsed lazily on first access; reset() invalidates the cache.
    if _HIVE_SITE_DICT is None:
        _parse_hive_site()
    return _HIVE_SITE_DICT
def get_metastore():
    """
    Get first metastore information from local hive-site.xml.

    Returns a dict with 'use_sasl', 'thrift_uri' and 'kerberos_principal',
    cached at module level. Returns None (and logs an error) when only a
    local/embedded metastore is configured, which Hue does not support.
    """
    global _METASTORE_LOC_CACHE
    if not _METASTORE_LOC_CACHE:
        thrift_uris = get_conf().get(_CNF_METASTORE_URIS)
        # An absent or empty hive.metastore.uris means an embedded metastore.
        is_local = thrift_uris is None or thrift_uris == ''
        if not is_local:
            use_sasl = str(get_conf().get(_CNF_METASTORE_SASL, 'false')).lower() == 'true'
            thrift_uri = thrift_uris.split(",")[0]  # First URI
            # Fall back to our own FQDN if the URI cannot be parsed.
            host = socket.getfqdn()
            match = _THRIFT_URI_RE.match(thrift_uri)
            if not match:
                LOG.error('Cannot understand remote metastore uri "%s"' % thrift_uri)
            else:
                host, port = match.groups()
            # Resolve the configured principal against the chosen host.
            kerberos_principal = security_util.get_kerberos_principal(get_conf().get(_CNF_METASTORE_KERBEROS_PRINCIPAL, None), host)
            _METASTORE_LOC_CACHE = {
                'use_sasl': use_sasl,
                'thrift_uri': thrift_uri,
                'kerberos_principal': kerberos_principal
            }
        else:
            LOG.error('Hue requires a remote metastore configuration')
    return _METASTORE_LOC_CACHE
def get_hiveserver2_kerberos_principal(hostname_or_ip):
    """Return the Kerberos principal configured for HiveServer2, resolved
    against the FQDN of `hostname_or_ip`, or None when none is configured.

    Raises socket.herror if the FQDN lookup fails.
    """
    fqdn = security_util.get_fqdn(hostname_or_ip)
    principal = get_conf().get(_CNF_HIVESERVER2_KERBEROS_PRINCIPAL, None)
    if not principal:
        return None
    # Substitute the resolved host into the configured principal pattern.
    return security_util.get_kerberos_principal(principal, fqdn)
def get_hiveserver2_authentication():
    """Return the HiveServer2 auth mechanism from hive-site.xml, upper-cased."""
    return get_conf().get(_CNF_HIVESERVER2_AUTHENTICATION, 'NONE').upper()  # NONE == PLAIN SASL
def hiveserver2_impersonation_enabled():
    """True if hive.server2.enable.doAs is set (case-insensitive) to 'true'."""
    return get_conf().get(_CNF_HIVESERVER2_IMPERSONATION, 'FALSE').upper() == 'TRUE'
def hiveserver2_jdbc_url():
    """Build the JDBC URL for the HiveServer2 host/port from Hue's own config."""
    return 'jdbc:hive2://%s:%s/default' % (beeswax.conf.HIVE_SERVER_HOST.get(), beeswax.conf.HIVE_SERVER_PORT.get())
def _parse_hive_site():
    """
    Parse hive-site.xml and store in _HIVE_SITE_DICT

    A missing file is tolerated and produces an empty ConfParse. Any other
    read error is logged and returns early, leaving _HIVE_SITE_DICT as-is
    (None on first call -- get_conf() then returns None; pre-existing quirk).
    """
    global _HIVE_SITE_DICT
    global _HIVE_SITE_PATH

    _HIVE_SITE_PATH = os.path.join(beeswax.conf.HIVE_CONF_DIR.get(), 'hive-site.xml')
    data = ""
    try:
        # `with open(...)` replaces the old bare file(...).read(), which
        # leaked the file handle; `except ... as` is Python 2.6+/3 syntax.
        with open(_HIVE_SITE_PATH, 'r') as conf_file:
            data = conf_file.read()
    except IOError as err:
        if err.errno != errno.ENOENT:
            LOG.error('Cannot read from "%s": %s' % (_HIVE_SITE_PATH, err))
            return
        # Keep going and make an empty ConfParse
    _HIVE_SITE_DICT = confparse.ConfParse(data)
| 31.263889 | 126 | 0.734118 |
"""
Helper for reading hive-site.xml
"""
import errno
import logging
import os.path
import re
import socket
from desktop.lib import security_util
from hadoop import confparse
import beeswax.conf
LOG = logging.getLogger(__name__)
_HIVE_SITE_PATH = None
_HIVE_SITE_DICT = None
_METASTORE_LOC_CACHE = None
_CNF_METASTORE_SASL = 'hive.metastore.sasl.enabled'
_CNF_METASTORE_URIS = 'hive.metastore.uris'
_CNF_METASTORE_KERBEROS_PRINCIPAL = 'hive.metastore.kerberos.principal'
_CNF_HIVESERVER2_KERBEROS_PRINCIPAL = 'hive.server2.authentication.kerberos.principal'
_CNF_HIVESERVER2_AUTHENTICATION = 'hive.server2.authentication'
_CNF_HIVESERVER2_IMPERSONATION = 'hive.server2.enable.doAs'
_THRIFT_URI_RE = re.compile("^thrift://([^:]+):(\d+)[/]?$")
class MalformedHiveSiteException(Exception):
"""Parsing error class used internally"""
pass
def reset():
"""Reset the cached conf"""
global _HIVE_SITE_DICT
global _METASTORE_LOC_CACHE
_HIVE_SITE_DICT = None
_METASTORE_LOC_CACHE = None
def get_conf():
"""get_conf() -> ConfParse object for hive-site.xml"""
if _HIVE_SITE_DICT is None:
_parse_hive_site()
return _HIVE_SITE_DICT
def get_metastore():
"""
Get first metastore information from local hive-site.xml.
"""
global _METASTORE_LOC_CACHE
if not _METASTORE_LOC_CACHE:
thrift_uris = get_conf().get(_CNF_METASTORE_URIS)
is_local = thrift_uris is None or thrift_uris == ''
if not is_local:
use_sasl = str(get_conf().get(_CNF_METASTORE_SASL, 'false')).lower() == 'true'
thrift_uri = thrift_uris.split(",")[0]
host = socket.getfqdn()
match = _THRIFT_URI_RE.match(thrift_uri)
if not match:
LOG.error('Cannot understand remote metastore uri "%s"' % thrift_uri)
else:
host, port = match.groups()
kerberos_principal = security_util.get_kerberos_principal(get_conf().get(_CNF_METASTORE_KERBEROS_PRINCIPAL, None), host)
_METASTORE_LOC_CACHE = {
'use_sasl': use_sasl,
'thrift_uri': thrift_uri,
'kerberos_principal': kerberos_principal
}
else:
LOG.error('Hue requires a remote metastore configuration')
return _METASTORE_LOC_CACHE
def get_hiveserver2_kerberos_principal(hostname_or_ip):
"""
Retrieves principal for HiveServer 2.
Raises socket.herror
"""
fqdn = security_util.get_fqdn(hostname_or_ip)
principal = get_conf().get(_CNF_HIVESERVER2_KERBEROS_PRINCIPAL, None)
if principal:
return security_util.get_kerberos_principal(principal, fqdn)
else:
return None
def get_hiveserver2_authentication():
return get_conf().get(_CNF_HIVESERVER2_AUTHENTICATION, 'NONE').upper()
def hiveserver2_impersonation_enabled():
return get_conf().get(_CNF_HIVESERVER2_IMPERSONATION, 'FALSE').upper() == 'TRUE'
def hiveserver2_jdbc_url():
return 'jdbc:hive2://%s:%s/default' % (beeswax.conf.HIVE_SERVER_HOST.get(), beeswax.conf.HIVE_SERVER_PORT.get())
def _parse_hive_site():
"""
Parse hive-site.xml and store in _HIVE_SITE_DICT
"""
global _HIVE_SITE_DICT
global _HIVE_SITE_PATH
_HIVE_SITE_PATH = os.path.join(beeswax.conf.HIVE_CONF_DIR.get(), 'hive-site.xml')
try:
data = file(_HIVE_SITE_PATH, 'r').read()
except IOError, err:
if err.errno != errno.ENOENT:
LOG.error('Cannot read from "%s": %s' % (_HIVE_SITE_PATH, err))
return
data = ""
_HIVE_SITE_DICT = confparse.ConfParse(data)
| false | true |
f71f573d416e2f35d92d643b1b9835d4b1c1c202 | 13,297 | py | Python | KiBuzzard/buzzard/modules/svgstring2path.py | HDR/KiBuzzard | b9e2cff0783b7cda9b8d68f2d2b5077b48d3a838 | [
"MIT"
] | 240 | 2021-01-11T14:49:24.000Z | 2022-03-29T22:33:49.000Z | KiBuzzard/buzzard/modules/svgstring2path.py | HDR/KiBuzzard | b9e2cff0783b7cda9b8d68f2d2b5077b48d3a838 | [
"MIT"
] | 77 | 2021-01-12T20:23:30.000Z | 2022-03-28T12:14:34.000Z | KiBuzzard/buzzard/modules/svgstring2path.py | HDR/KiBuzzard | b9e2cff0783b7cda9b8d68f2d2b5077b48d3a838 | [
"MIT"
] | 28 | 2021-01-17T05:44:11.000Z | 2022-01-11T19:58:46.000Z | # This is a conglomeration of modules removed from https://github.com/mathandy/svgpathtools
# in order to support a modified 'svg2paths' method called 'string2paths' which takes an
# svg string as an argument instead of a filename.
from svgpathtools import Line, QuadraticBezier, CubicBezier, Path, Arc
from xml.dom.minidom import parseString
import warnings
import re
# Python 2 compatibility: alias `str` to basestring so isinstance-style
# checks accept unicode too; on Python 3 the NameError leaves `str` intact.
try:
    str = basestring
except NameError:
    pass

COMMANDS = set('MmZzLlHhVvCcSsQqTtAa')   # every SVG path command letter
UPPERCASE = set('MZLHVCSQTA')            # absolute-coordinate command letters
COMMAND_RE = re.compile("([MmZzLlHhVvCcSsQqTtAa])")
FLOAT_RE = re.compile("[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?")

# Matches one "x,y" / "x y" / "x-y" coordinate pair of signed decimals
# (with optional exponents), as used in polyline/polygon points attributes.
COORD_PAIR_TMPLT = re.compile(
    r'([\+-]?\d*[\.\d]\d*[eE][\+-]?\d+|[\+-]?\d*[\.\d]\d*)' +
    r'(?:\s*,\s*|\s+|(?=-))' +
    r'([\+-]?\d*[\.\d]\d*[eE][\+-]?\d+|[\+-]?\d*[\.\d]\d*)'
)
def path2pathd(path):
    """Return the d-string from a <path> element's attribute dict ('' if absent)."""
    return path.get('d', '')
def ellipse2pathd(ellipse):
    """Convert the attribute dict of an SVG <ellipse> or <circle> element
    into a Path d-string built from two half-turn arcs."""
    center_x = float(ellipse.get('cx', 0))
    center_y = float(ellipse.get('cy', 0))
    r = ellipse.get('r', None)
    if r is not None:
        # <circle>: a single radius serves both axes.
        radius_x = radius_y = float(r)
    else:
        radius_x = float(ellipse.get('rx', None))
        radius_y = float(ellipse.get('ry', None))
    # Start at the leftmost point and sweep two 180-degree arcs.
    pieces = [
        'M' + str(center_x - radius_x) + ',' + str(center_y),
        'a' + str(radius_x) + ',' + str(radius_y) + ' 0 1,0 ' + str(2 * radius_x) + ',0',
        'a' + str(radius_x) + ',' + str(radius_y) + ' 0 1,0 ' + str(-2 * radius_x) + ',0',
    ]
    return ''.join(pieces)
def polyline2pathd(polyline_d, is_polygon=False):
    """Convert a polyline (or, with is_polygon=True, polygon) points-attribute
    string into a Path d-string of line segments."""
    points = COORD_PAIR_TMPLT.findall(polyline_d)
    first, last = points[0], points[-1]
    closed = (float(first[0]) == float(last[0]) and
              float(first[1]) == float(last[1]))
    # parse_path drops redundant 'z' closures (M0 0L100 100Z parses the same
    # as M0 0L100 100L0 0Z), so an already-closed n-point polygon must repeat
    # its first vertex to still yield n lines.
    if is_polygon and closed:
        points.append(points[0])
    d = 'M' + 'L'.join('{0} {1}'.format(px, py) for px, py in points)
    if is_polygon or closed:
        d += 'z'
    return d
def polygon2pathd(polyline_d):
    """converts the string from a polygon points-attribute to a string
    for a Path object d-attribute.
    Note: For a polygon made from n points, the resulting path will be
    composed of n lines (even if some of these lines have length zero).
    """
    # Delegates to polyline2pathd with is_polygon=True, which forces closure.
    return polyline2pathd(polyline_d, True)
def rect2pathd(rect):
    """Convert an SVG <rect> attribute dict to a closed Path d-string.

    The path starts at the rect's (x, y) corner and visits the remaining
    three corners in order, closing with 'z'. Missing attributes default
    to 0.
    """
    x = float(rect.get('x', 0))
    y = float(rect.get('y', 0))
    width = float(rect.get('width', 0))
    height = float(rect.get('height', 0))
    corners = [(x, y), (x + width, y), (x + width, y + height), (x, y + height)]
    coords = [value for corner in corners for value in corner]
    return "M{} {} L {} {} L {} {} L {} {} z".format(*coords)
def line2pathd(l):
    """Convert an SVG <line> attribute dict to a Path d-string (M x1 y1 L x2 y2)."""
    return 'M{} {}L{} {}'.format(l['x1'], l['y1'], l['x2'], l['y2'])
def string2paths(svg_string,
                 return_svg_attributes=True,
                 convert_circles_to_paths=True,
                 convert_ellipses_to_paths=True,
                 convert_lines_to_paths=True,
                 convert_polylines_to_paths=True,
                 convert_polygons_to_paths=True,
                 convert_rectangles_to_paths=True):
    """Convert an SVG document, given as a string, into a list of Path objects.

    Like svgpathtools' svg2paths, but parses from a string instead of a file.

    Args:
        svg_string: the SVG document text.
        return_svg_attributes: if True, also return the root <svg> element's
            attribute dict as a third result.
        convert_*_to_paths: whether to include each shape element kind
            (converted to an equivalent path d-string) in the output.

    Returns:
        (path_list, attribute_dictionary_list[, svg_attributes]); the two
        lists are parallel (attributes i belong to path i).
    """
    doc = parseString(svg_string)

    def dom2dict(element):
        """Converts DOM elements to dictionaries of attributes."""
        keys = list(element.attributes.keys())
        values = [val.value for val in list(element.attributes.values())]
        return dict(list(zip(keys, values)))

    # Use minidom to extract path strings from input SVG
    paths = [dom2dict(el) for el in doc.getElementsByTagName('path')]
    d_strings = [el['d'] for el in paths]
    attribute_dictionary_list = paths
    # Use minidom to extract polyline strings from input SVG, convert to
    # path strings, add to list
    if convert_polylines_to_paths:
        plins = [dom2dict(el) for el in doc.getElementsByTagName('polyline')]
        d_strings += [polyline2pathd(pl['points']) for pl in plins]
        attribute_dictionary_list += plins
    # Use minidom to extract polygon strings from input SVG, convert to
    # path strings, add to list
    if convert_polygons_to_paths:
        pgons = [dom2dict(el) for el in doc.getElementsByTagName('polygon')]
        d_strings += [polygon2pathd(pg['points']) for pg in pgons]
        attribute_dictionary_list += pgons
    if convert_lines_to_paths:
        lines = [dom2dict(el) for el in doc.getElementsByTagName('line')]
        d_strings += [('M' + l['x1'] + ' ' + l['y1'] +
                       'L' + l['x2'] + ' ' + l['y2']) for l in lines]
        attribute_dictionary_list += lines
    if convert_ellipses_to_paths:
        ellipses = [dom2dict(el) for el in doc.getElementsByTagName('ellipse')]
        d_strings += [ellipse2pathd(e) for e in ellipses]
        attribute_dictionary_list += ellipses
    if convert_circles_to_paths:
        circles = [dom2dict(el) for el in doc.getElementsByTagName('circle')]
        d_strings += [ellipse2pathd(c) for c in circles]
        attribute_dictionary_list += circles
    if convert_rectangles_to_paths:
        rectangles = [dom2dict(el) for el in doc.getElementsByTagName('rect')]
        d_strings += [rect2pathd(r) for r in rectangles]
        attribute_dictionary_list += rectangles
    if return_svg_attributes:
        svg_attributes = dom2dict(doc.getElementsByTagName('svg')[0])
        doc.unlink()
        path_list = [parse_path(d) for d in d_strings]
        return path_list, attribute_dictionary_list, svg_attributes
    else:
        doc.unlink()
        path_list = [parse_path(d) for d in d_strings]
        return path_list, attribute_dictionary_list
def _tokenize_path(pathdef):
    """Yield the command letters and numeric tokens of an SVG path d-string,
    in order of appearance."""
    for piece in COMMAND_RE.split(pathdef):
        if piece in COMMANDS:
            yield piece
        # Command letters never match FLOAT_RE (it requires a digit), so
        # scanning every piece only emits numbers from non-command pieces.
        for number in FLOAT_RE.findall(piece):
            yield number
def parse_path(pathdef, current_pos=0j, tree_element=None):
    """Parse an SVG path d-string into a Path of Line/Bezier/Arc segments.

    Args:
        pathdef: the d-attribute string (M/L/H/V/C/S/Q/T/A/Z commands).
        current_pos: complex starting point; an initial relative moveto is
            taken relative to it (see note below).
        tree_element: optional element forwarded to the Path constructor.

    Raises:
        ValueError: on coordinates appearing before any command.
    """
    # In the SVG specs, initial movetos are absolute, even if
    # specified as 'm'. This is the default behavior here as well.
    # But if you pass in a current_pos variable, the initial moveto
    # will be relative to that current_pos. This is useful.
    elements = list(_tokenize_path(pathdef))
    # Reverse for easy use of .pop()
    elements.reverse()
    if tree_element is None:
        segments = Path()
    else:
        segments = Path(tree_element=tree_element)
    start_pos = None
    command = None
    while elements:
        if elements[-1] in COMMANDS:
            # New command.
            last_command = command  # Used by S and T
            command = elements.pop()
            absolute = command in UPPERCASE
            command = command.upper()
        else:
            # If this element starts with numbers, it is an implicit command
            # and we don't change the command. Check that it's allowed:
            if command is None:
                raise ValueError("Unallowed implicit command in %s, position %s" % (
                    pathdef, len(pathdef.split()) - len(elements)))
        if command == 'M':
            # Moveto command.
            x = elements.pop()
            y = elements.pop()
            pos = float(x) + float(y) * 1j
            if absolute:
                current_pos = pos
            else:
                current_pos += pos
            # when M is called, reset start_pos
            # This behavior of Z is defined in svg spec:
            # http://www.w3.org/TR/SVG/paths.html#PathDataClosePathCommand
            start_pos = current_pos
            # Implicit moveto commands are treated as lineto commands.
            # So we set command to lineto here, in case there are
            # further implicit commands after this moveto.
            command = 'L'
        elif command == 'Z':
            # Close path: draw a line back to the subpath start if needed.
            if not (current_pos == start_pos):
                segments.append(Line(current_pos, start_pos))
            segments.closed = True
            current_pos = start_pos
            command = None
        elif command == 'L':
            x = elements.pop()
            y = elements.pop()
            pos = float(x) + float(y) * 1j
            if not absolute:
                pos += current_pos
            segments.append(Line(current_pos, pos))
            current_pos = pos
        elif command == 'H':
            # Horizontal lineto: only x is given; keep the current y.
            x = elements.pop()
            pos = float(x) + current_pos.imag * 1j
            if not absolute:
                pos += current_pos.real
            segments.append(Line(current_pos, pos))
            current_pos = pos
        elif command == 'V':
            # Vertical lineto: only y is given; keep the current x.
            y = elements.pop()
            pos = current_pos.real + float(y) * 1j
            if not absolute:
                pos += current_pos.imag * 1j
            segments.append(Line(current_pos, pos))
            current_pos = pos
        elif command == 'C':
            control1 = float(elements.pop()) + float(elements.pop()) * 1j
            control2 = float(elements.pop()) + float(elements.pop()) * 1j
            end = float(elements.pop()) + float(elements.pop()) * 1j
            if not absolute:
                control1 += current_pos
                control2 += current_pos
                end += current_pos
            segments.append(CubicBezier(current_pos, control1, control2, end))
            current_pos = end
        elif command == 'S':
            # Smooth curve. First control point is the "reflection" of
            # the second control point in the previous path.
            if last_command not in 'CS':
                # If there is no previous command or if the previous command
                # was not an C, c, S or s, assume the first control point is
                # coincident with the current point.
                control1 = current_pos
            else:
                # The first control point is assumed to be the reflection of
                # the second control point on the previous command relative
                # to the current point.
                control1 = current_pos + current_pos - segments[-1].control2
            control2 = float(elements.pop()) + float(elements.pop()) * 1j
            end = float(elements.pop()) + float(elements.pop()) * 1j
            if not absolute:
                control2 += current_pos
                end += current_pos
            segments.append(CubicBezier(current_pos, control1, control2, end))
            current_pos = end
        elif command == 'Q':
            control = float(elements.pop()) + float(elements.pop()) * 1j
            end = float(elements.pop()) + float(elements.pop()) * 1j
            if not absolute:
                control += current_pos
                end += current_pos
            segments.append(QuadraticBezier(current_pos, control, end))
            current_pos = end
        elif command == 'T':
            # Smooth curve. Control point is the "reflection" of
            # the second control point in the previous path.
            if last_command not in 'QT':
                # If there is no previous command or if the previous command
                # was not an Q, q, T or t, assume the first control point is
                # coincident with the current point.
                control = current_pos
            else:
                # The control point is assumed to be the reflection of
                # the control point on the previous command relative
                # to the current point.
                control = current_pos + current_pos - segments[-1].control
            end = float(elements.pop()) + float(elements.pop()) * 1j
            if not absolute:
                end += current_pos
            segments.append(QuadraticBezier(current_pos, control, end))
            current_pos = end
        elif command == 'A':
            radius = float(elements.pop()) + float(elements.pop()) * 1j
            rotation = float(elements.pop())
            arc = float(elements.pop())
            sweep = float(elements.pop())
            end = float(elements.pop()) + float(elements.pop()) * 1j
            if not absolute:
                end += current_pos
            segments.append(Arc(current_pos, radius, rotation, arc, sweep, end))
            current_pos = end
    return segments
def _check_num_parsed_values(values, allowed):
if not any(num == len(values) for num in allowed):
if len(allowed) > 1:
warnings.warn('Expected one of the following number of values {0}, but found {1} values instead: {2}'
.format(allowed, len(values), values))
elif allowed[0] != 1:
warnings.warn('Expected {0} values, found {1}: {2}'.format(allowed[0], len(values), values))
else:
warnings.warn('Expected 1 value, found {0}: {1}'.format(len(values), values))
return False
return True
def parse_transform(transform_str):
    """Placeholder: SVG `transform` attributes are not supported.
    Emits a warning and returns None."""
    warnings.warn('Transforms not implemented')
| 36.53022 | 113 | 0.582011 |
from svgpathtools import Line, QuadraticBezier, CubicBezier, Path, Arc
from xml.dom.minidom import parseString
import warnings
import re
try:
str = basestring
except NameError:
pass
COMMANDS = set('MmZzLlHhVvCcSsQqTtAa')
UPPERCASE = set('MZLHVCSQTA')
COMMAND_RE = re.compile("([MmZzLlHhVvCcSsQqTtAa])")
FLOAT_RE = re.compile("[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?")
COORD_PAIR_TMPLT = re.compile(
r'([\+-]?\d*[\.\d]\d*[eE][\+-]?\d+|[\+-]?\d*[\.\d]\d*)' +
r'(?:\s*,\s*|\s+|(?=-))' +
r'([\+-]?\d*[\.\d]\d*[eE][\+-]?\d+|[\+-]?\d*[\.\d]\d*)'
)
def path2pathd(path):
return path.get('d', '')
def ellipse2pathd(ellipse):
cx = ellipse.get('cx', 0)
cy = ellipse.get('cy', 0)
rx = ellipse.get('rx', None)
ry = ellipse.get('ry', None)
r = ellipse.get('r', None)
if r is not None:
rx = ry = float(r)
else:
rx = float(rx)
ry = float(ry)
cx = float(cx)
cy = float(cy)
d = ''
d += 'M' + str(cx - rx) + ',' + str(cy)
d += 'a' + str(rx) + ',' + str(ry) + ' 0 1,0 ' + str(2 * rx) + ',0'
d += 'a' + str(rx) + ',' + str(ry) + ' 0 1,0 ' + str(-2 * rx) + ',0'
return d
def polyline2pathd(polyline_d, is_polygon=False):
points = COORD_PAIR_TMPLT.findall(polyline_d)
closed = (float(points[0][0]) == float(points[-1][0]) and
float(points[0][1]) == float(points[-1][1]))
if is_polygon and closed:
points.append(points[0])
d = 'M' + 'L'.join('{0} {1}'.format(x,y) for x,y in points)
if is_polygon or closed:
d += 'z'
return d
def polygon2pathd(polyline_d):
return polyline2pathd(polyline_d, True)
def rect2pathd(rect):
x0, y0 = float(rect.get('x', 0)), float(rect.get('y', 0))
w, h = float(rect.get('width', 0)), float(rect.get('height', 0))
x1, y1 = x0 + w, y0
x2, y2 = x0 + w, y0 + h
x3, y3 = x0, y0 + h
d = ("M{} {} L {} {} L {} {} L {} {} z"
"".format(x0, y0, x1, y1, x2, y2, x3, y3))
return d
def line2pathd(l):
return 'M' + l['x1'] + ' ' + l['y1'] + 'L' + l['x2'] + ' ' + l['y2']
def string2paths(svg_string,
return_svg_attributes=True,
convert_circles_to_paths=True,
convert_ellipses_to_paths=True,
convert_lines_to_paths=True,
convert_polylines_to_paths=True,
convert_polygons_to_paths=True,
convert_rectangles_to_paths=True):
doc = parseString(svg_string)
def dom2dict(element):
keys = list(element.attributes.keys())
values = [val.value for val in list(element.attributes.values())]
return dict(list(zip(keys, values)))
paths = [dom2dict(el) for el in doc.getElementsByTagName('path')]
d_strings = [el['d'] for el in paths]
attribute_dictionary_list = paths
if convert_polylines_to_paths:
plins = [dom2dict(el) for el in doc.getElementsByTagName('polyline')]
d_strings += [polyline2pathd(pl['points']) for pl in plins]
attribute_dictionary_list += plins
if convert_polygons_to_paths:
pgons = [dom2dict(el) for el in doc.getElementsByTagName('polygon')]
d_strings += [polygon2pathd(pg['points']) for pg in pgons]
attribute_dictionary_list += pgons
if convert_lines_to_paths:
lines = [dom2dict(el) for el in doc.getElementsByTagName('line')]
d_strings += [('M' + l['x1'] + ' ' + l['y1'] +
'L' + l['x2'] + ' ' + l['y2']) for l in lines]
attribute_dictionary_list += lines
if convert_ellipses_to_paths:
ellipses = [dom2dict(el) for el in doc.getElementsByTagName('ellipse')]
d_strings += [ellipse2pathd(e) for e in ellipses]
attribute_dictionary_list += ellipses
if convert_circles_to_paths:
circles = [dom2dict(el) for el in doc.getElementsByTagName('circle')]
d_strings += [ellipse2pathd(c) for c in circles]
attribute_dictionary_list += circles
if convert_rectangles_to_paths:
rectangles = [dom2dict(el) for el in doc.getElementsByTagName('rect')]
d_strings += [rect2pathd(r) for r in rectangles]
attribute_dictionary_list += rectangles
if return_svg_attributes:
svg_attributes = dom2dict(doc.getElementsByTagName('svg')[0])
doc.unlink()
path_list = [parse_path(d) for d in d_strings]
return path_list, attribute_dictionary_list, svg_attributes
else:
doc.unlink()
path_list = [parse_path(d) for d in d_strings]
return path_list, attribute_dictionary_list
def _tokenize_path(pathdef):
for x in COMMAND_RE.split(pathdef):
if x in COMMANDS:
yield x
for token in FLOAT_RE.findall(x):
yield token
def parse_path(pathdef, current_pos=0j, tree_element=None):
elements = list(_tokenize_path(pathdef))
elements.reverse()
if tree_element is None:
segments = Path()
else:
segments = Path(tree_element=tree_element)
start_pos = None
command = None
while elements:
if elements[-1] in COMMANDS:
last_command = command
command = elements.pop()
absolute = command in UPPERCASE
command = command.upper()
else:
if command is None:
raise ValueError("Unallowed implicit command in %s, position %s" % (
pathdef, len(pathdef.split()) - len(elements)))
if command == 'M':
x = elements.pop()
y = elements.pop()
pos = float(x) + float(y) * 1j
if absolute:
current_pos = pos
else:
current_pos += pos
current_pos
command = 'L'
elif command == 'Z':
if not (current_pos == start_pos):
segments.append(Line(current_pos, start_pos))
segments.closed = True
current_pos = start_pos
command = None
elif command == 'L':
x = elements.pop()
y = elements.pop()
pos = float(x) + float(y) * 1j
if not absolute:
pos += current_pos
segments.append(Line(current_pos, pos))
current_pos = pos
elif command == 'H':
x = elements.pop()
pos = float(x) + current_pos.imag * 1j
if not absolute:
pos += current_pos.real
segments.append(Line(current_pos, pos))
current_pos = pos
elif command == 'V':
y = elements.pop()
pos = current_pos.real + float(y) * 1j
if not absolute:
pos += current_pos.imag * 1j
segments.append(Line(current_pos, pos))
current_pos = pos
elif command == 'C':
control1 = float(elements.pop()) + float(elements.pop()) * 1j
control2 = float(elements.pop()) + float(elements.pop()) * 1j
end = float(elements.pop()) + float(elements.pop()) * 1j
if not absolute:
control1 += current_pos
control2 += current_pos
end += current_pos
segments.append(CubicBezier(current_pos, control1, control2, end))
current_pos = end
elif command == 'S':
if last_command not in 'CS':
control1 = current_pos
else:
control1 = current_pos + current_pos - segments[-1].control2
control2 = float(elements.pop()) + float(elements.pop()) * 1j
end = float(elements.pop()) + float(elements.pop()) * 1j
if not absolute:
control2 += current_pos
end += current_pos
segments.append(CubicBezier(current_pos, control1, control2, end))
current_pos = end
elif command == 'Q':
control = float(elements.pop()) + float(elements.pop()) * 1j
end = float(elements.pop()) + float(elements.pop()) * 1j
if not absolute:
control += current_pos
end += current_pos
segments.append(QuadraticBezier(current_pos, control, end))
current_pos = end
elif command == 'T':
if last_command not in 'QT':
control = current_pos
else:
control = current_pos + current_pos - segments[-1].control
end = float(elements.pop()) + float(elements.pop()) * 1j
if not absolute:
end += current_pos
segments.append(QuadraticBezier(current_pos, control, end))
current_pos = end
elif command == 'A':
radius = float(elements.pop()) + float(elements.pop()) * 1j
rotation = float(elements.pop())
arc = float(elements.pop())
sweep = float(elements.pop())
end = float(elements.pop()) + float(elements.pop()) * 1j
if not absolute:
end += current_pos
segments.append(Arc(current_pos, radius, rotation, arc, sweep, end))
current_pos = end
return segments
def _check_num_parsed_values(values, allowed):
if not any(num == len(values) for num in allowed):
if len(allowed) > 1:
warnings.warn('Expected one of the following number of values {0}, but found {1} values instead: {2}'
.format(allowed, len(values), values))
elif allowed[0] != 1:
warnings.warn('Expected {0} values, found {1}: {2}'.format(allowed[0], len(values), values))
else:
warnings.warn('Expected 1 value, found {0}: {1}'.format(len(values), values))
return False
return True
def parse_transform(transform_str):
    """Stub for SVG ``transform`` attribute parsing.

    Not implemented: emits a warning and implicitly returns None,
    so transforms present in the document are ignored.
    """
    warnings.warn('Transforms not implemented')
| true | true |
f71f58007a0c5588589b9d561d48fa13ca605a79 | 4,663 | py | Python | parser/fase2/team22/Instrucciones/Sql_alter/AlterTableAddColumn.py | LopDlMa/tytus | 0b43ee1c7300cb11ddbe593e08239321b71dc443 | [
"MIT"
] | null | null | null | parser/fase2/team22/Instrucciones/Sql_alter/AlterTableAddColumn.py | LopDlMa/tytus | 0b43ee1c7300cb11ddbe593e08239321b71dc443 | [
"MIT"
] | null | null | null | parser/fase2/team22/Instrucciones/Sql_alter/AlterTableAddColumn.py | LopDlMa/tytus | 0b43ee1c7300cb11ddbe593e08239321b71dc443 | [
"MIT"
] | null | null | null | from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Excepcion import Excepcion
import collections
from storageManager.jsonMode import *
from Optimizador.C3D import *
from Instrucciones.TablaSimbolos import Instruccion3D as c3d
class AlterTableAddColumn(Instruccion):
def __init__(self, tabla, lista_col, strGram,linea, columna):
Instruccion.__init__(self,None,linea,columna,strGram)
self.tabla = tabla
self.lista_col = lista_col
    def ejecutar(self, tabla, arbol):
        """Execute ALTER TABLE ... ADD COLUMN.

        Validation order: a database must be selected, the table must exist,
        no new column may collide with an existing table column, and the new
        column names must not repeat among themselves.  On success the columns
        are added both to the in-memory table object and to on-disk storage.
        Any failure appends an Excepcion to ``arbol.excepciones`` and its text
        to ``arbol.consola`` (most error paths also return the error object).
        """
        super().ejecutar(tabla,arbol)
        if arbol.bdUsar != None:
            objetoTabla = arbol.devolviendoTablaDeBase(self.tabla)
            if objetoTabla != 0:
                existeColumna = False
                for c in self.lista_col:
                    for columnas in objetoTabla.lista_de_campos:
                        # If the column already exists, report a semantic error.
                        if columnas.nombre == c.id:
                            existeColumna = True
                            error = Excepcion('42701',"Semántico","Ya existe la columna «"+c.id+"» en la relación «"+self.tabla+"»",c.linea,c.columna)
                            arbol.excepciones.append(error)
                            arbol.consola.append(error.toString())
                if existeColumna:
                    return
                # Detect duplicate names within the list of columns to insert.
                nombres = []
                for columnas in self.lista_col:
                    nombres.append(columnas.id)
                duplicados = [item for item, count in collections.Counter(nombres).items() if count > 1]
                for columnas in duplicados:
                    existeColumna = True
                    error = Excepcion('42701',"Semántico","Ya existe la columna «"+columnas+"» en la relación «"+self.tabla+"»",self.linea,self.columna)
                    arbol.excepciones.append(error)
                    arbol.consola.append(error.toString())
                if existeColumna:
                    return
                # Store the new columns in the in-memory table object.
                for c in self.lista_col:
                    objetoTabla.agregarColumna(c.id, c.tipo,None, None)
                # Persist the new columns to disk via the storage manager;
                # non-zero results map to storage-manager error codes.
                for columnas in self.lista_col:
                    resultado = alterAddColumn(arbol.getBaseDatos(),self.tabla,columnas.id)
                    if resultado == 1:
                        error = Excepcion('XX000',"Semántico","Error interno",self.linea,self.columna)
                        arbol.excepciones.append(error)
                        arbol.consola.append(error.toString())
                        return error
                    elif resultado == 2:
                        error = Excepcion('42P00',"Semántico","La base de datos "+str(arbol.getBaseDatos())+" no existe",self.linea,self.columna)
                        arbol.excepciones.append(error)
                        arbol.consola.append(error.toString())
                        return error
                    elif resultado == 3:
                        error = Excepcion('42P01',"Semántico","No existe la relación "+self.tabla,self.linea,self.columna)
                        arbol.excepciones.append(error)
                        arbol.consola.append(error.toString())
                        return error
                arbol.consola.append("Consulta devuelta correctamente.")
            else:
                error = Excepcion('42P01',"Semántico","No existe la relación "+self.tabla,self.linea,self.columna)
                arbol.excepciones.append(error)
                arbol.consola.append(error.toString())
                return error
        else:
            error = Excepcion("100","Semantico","No ha seleccionado ninguna Base de Datos.",self.linea,self.columna)
            arbol.excepciones.append(error)
            arbol.consola.append(error.toString())
def generar3D(self, tabla, arbol):
super().generar3D(tabla,arbol)
code = []
t0 = c3d.getTemporal()
code.append(c3d.asignacionString(t0, "ALTER TABLE " + self.tabla))
t1 = c3d.getTemporal()
for col in self.lista_col:
code.append(c3d.operacion(t1, Identificador(t0), Valor(" \" ADD COLUMN " + col.id + " " + col.tipo.toString() + "\" ", "STRING"), OP_ARITMETICO.SUMA))
t0 = t1
t1 = c3d.getTemporal()
code.append(c3d.operacion(t1, Identificador(t0), Valor("\";\"", "STRING"), OP_ARITMETICO.SUMA))
code.append(c3d.asignacionTemporalStack(t1))
code.append(c3d.aumentarP())
return code | 51.811111 | 162 | 0.5638 | from Instrucciones.TablaSimbolos.Instruccion import Instruccion
from Instrucciones.Excepcion import Excepcion
import collections
from storageManager.jsonMode import *
from Optimizador.C3D import *
from Instrucciones.TablaSimbolos import Instruccion3D as c3d
class AlterTableAddColumn(Instruccion):
def __init__(self, tabla, lista_col, strGram,linea, columna):
Instruccion.__init__(self,None,linea,columna,strGram)
self.tabla = tabla
self.lista_col = lista_col
def ejecutar(self, tabla, arbol):
super().ejecutar(tabla,arbol)
if arbol.bdUsar != None:
objetoTabla = arbol.devolviendoTablaDeBase(self.tabla)
if objetoTabla != 0:
existeColumna = False
for c in self.lista_col:
for columnas in objetoTabla.lista_de_campos:
if columnas.nombre == c.id:
existeColumna = True
error = Excepcion('42701',"Semántico","Ya existe la columna «"+c.id+"» en la relación «"+self.tabla+"»",c.linea,c.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
if existeColumna:
return
nombres = []
for columnas in self.lista_col:
nombres.append(columnas.id)
duplicados = [item for item, count in collections.Counter(nombres).items() if count > 1]
for columnas in duplicados:
existeColumna = True
error = Excepcion('42701',"Semántico","Ya existe la columna «"+columnas+"» en la relación «"+self.tabla+"»",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
if existeColumna:
return
for c in self.lista_col:
objetoTabla.agregarColumna(c.id, c.tipo,None, None)
for columnas in self.lista_col:
resultado = alterAddColumn(arbol.getBaseDatos(),self.tabla,columnas.id)
if resultado == 1:
error = Excepcion('XX000',"Semántico","Error interno",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
elif resultado == 2:
error = Excepcion('42P00',"Semántico","La base de datos "+str(arbol.getBaseDatos())+" no existe",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
elif resultado == 3:
error = Excepcion('42P01',"Semántico","No existe la relación "+self.tabla,self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
arbol.consola.append("Consulta devuelta correctamente.")
else:
error = Excepcion('42P01',"Semántico","No existe la relación "+self.tabla,self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
return error
else:
error = Excepcion("100","Semantico","No ha seleccionado ninguna Base de Datos.",self.linea,self.columna)
arbol.excepciones.append(error)
arbol.consola.append(error.toString())
def generar3D(self, tabla, arbol):
super().generar3D(tabla,arbol)
code = []
t0 = c3d.getTemporal()
code.append(c3d.asignacionString(t0, "ALTER TABLE " + self.tabla))
t1 = c3d.getTemporal()
for col in self.lista_col:
code.append(c3d.operacion(t1, Identificador(t0), Valor(" \" ADD COLUMN " + col.id + " " + col.tipo.toString() + "\" ", "STRING"), OP_ARITMETICO.SUMA))
t0 = t1
t1 = c3d.getTemporal()
code.append(c3d.operacion(t1, Identificador(t0), Valor("\";\"", "STRING"), OP_ARITMETICO.SUMA))
code.append(c3d.asignacionTemporalStack(t1))
code.append(c3d.aumentarP())
return code | true | true |
f71f5818bd5e30abb2dd28facc28beb49f2ea0f1 | 1,726 | py | Python | my_methods/my_cap_curve.py | noushadkhan01/my_methods | fc467d5c34b9b5dd105e32cc5aad218d3f6408a8 | [
"MIT"
] | null | null | null | my_methods/my_cap_curve.py | noushadkhan01/my_methods | fc467d5c34b9b5dd105e32cc5aad218d3f6408a8 | [
"MIT"
] | null | null | null | my_methods/my_cap_curve.py | noushadkhan01/my_methods | fc467d5c34b9b5dd105e32cc5aad218d3f6408a8 | [
"MIT"
] | null | null | null | def my_cap_curve(model, X, y, figsize = (10, 5),legend_font_size = 10,loc = 'best',
linewidth = 2,label_font_size = 10, poly_features = False, extra_name = None):
import matplotlib.pyplot as plt
import numpy as np
import my_global_variables
from sklearn.metrics import roc_curve, auc
class_name = model.__class__.__name__
if poly_features:
class_name = class_name + '_poly'
if extra_name:
class_name += '_' + extra_name
total = len(y)
class_1_count = np.sum(y)
class_0_count = total - class_1_count
probs = model.predict_proba(X)
probs = probs[:, 1]
model_y = [y for _, y in sorted(zip(probs, y), reverse = True)]
y_values = np.append([0], np.cumsum(model_y))
x_values = np.arange(0, total + 1)
# Area under Random Model
a = auc([0, total], [0, class_1_count])
# Area between Perfect and Random Model
aP = auc([0, class_1_count, total], [0, class_1_count, class_1_count]) - a
# Area between Trained and Random Model
aR = auc(x_values, y_values) - a
plt.figure(figsize = (figsize))
plt.plot([0, total], [0, class_1_count], c = 'r', linestyle = '--', label = 'Random Model')
plt.plot([0, class_1_count, total], [0, class_1_count, class_1_count], c = 'grey', linewidth = linewidth, label = 'Perfect Model')
plt.plot(x_values, y_values, c = 'b', label = f'{class_name} Classifier Accuracy Rate = {aR/aP}', linewidth = linewidth)
plt.xlabel('Total observations', fontsize = label_font_size)
plt.ylabel('Class 1 observations', fontsize = label_font_size)
plt.title('Cumulative Accuracy Profile', fontsize = label_font_size)
plt.legend(loc = loc, fontsize = legend_font_size)
plt.show()
my_global_variables.model_cap_scores[class_name] = aR/aP
| 45.421053 | 132 | 0.695829 | def my_cap_curve(model, X, y, figsize = (10, 5),legend_font_size = 10,loc = 'best',
linewidth = 2,label_font_size = 10, poly_features = False, extra_name = None):
import matplotlib.pyplot as plt
import numpy as np
import my_global_variables
from sklearn.metrics import roc_curve, auc
class_name = model.__class__.__name__
if poly_features:
class_name = class_name + '_poly'
if extra_name:
class_name += '_' + extra_name
total = len(y)
class_1_count = np.sum(y)
class_0_count = total - class_1_count
probs = model.predict_proba(X)
probs = probs[:, 1]
model_y = [y for _, y in sorted(zip(probs, y), reverse = True)]
y_values = np.append([0], np.cumsum(model_y))
x_values = np.arange(0, total + 1)
a = auc([0, total], [0, class_1_count])
aP = auc([0, class_1_count, total], [0, class_1_count, class_1_count]) - a
aR = auc(x_values, y_values) - a
plt.figure(figsize = (figsize))
plt.plot([0, total], [0, class_1_count], c = 'r', linestyle = '--', label = 'Random Model')
plt.plot([0, class_1_count, total], [0, class_1_count, class_1_count], c = 'grey', linewidth = linewidth, label = 'Perfect Model')
plt.plot(x_values, y_values, c = 'b', label = f'{class_name} Classifier Accuracy Rate = {aR/aP}', linewidth = linewidth)
plt.xlabel('Total observations', fontsize = label_font_size)
plt.ylabel('Class 1 observations', fontsize = label_font_size)
plt.title('Cumulative Accuracy Profile', fontsize = label_font_size)
plt.legend(loc = loc, fontsize = legend_font_size)
plt.show()
my_global_variables.model_cap_scores[class_name] = aR/aP
| true | true |
f71f5879feebeaca94821aab1a4522d364bde04b | 2,130 | py | Python | tests/test_setutils.py | acatton/fork--mahmoud--boltons | 8916c66121cdbbe2bfc365152d5c202096a0ad16 | [
"BSD-3-Clause"
] | 1 | 2017-05-08T17:42:01.000Z | 2017-05-08T17:42:01.000Z | tests/test_setutils.py | acatton/fork--mahmoud--boltons | 8916c66121cdbbe2bfc365152d5c202096a0ad16 | [
"BSD-3-Clause"
] | 16 | 2018-10-15T10:07:36.000Z | 2019-01-07T04:34:34.000Z | tests/test_setutils.py | r0flc0pt4/boltons | 96bd42b5cca2a8783079430b94f9930b764573e9 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from boltons.setutils import IndexedSet, _MISSING
def test_indexed_set_basic():
    # IndexedSet preserves insertion order while supporting set algebra.
    zero2nine = IndexedSet(range(10))
    five2nine = zero2nine & IndexedSet(range(5, 15))  # ordered intersection
    x = IndexedSet(five2nine)
    x |= set([10])  # in-place union appends the new element at the end
    assert list(zero2nine) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    assert set(zero2nine) == set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
    assert list(five2nine) == [5, 6, 7, 8, 9]
    assert x == IndexedSet([5, 6, 7, 8, 9, 10])
    assert x[-1] == 10  # supports positional (list-like) indexing
    assert zero2nine ^ five2nine == IndexedSet([0, 1, 2, 3, 4])
    # Slicing (including negative steps) also works positionally.
    assert x[:3] == IndexedSet([5, 6, 7])
    assert x[2:4:-1] == IndexedSet([8, 7])
def test_indexed_set_mutate():
    # pop() removes from the end by default; pop(index) removes positionally.
    thou = IndexedSet(range(1000))
    assert (thou.pop(), thou.pop()) == (999, 998)
    assert (thou.pop(499), thou.pop(499)) == (499, 500)
    ref = [495, 496, 497, 498, 501, 502, 503, 504, 505, 506]
    assert [thou[i] for i in range(495, 505)] == ref
    assert len(thou) == 996
    # Popping from the front accumulates dead indices; when the set compacts
    # (dead_indices shrinks), some real dead entries must have been counted.
    while len(thou) > 600:
        dead_idx_len = len(thou.dead_indices)
        dead_idx_count = thou._dead_index_count
        thou.pop(0)
        new_dead_idx_len = len(thou.dead_indices)
        if new_dead_idx_len < dead_idx_len:
            assert dead_idx_count > 0
            # 124, 109, 95
    assert len(thou) == 600
    assert thou._dead_index_count == 67
    # No internal placeholder should be visible through positional access.
    assert not any([thou[i] is _MISSING for i in range(len(thou))])
    thou &= IndexedSet(range(500, 503))
    assert thou == IndexedSet([501, 502])
    return
def big_popper():
    """Benchmark: time popping ~90k random positional indices from a large IndexedSet."""
    # more of a benchmark than a test
    from os import urandom
    import time
    big_set = IndexedSet(range(100000))
    # Fix: iterating bytes on Python 3 already yields ints, so the old
    # `[ord(r) for r in urandom(...)]` raised TypeError.  bytearray yields
    # ints when popped on both Python 2 and 3.
    rands = bytearray(urandom(len(big_set)))
    start_time, start_size = time.time(), len(big_set)
    while len(big_set) > 10000:
        if len(big_set) % 10000 == 0:
            print(len(big_set) // 10000)  # integer progress ticks
        rand = rands.pop()
        # Pop by positive and negative index to exercise both ends.
        big_set.pop(rand)
        big_set.pop(-rand)
    end_time, end_size = time.time(), len(big_set)
    print()
    print('popped %s items in %s seconds' % (start_size - end_size,
                                             end_time - start_time))
| 30.869565 | 68 | 0.585915 |
from boltons.setutils import IndexedSet, _MISSING
def test_indexed_set_basic():
zero2nine = IndexedSet(range(10))
five2nine = zero2nine & IndexedSet(range(5, 15))
x = IndexedSet(five2nine)
x |= set([10])
assert list(zero2nine) == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
assert set(zero2nine) == set([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
assert list(five2nine) == [5, 6, 7, 8, 9]
assert x == IndexedSet([5, 6, 7, 8, 9, 10])
assert x[-1] == 10
assert zero2nine ^ five2nine == IndexedSet([0, 1, 2, 3, 4])
assert x[:3] == IndexedSet([5, 6, 7])
assert x[2:4:-1] == IndexedSet([8, 7])
def test_indexed_set_mutate():
thou = IndexedSet(range(1000))
assert (thou.pop(), thou.pop()) == (999, 998)
assert (thou.pop(499), thou.pop(499)) == (499, 500)
ref = [495, 496, 497, 498, 501, 502, 503, 504, 505, 506]
assert [thou[i] for i in range(495, 505)] == ref
assert len(thou) == 996
while len(thou) > 600:
dead_idx_len = len(thou.dead_indices)
dead_idx_count = thou._dead_index_count
thou.pop(0)
new_dead_idx_len = len(thou.dead_indices)
if new_dead_idx_len < dead_idx_len:
assert dead_idx_count > 0
assert len(thou) == 600
assert thou._dead_index_count == 67
assert not any([thou[i] is _MISSING for i in range(len(thou))])
thou &= IndexedSet(range(500, 503))
assert thou == IndexedSet([501, 502])
return
def big_popper():
from os import urandom
import time
big_set = IndexedSet(range(100000))
rands = [ord(r) for r in urandom(len(big_set))]
start_time, start_size = time.time(), len(big_set)
while len(big_set) > 10000:
if len(big_set) % 10000 == 0:
print(len(big_set) / 10000)
rand = rands.pop()
big_set.pop(rand)
big_set.pop(-rand)
end_time, end_size = time.time(), len(big_set)
print()
print('popped %s items in %s seconds' % (start_size - end_size,
end_time - start_time))
| true | true |
f71f588ae8e89518a40ae039426b0803c80db5e6 | 27,740 | py | Python | dask/array/top.py | migueltorrescosta/dask | 60f488cf7358d14c523f84de9afbb10022818367 | [
"BSD-3-Clause"
] | 1 | 2019-05-24T00:46:48.000Z | 2019-05-24T00:46:48.000Z | dask/array/top.py | migueltorrescosta/dask | 60f488cf7358d14c523f84de9afbb10022818367 | [
"BSD-3-Clause"
] | null | null | null | dask/array/top.py | migueltorrescosta/dask | 60f488cf7358d14c523f84de9afbb10022818367 | [
"BSD-3-Clause"
] | null | null | null | import itertools
import numbers
import numpy as np
import toolz
from .. import base, core, sharedict, utils
from ..compatibility import apply, Mapping
from ..delayed import to_task_dask
from ..optimization import SubgraphCallable
def subs(task, substitution):
    """Recursively substitute values throughout ``task``.

    Like ``dask.core.subs`` but applies a whole mapping of substitutions at
    once; clarity is favored over micro performance.
    """
    if isinstance(task, dict):
        return {key: subs(val, substitution) for key, val in task.items()}
    container = type(task)
    if container in (tuple, list, set):
        return container(subs(item, substitution) for item in task)
    # Leaf value: look it up directly; unhashable or unmapped values pass
    # through unchanged.
    try:
        return substitution[task]
    except (KeyError, TypeError):
        return task
def index_subs(ind, substitution):
    """Map each index symbol through ``substitution`` (identity when absent).

    Accepts a tuple or a string of index symbols and always returns a tuple;
    ``None`` passes through unchanged.
    """
    if ind is None:
        return None
    return tuple(substitution.get(symbol, symbol) for symbol in ind)
def atop_token(i, prefix='_'):
    """Return the placeholder name for positional argument ``i`` (e.g. ``'_0'``)."""
    return '%s%d' % (prefix, i)
def _top(func, output, output_indices, *arrind_pairs, **kwargs):
    """ Create a TOP symbolic mutable mapping, given the inputs to top

    This is like the ``top`` function, but rather than construct a dict, it
    returns a symbolic TOP layer inside a ShareDict graph, so consecutive
    layers can later be fused without materializing tasks.

    ``numblocks`` is required in ``kwargs``; ``concatenate`` and ``new_axes``
    are optional.

    See Also
    --------
    top
    TOP
    """
    numblocks = kwargs.pop('numblocks')
    concatenate = kwargs.pop('concatenate', None)
    new_axes = kwargs.pop('new_axes', {})
    graph = sharedict.ShareDict()
    # Transform indices to canonical elements
    # We use terms like _0, and _1 rather than provided index elements
    arrind_pairs = list(arrind_pairs)
    unique_indices = {i for ii in arrind_pairs[1::2]
                      if ii is not None
                      for i in ii} | set(output_indices)
    sub = {k: atop_token(i, '.')
           for i, k in enumerate(sorted(unique_indices))}
    output_indices = index_subs(tuple(output_indices), sub)
    arrind_pairs[1::2] = [tuple(a) if a is not None else a
                          for a in arrind_pairs[1::2]]
    arrind_pairs[1::2] = [index_subs(a, sub)
                          for a in arrind_pairs[1::2]]
    new_axes = {index_subs((k,), sub)[0]: v for k, v in new_axes.items()}
    # Unpack dask values in non-array arguments
    # (an index of None marks a literal argument, which may itself carry a graph)
    argpairs = list(toolz.partition(2, arrind_pairs))
    for i, (arg, ind) in enumerate(argpairs):
        if ind is None:
            arg2, dsk2 = to_task_dask(arg)
            if dsk2:
                graph.update(dsk2)
            argpairs[i] = (arg2, ind)
    # separate argpairs into two separate tuples
    inputs = tuple([name for name, _ in argpairs])
    inputs_indices = tuple([index for _, index in argpairs])
    # Unpack delayed objects in kwargs
    if kwargs:
        kwargs, dsk_kwargs = to_task_dask(kwargs)
        # replace keys in kwargs with _0 tokens
        new_keys = list(core.get_dependencies(dsk_kwargs, task=kwargs))
        new_tokens = tuple(atop_token(i) for i in range(len(inputs), len(inputs) + len(new_keys)))
        sub = dict(zip(new_keys, new_tokens))
        inputs = inputs + tuple(new_keys)
        inputs_indices = inputs_indices + (None,) * len(new_keys)
        kwargs = subs(kwargs, sub)
        graph.update(dsk_kwargs)
    indices = [(k, v) for k, v in zip(inputs, inputs_indices)]
    keys = tuple(map(atop_token, range(len(inputs))))
    # Construct local graph
    if not kwargs:
        dsk = {output: (func,) + keys}
    else:
        _keys = list(keys)
        if new_keys:
            # kwargs-only dependencies are passed through ``apply``, not positionally
            _keys = _keys[:-len(new_keys)]
        dsk = {output: (apply, func, _keys, kwargs)}
    # Construct final output
    top = TOP(output, output_indices, dsk, indices,
              numblocks=numblocks, concatenate=concatenate, new_axes=new_axes)
    graph.update_with_key(top, output)
    graph.dependencies = {output: {arg for arg, ind in argpairs if ind is not None}}
    return graph
class TOP(Mapping):
    """ Tensor Operation

    This is a lazily constructed mapping for tensor operation graphs.
    This defines a dictionary using an operation and an indexing pattern.
    It is built for many operations like elementwise, transpose, tensordot, and
    so on.  We choose to keep these as symbolic mappings rather than raw
    dictionaries because we are able to fuse them during optimization,
    sometimes resulting in much lower overhead.

    See Also
    --------
    top
    atop
    """
    def __init__(self, output, output_indices, dsk, indices,
                 numblocks, concatenate=None, new_axes=None):
        # output: name of the produced array; output_indices: its index pattern
        self.output = output
        self.output_indices = tuple(output_indices)
        # dsk: the per-block subgraph; indices: (input name, index pattern) pairs
        self.dsk = dsk
        self.indices = tuple((name, tuple(ind) if ind is not None else ind)
                             for name, ind in indices)
        self.numblocks = numblocks
        self.concatenate = concatenate
        self.new_axes = new_axes or {}

    @property
    def _dict(self):
        # Materialize the concrete task dict via ``top`` once, then cache it.
        if hasattr(self, '_cached_dict'):
            return self._cached_dict
        else:
            keys = tuple(map(atop_token, range(len(self.indices))))
            func = SubgraphCallable(self.dsk, self.output, keys)
            self._cached_dict = top(
                func,
                self.output,
                self.output_indices,
                *list(toolz.concat(self.indices)),
                new_axes=self.new_axes,
                numblocks=self.numblocks,
                concatenate=self.concatenate
            )
        return self._cached_dict

    def __getitem__(self, key):
        return self._dict[key]

    def __iter__(self):
        return iter(self._dict)

    def __len__(self):
        # Number of output tasks = product of output block counts per dimension.
        return int(np.prod(list(self._out_numblocks().values())))

    def _out_numblocks(self):
        # Map each output index to its block count, taking the max across
        # inputs so broadcasting (count 1) does not shrink the output.
        d = {}
        indices = {k: v for k, v in self.indices if v is not None}
        for k, v in self.numblocks.items():
            for a, b in zip(indices[k], v):
                d[a] = max(d.get(a, 0), b)
        return {k: v for k, v in d.items() if k in self.output_indices}
def top(func, output, out_indices, *arrind_pairs, **kwargs):
    """ Tensor operation

    Applies a function, ``func``, across blocks from many different input
    dasks.  We arrange the pattern with which those blocks interact with sets
    of matching indices.  E.g.::

        top(func, 'z', 'i', 'x', 'i', 'y', 'i')

    yield an embarrassingly parallel communication pattern and is read as

        $$ z_i = func(x_i, y_i) $$

    More complex patterns may emerge, including multiple indices::

        top(func, 'z', 'ij', 'x', 'ij', 'y', 'ji')

        $$ z_{ij} = func(x_{ij}, y_{ji}) $$

    Indices missing in the output but present in the inputs results in many
    inputs being sent to one function (see examples).

    Examples
    --------
    Simple embarrassing map operation

    >>> inc = lambda x: x + 1
    >>> top(inc, 'z', 'ij', 'x', 'ij', numblocks={'x': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (inc, ('x', 0, 0)),
     ('z', 0, 1): (inc, ('x', 0, 1)),
     ('z', 1, 0): (inc, ('x', 1, 0)),
     ('z', 1, 1): (inc, ('x', 1, 1))}

    Simple operation on two datasets

    >>> add = lambda x, y: x + y
    >>> top(add, 'z', 'ij', 'x', 'ij', 'y', 'ij', numblocks={'x': (2, 2),
    ...                                                      'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
     ('z', 0, 1): (add, ('x', 0, 1), ('y', 0, 1)),
     ('z', 1, 0): (add, ('x', 1, 0), ('y', 1, 0)),
     ('z', 1, 1): (add, ('x', 1, 1), ('y', 1, 1))}

    Operation that flips one of the datasets

    >>> addT = lambda x, y: x + y.T  # Transpose each chunk
    >>> #                 z_ij ~ x_ij y_ji
    >>> #               ..         ..    .. notice swap
    >>> top(addT, 'z', 'ij', 'x', 'ij', 'y', 'ji', numblocks={'x': (2, 2),
    ...                                                       'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
     ('z', 0, 1): (add, ('x', 0, 1), ('y', 1, 0)),
     ('z', 1, 0): (add, ('x', 1, 0), ('y', 0, 1)),
     ('z', 1, 1): (add, ('x', 1, 1), ('y', 1, 1))}

    Dot product with contraction over ``j`` index.  Yields list arguments

    >>> top(dotmany, 'z', 'ik', 'x', 'ij', 'y', 'jk', numblocks={'x': (2, 2),
    ...                                                          'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (dotmany, [('x', 0, 0), ('x', 0, 1)],
                            [('y', 0, 0), ('y', 1, 0)]),
     ('z', 0, 1): (dotmany, [('x', 0, 0), ('x', 0, 1)],
                            [('y', 0, 1), ('y', 1, 1)]),
     ('z', 1, 0): (dotmany, [('x', 1, 0), ('x', 1, 1)],
                            [('y', 0, 0), ('y', 1, 0)]),
     ('z', 1, 1): (dotmany, [('x', 1, 0), ('x', 1, 1)],
                            [('y', 0, 1), ('y', 1, 1)])}

    Pass ``concatenate=True`` to concatenate arrays ahead of time

    >>> top(f, 'z', 'i', 'x', 'ij', 'y', 'ij', concatenate=True,
    ...     numblocks={'x': (2, 2), 'y': (2, 2,)})  # doctest: +SKIP
    {('z', 0): (f, (concatenate_axes, [('x', 0, 0), ('x', 0, 1)], (1,)),
                   (concatenate_axes, [('y', 0, 0), ('y', 0, 1)], (1,)))
     ('z', 1): (f, (concatenate_axes, [('x', 1, 0), ('x', 1, 1)], (1,)),
                   (concatenate_axes, [('y', 1, 0), ('y', 1, 1)], (1,)))}

    Supports Broadcasting rules

    >>> top(add, 'z', 'ij', 'x', 'ij', 'y', 'ij', numblocks={'x': (1, 2),
    ...                                                      'y': (2, 2)})  # doctest: +SKIP
    {('z', 0, 0): (add, ('x', 0, 0), ('y', 0, 0)),
     ('z', 0, 1): (add, ('x', 0, 1), ('y', 0, 1)),
     ('z', 1, 0): (add, ('x', 0, 0), ('y', 1, 0)),
     ('z', 1, 1): (add, ('x', 0, 1), ('y', 1, 1))}

    Support keyword arguments with apply

    >>> def f(a, b=0): return a + b
    >>> top(f, 'z', 'i', 'x', 'i', numblocks={'x': (2,)}, b=10)  # doctest: +SKIP
    {('z', 0): (apply, f, [('x', 0)], {'b': 10}),
     ('z', 1): (apply, f, [('x', 1)], {'b': 10})}

    Include literals by indexing with ``None``

    >>> top(add, 'z', 'i', 'x', 'i', 100, None, numblocks={'x': (2,)})  # doctest: +SKIP
    {('z', 0): (add, ('x', 0), 100),
     ('z', 1): (add, ('x', 1), 100)}

    See Also
    --------
    atop
    """
    from .core import broadcast_dimensions, zero_broadcast_dimensions, concatenate_axes
    numblocks = kwargs.pop('numblocks')
    concatenate = kwargs.pop('concatenate', None)
    new_axes = kwargs.pop('new_axes', {})
    argpairs = list(toolz.partition(2, arrind_pairs))
    assert set(numblocks) == {name for name, ind in argpairs if ind is not None}
    all_indices = {x for _, ind in argpairs if ind for x in ind}
    # Dummy indices appear in inputs but not in the output: they are contracted.
    dummy_indices = all_indices - set(out_indices)
    # Dictionary mapping {i: 3, j: 4, ...} for i, j, ... the dimensions
    dims = broadcast_dimensions(argpairs, numblocks)
    for k in new_axes:
        dims[k] = 1
    # (0, 0), (0, 1), (0, 2), (1, 0), ...
    keytups = list(itertools.product(*[range(dims[i]) for i in out_indices]))
    # {i: 0, j: 0}, {i: 0, j: 1}, ...
    keydicts = [dict(zip(out_indices, tup)) for tup in keytups]
    # {j: [1, 2, 3], ...}  For j a dummy index of dimension 3
    dummies = dict((i, list(range(dims[i]))) for i in dummy_indices)
    dsk = {}
    # Create argument lists
    valtups = []
    for kd in keydicts:
        args = []
        for arg, ind in argpairs:
            if ind is None:
                # Literal argument: passed through unchanged to every task.
                args.append(arg)
            else:
                tups = lol_tuples((arg,), ind, kd, dummies)
                if any(nb == 1 for nb in numblocks[arg]):
                    # Broadcasting: a dimension with one block always uses index 0.
                    tups2 = zero_broadcast_dimensions(tups, numblocks[arg])
                else:
                    tups2 = tups
                if concatenate and isinstance(tups2, list):
                    axes = [n for n, i in enumerate(ind) if i in dummies]
                    tups2 = (concatenate_axes, tups2, axes)
                args.append(tups2)
        valtups.append(args)
    if not kwargs:  # will not be used in an apply, should be a tuple
        valtups = [tuple(vt) for vt in valtups]
    # Add heads to tuples
    keys = [(output,) + kt for kt in keytups]
    # Unpack delayed objects in kwargs
    if kwargs:
        task, dsk2 = to_task_dask(kwargs)
        if dsk2:
            dsk.update(utils.ensure_dict(dsk2))
            kwargs2 = task
        else:
            kwargs2 = kwargs
        vals = [(apply, func, vt, kwargs2) for vt in valtups]
    else:
        vals = [(func,) + vt for vt in valtups]
    dsk.update(dict(zip(keys, vals)))
    return dsk
def atop(func, out_ind, *args, **kwargs):
    """ Tensor operation: Generalized inner and outer products

    A broad class of blocked algorithms and patterns can be specified with a
    concise multi-index notation.  The ``atop`` function applies an in-memory
    function across multiple blocks of multiple inputs in a variety of ways.
    Many dask.array operations are special cases of atop including elementwise,
    broadcasting, reductions, tensordot, and transpose.

    Parameters
    ----------
    func : callable
        Function to apply to individual tuples of blocks
    out_ind : iterable
        Block pattern of the output, something like 'ijk' or (1, 2, 3)
    *args : sequence of Array, index pairs
        Sequence like (x, 'ij', y, 'jk', z, 'i')
    **kwargs : dict
        Extra keyword arguments to pass to function
    dtype : np.dtype
        Datatype of resulting array.
    concatenate : bool, keyword only
        If true concatenate arrays along dummy indices, else provide lists
    adjust_chunks : dict
        Dictionary mapping index to function to be applied to chunk sizes
    new_axes : dict, keyword only
        New indexes and their dimension lengths

    Examples
    --------
    2D embarrassingly parallel operation from two arrays, x, and y.

    >>> z = atop(operator.add, 'ij', x, 'ij', y, 'ij', dtype='f8')  # z = x + y  # doctest: +SKIP

    Outer product multiplying x by y, two 1-d vectors

    >>> z = atop(operator.mul, 'ij', x, 'i', y, 'j', dtype='f8')  # doctest: +SKIP

    z = x.T

    >>> z = atop(np.transpose, 'ji', x, 'ij', dtype=x.dtype)  # doctest: +SKIP

    The transpose case above is illustrative because it does same transposition
    both on each in-memory block by calling ``np.transpose`` and on the order
    of the blocks themselves, by switching the order of the index ``ij -> ji``.

    We can compose these same patterns with more variables and more complex
    in-memory functions

    z = X + Y.T

    >>> z = atop(lambda x, y: x + y.T, 'ij', x, 'ij', y, 'ji', dtype='f8')  # doctest: +SKIP

    Any index, like ``i`` missing from the output index is interpreted as a
    contraction (note that this differs from Einstein convention; repeated
    indices do not imply contraction.)  In the case of a contraction the passed
    function should expect an iterable of blocks on any array that holds that
    index.  To receive arrays concatenated along contracted dimensions instead
    pass ``concatenate=True``.

    Inner product multiplying x by y, two 1-d vectors

    >>> def sequence_dot(x_blocks, y_blocks):
    ...     result = 0
    ...     for x, y in zip(x_blocks, y_blocks):
    ...         result += x.dot(y)
    ...     return result

    >>> z = atop(sequence_dot, '', x, 'i', y, 'i', dtype='f8')  # doctest: +SKIP

    Add new single-chunk dimensions with the ``new_axes=`` keyword, including
    the length of the new dimension.  New dimensions will always be in a single
    chunk.

    >>> def f(x):
    ...     return x[:, None] * np.ones((1, 5))

    >>> z = atop(f, 'az', x, 'a', new_axes={'z': 5}, dtype=x.dtype)  # doctest: +SKIP

    If the applied function changes the size of each chunk you can specify this
    with a ``adjust_chunks={...}`` dictionary holding a function for each index
    that modifies the dimension size in that index.

    >>> def double(x):
    ...     return np.concatenate([x, x])

    >>> y = atop(double, 'ij', x, 'ij',
    ...          adjust_chunks={'i': lambda n: 2 * n}, dtype=x.dtype)  # doctest: +SKIP

    Include literals by indexing with None

    >>> y = atop(add, 'ij', x, 'ij', 1234, None, dtype=x.dtype)  # doctest: +SKIP

    See Also
    --------
    top - dict formulation of this function, contains most logic
    """
    out = kwargs.pop('name', None)      # May be None at this point
    token = kwargs.pop('token', None)
    dtype = kwargs.pop('dtype', None)
    adjust_chunks = kwargs.pop('adjust_chunks', None)
    new_axes = kwargs.get('new_axes', {})
    # Input Validation
    if len(set(out_ind)) != len(out_ind):
        raise ValueError("Repeated elements not allowed in output index",
                         [k for k, v in toolz.frequencies(out_ind).items() if v > 1])
    new = (set(out_ind)
           - {a for arg in args[1::2] if arg is not None for a in arg}
           - set(new_axes or ()))
    if new:
        raise ValueError("Unknown dimension", new)
    from .core import Array, unify_chunks, normalize_arg
    if dtype is None:
        raise ValueError("Must specify dtype of output array")
    # Rechunk inputs so that shared indices have matching chunking.
    chunkss, arrays = unify_chunks(*args)
    for k, v in new_axes.items():
        chunkss[k] = (v,)
    arginds = list(zip(arrays, args[1::2]))
    for arg, ind in arginds:
        if hasattr(arg, 'ndim') and hasattr(ind, '__len__') and arg.ndim != len(ind):
            raise ValueError("Index string %s does not match array dimension %d"
                             % (ind, arg.ndim))
    numblocks = {a.name: a.numblocks for a, ind in arginds if ind is not None}
    argindsstr = list(toolz.concat([(normalize_arg(a) if ind is None else a.name, ind)
                                    for a, ind in arginds]))
    # Finish up the name
    if not out:
        out = '%s-%s' % (token or utils.funcname(func).strip('_'),
                         base.tokenize(func, out_ind, argindsstr, dtype, **kwargs))
    kwargs2 = {k: normalize_arg(v) for k, v in kwargs.items()}
    dsk = _top(func, out, out_ind, *argindsstr, numblocks=numblocks, **kwargs2)
    dsks = [a.dask for a, ind in arginds if ind is not None]
    # Output chunking, optionally reshaped by adjust_chunks per index.
    chunks = [chunkss[i] for i in out_ind]
    if adjust_chunks:
        for i, ind in enumerate(out_ind):
            if ind in adjust_chunks:
                if callable(adjust_chunks[ind]):
                    chunks[i] = tuple(map(adjust_chunks[ind], chunks[i]))
                elif isinstance(adjust_chunks[ind], numbers.Integral):
                    chunks[i] = tuple(adjust_chunks[ind] for _ in chunks[i])
                elif isinstance(adjust_chunks[ind], (tuple, list)):
                    chunks[i] = tuple(adjust_chunks[ind])
                else:
                    raise NotImplementedError(
                        "adjust_chunks values must be callable, int, or tuple")
    chunks = tuple(chunks)
    return Array(sharedict.merge((out, dsk), *dsks,
                                 dependencies={out: {a.name for a, ind in arginds if ind is not None}}),
                 out, chunks, dtype=dtype)
def lol_tuples(head, ind, values, dummies):
    """Expand index characters into (nested lists of) tuple keys.

    Walks ``ind`` one character at a time: characters found in ``values``
    extend the key tuple directly, while characters found in ``dummies``
    fan out into one nested list level per dummy dimension.

    Parameters
    ----------
    head : tuple
        Key prefix accumulated so far.
    ind : Iterable
        Index characters not yet consumed.
    values : dict
        Concrete value for every non-dummy index character.
    dummies : dict
        Iterable of candidate values for every dummy index character.

    Examples
    --------
    >>> lol_tuples(('x',), 'ij', {'i': 1, 'j': 0}, {})
    ('x', 1, 0)

    >>> lol_tuples(('x',), 'ij', {'i': 1}, {'j': range(3)})
    [('x', 1, 0), ('x', 1, 1), ('x', 1, 2)]
    """
    # Consume the non-dummy prefix iteratively; recurse only on fan-out.
    while ind and ind[0] not in dummies:
        head = head + (values[ind[0]],)
        ind = ind[1:]
    if not ind:
        return head
    return [lol_tuples(head + (v,), ind[1:], values, dummies)
            for v in dummies[ind[0]]]
def optimize_atop(full_graph, keys=()):
    """ High level optimization of stacked TOP layers

    For operations that have multiple TOP operations one after the other, like
    ``x.T + 123`` we can fuse these into a single TOP operation. This happens
    before any actual tasks are generated, and so can reduce overhead.

    This finds groups of TOP operations that can be safely fused, and then
    passes them to ``rewrite_atop`` for rewriting.

    Parameters
    ----------
    full_graph: ShareDict
    keys: Iterable
        The keys of all outputs of all collections.
        Used to make sure that we don't fuse a layer needed by an output

    Returns
    -------
    sharedict : ShareDict

    See Also
    --------
    rewrite_atop
    """
    # Layer names that back collection outputs; these must survive unfused.
    keep = {k[0] if type(k) is tuple else k for k in keys}
    layers = full_graph.dicts
    dependents = core.reverse_dict(full_graph.dependencies)
    # Layers nothing else depends on: starting points for the walk.
    roots = {k for k in full_graph.dicts
             if not dependents.get(k)}
    stack = list(roots)
    out = {}
    dependencies = {}
    seen = set()
    while stack:
        layer = stack.pop()
        if layer in seen or layer not in layers:
            continue
        seen.add(layer)
        # Outer loop walks through possible output TOP layers
        if isinstance(layers[layer], TOP):
            top_layers = {layer}
            deps = set(top_layers)
            while deps:  # we gather as many sub-layers as we can
                dep = deps.pop()
                if dep not in layers:
                    stack.append(dep)
                    continue
                # Only TOP layers can participate in the fusion.
                if not isinstance(layers[dep], TOP):
                    stack.append(dep)
                    continue
                # Never fuse away a layer that a collection output points at.
                if (dep != layer and dep in keep):
                    stack.append(dep)
                    continue
                # Mismatched concatenate settings cannot share one layer.
                if layers[dep].concatenate != layers[layer].concatenate:
                    stack.append(dep)
                    continue
                # passed everything, proceed
                top_layers.add(dep)
                # traverse further to this child's children
                for d in full_graph.dependencies.get(dep, ()):
                    # Don't allow reductions to proceed
                    output_indices = set(layers[dep].output_indices)
                    input_indices = {i for _, ind in layers[dep].indices if ind for i in ind}
                    # Fuse through ``d`` only when ``dep`` is its sole
                    # dependent and ``dep`` contracts no dimension.
                    if len(dependents[d]) <= 1 and output_indices.issuperset(input_indices):
                        deps.add(d)
                    else:
                        stack.append(d)
            # Merge these TOP layers into one
            new_layer = rewrite_atop([layers[l] for l in top_layers])
            out[layer] = new_layer
            dependencies[layer] = {k for k, v in new_layer.indices if v is not None}
        else:
            # Non-TOP layers are copied through untouched.
            out[layer] = layers[layer]
            dependencies[layer] = full_graph.dependencies.get(layer, set())
            stack.extend(full_graph.dependencies.get(layer, ()))
    return sharedict.ShareDict(out, dependencies)
def rewrite_atop(inputs):
    """ Rewrite a stack of atop expressions into a single atop expression

    Given a set of TOP layers, combine them into a single layer. The provided
    layers are expected to fit well together. That job is handled by
    ``optimize_atop``

    Parameters
    ----------
    inputs : List[TOP]

    Returns
    -------
    top : TOP

    See Also
    --------
    optimize_atop
    """
    inputs = {inp.output: inp for inp in inputs}
    # Dependencies *within this group of layers* only.
    dependencies = {inp.output: {d for d, v in inp.indices
                                 if v is not None and d in inputs}
                    for inp in inputs.values()}
    dependents = core.reverse_dict(dependencies)
    new_index_iter = (c + (str(d) if d else '')  # A, B, ... A1, B1, ...
                      for d in itertools.count()
                      for c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ')
    # The root is the single layer nothing else in the group depends on.
    [root] = [k for k, v in dependents.items() if not v]
    # Our final results. These will change during fusion below
    indices = list(inputs[root].indices)
    new_axes = inputs[root].new_axes
    concatenate = inputs[root].concatenate
    dsk = dict(inputs[root].dsk)
    changed = True
    while changed:
        changed = False
        for i, (dep, ind) in enumerate(indices):
            if ind is None:
                continue
            if dep not in inputs:
                continue
            changed = True
            # Replace _n with dep name in existing tasks
            # (inc, _0) -> (inc, 'b')
            dsk = {k: subs(v, {atop_token(i): dep}) for k, v in dsk.items()}
            # Remove current input from input indices
            # [('a', 'i'), ('b', 'i')] -> [('a', 'i')]
            _, current_dep_indices = indices.pop(i)
            sub = {atop_token(i): atop_token(i - 1) for i in range(i + 1, len(indices) + 1)}
            dsk = subs(dsk, sub)
            # Change new input_indices to match give index from current computation
            # [('c', j')] -> [('c', 'i')]
            new_indices = inputs[dep].indices
            sub = dict(zip(inputs[dep].output_indices, current_dep_indices))
            # Indices contracted inside ``dep`` get fresh names (A, B, ...)
            # so they cannot collide with anything already in use.
            contracted = {x for _, j in new_indices
                          if j is not None
                          for x in j
                          if x not in inputs[dep].output_indices}
            extra = dict(zip(contracted, new_index_iter))
            sub.update(extra)
            new_indices = [(x, index_subs(j, sub)) for x, j in new_indices]
            # Update new_axes
            for k, v in inputs[dep].new_axes.items():
                new_axes[sub[k]] = v
            # Bump new inputs up in list
            sub = {}
            for i, index in enumerate(new_indices):
                try:
                    contains = index in indices
                except (ValueError, TypeError):
                    contains = False
                if contains:  # use old inputs if available
                    sub[atop_token(i)] = atop_token(indices.index(index))
                else:
                    sub[atop_token(i)] = atop_token(len(indices))
                    indices.append(index)
            new_dsk = subs(inputs[dep].dsk, sub)
            # indices.extend(new_indices)
            dsk.update(new_dsk)
    indices = [(a, tuple(b) if isinstance(b, list) else b)
               for a, b in indices]
    # De-duplicate indices like [(a, ij), (b, i), (a, ij)] -> [(a, ij), (b, i)]
    # Make sure that we map everything else appropriately as we remove inputs
    new_indices = []
    seen = {}
    sub = {}  # like {_0: _0, _1: _0, _2: _1}
    for i, x in enumerate(indices):
        if x[1] is not None and x in seen:
            sub[i] = seen[x]
        else:
            if x[1] is not None:
                seen[x] = len(new_indices)
            sub[i] = len(new_indices)
            new_indices.append(x)
    sub = {atop_token(k): atop_token(v) for k, v in sub.items()}
    dsk = {k: subs(v, sub) for k, v in dsk.items()}
    # Drop numblocks entries for inputs that were fused away entirely.
    indices_check = {k for k, v in indices if v is not None}
    numblocks = toolz.merge([inp.numblocks for inp in inputs.values()])
    numblocks = {k: v for k, v in numblocks.items()
                 if v is None or k in indices_check}
    out = TOP(root, inputs[root].output_indices, dsk, new_indices,
              numblocks=numblocks, new_axes=new_axes, concatenate=concatenate)
    return out
| 36.308901 | 107 | 0.548594 | import itertools
import numbers
import numpy as np
import toolz
from .. import base, core, sharedict, utils
from ..compatibility import apply, Mapping
from ..delayed import to_task_dask
from ..optimization import SubgraphCallable
def subs(task, substitution):
    """Recursively replace leaf values of ``task`` using ``substitution``.

    Dicts have their values (not keys) rewritten; tuples, lists and sets
    are rebuilt with substituted contents.  Any other value is looked up
    in ``substitution`` and returned unchanged when absent or unhashable.
    """
    if isinstance(task, dict):
        return {key: subs(val, substitution) for key, val in task.items()}
    container = type(task)
    if container in (tuple, list, set):
        return container(subs(item, substitution) for item in task)
    try:
        return substitution[task]
    except (KeyError, TypeError):
        # Not mapped, or not hashable: leave the leaf as-is.
        return task
def index_subs(ind, substitution):
    """Rename each index character of ``ind`` via ``substitution``.

    Characters missing from the mapping are kept unchanged; a ``None``
    index (scalar argument) passes straight through.
    """
    if ind is None:
        return None
    return tuple(substitution.get(char, char) for char in ind)
def atop_token(i, prefix='_'):
    """Return the placeholder name (e.g. ``'_0'``) for input number ``i``."""
    return '%s%d' % (prefix, i)
def _top(func, output, output_indices, *arrind_pairs, **kwargs):
    """Build a lazy ``TOP`` layer for ``func`` and wrap it in a ShareDict.

    Takes the same alternating ``(array_name, index)`` arguments as ``top``
    but, instead of materializing every blocked task, stores one symbolic
    ``TOP`` layer keyed by ``output``.  Remaining ``kwargs`` are forwarded
    to ``func``; dask collections hiding in arguments or kwargs are
    unpacked into the returned graph.
    """
    numblocks = kwargs.pop('numblocks')
    concatenate = kwargs.pop('concatenate', None)
    new_axes = kwargs.pop('new_axes', {})
    graph = sharedict.ShareDict()
    # Normalize index characters to '.0', '.1', ... so that equivalent
    # layers tokenize identically regardless of the letters used.
    arrind_pairs = list(arrind_pairs)
    unique_indices = {i for ii in arrind_pairs[1::2]
                      if ii is not None
                      for i in ii} | set(output_indices)
    sub = {k: atop_token(i, '.')
           for i, k in enumerate(sorted(unique_indices))}
    output_indices = index_subs(tuple(output_indices), sub)
    arrind_pairs[1::2] = [tuple(a) if a is not None else a
                          for a in arrind_pairs[1::2]]
    arrind_pairs[1::2] = [index_subs(a, sub)
                          for a in arrind_pairs[1::2]]
    new_axes = {index_subs((k,), sub)[0]: v for k, v in new_axes.items()}
    # Index-less (scalar) arguments may be dask collections: extract their
    # graphs and replace them by their task form.
    argpairs = list(toolz.partition(2, arrind_pairs))
    for i, (arg, ind) in enumerate(argpairs):
        if ind is None:
            arg2, dsk2 = to_task_dask(arg)
            if dsk2:
                graph.update(dsk2)
                argpairs[i] = (arg2, ind)
    inputs = tuple([name for name, _ in argpairs])
    inputs_indices = tuple([index for _, index in argpairs])
    # Dask collections hiding inside kwargs become extra index-less inputs
    # so the TOP layer itself stays a plain, picklable recipe.
    if kwargs:
        kwargs, dsk_kwargs = to_task_dask(kwargs)
        new_keys = list(core.get_dependencies(dsk_kwargs, task=kwargs))
        new_tokens = tuple(atop_token(i) for i in range(len(inputs), len(inputs) + len(new_keys)))
        sub = dict(zip(new_keys, new_tokens))
        inputs = inputs + tuple(new_keys)
        inputs_indices = inputs_indices + (None,) * len(new_keys)
        kwargs = subs(kwargs, sub)
        graph.update(dsk_kwargs)
    indices = [(k, v) for k, v in zip(inputs, inputs_indices)]
    keys = tuple(map(atop_token, range(len(inputs))))
    # The symbolic task: kwargs (if any) go through apply(); the extra
    # kwarg-derived inputs are not passed positionally.
    if not kwargs:
        dsk = {output: (func,) + keys}
    else:
        _keys = list(keys)
        if new_keys:
            _keys = _keys[:-len(new_keys)]
        dsk = {output: (apply, func, _keys, kwargs)}
    top = TOP(output, output_indices, dsk, indices,
              numblocks=numblocks, concatenate=concatenate, new_axes=new_axes)
    graph.update_with_key(top, output)
    graph.dependencies = {output: {arg for arg, ind in argpairs if ind is not None}}
    return graph
class TOP(Mapping):
    """Lazy, dict-like task-graph layer for one blocked tensor operation.

    Stores the recipe of a ``top`` call (task template ``dsk``, input
    ``indices``, ``numblocks``, ``new_axes``, ``concatenate``) and only
    materializes the concrete per-block task dictionary — via ``top`` with
    a ``SubgraphCallable`` — on first mapping access.
    """

    def __init__(self, output, output_indices, dsk, indices,
                 numblocks, concatenate=None, new_axes=None):
        self.output = output
        self.output_indices = tuple(output_indices)
        self.dsk = dsk
        # Normalize every index to a tuple (None marks scalar arguments).
        self.indices = tuple((name, tuple(ind) if ind is not None else ind)
                             for name, ind in indices)
        self.numblocks = numblocks
        self.concatenate = concatenate
        self.new_axes = new_axes or {}

    @property
    def _dict(self):
        # Materialize (and cache) the concrete task dictionary on demand.
        if hasattr(self, '_cached_dict'):
            return self._cached_dict
        else:
            keys = tuple(map(atop_token, range(len(self.indices))))
            func = SubgraphCallable(self.dsk, self.output, keys)
            self._cached_dict = top(
                func,
                self.output,
                self.output_indices,
                *list(toolz.concat(self.indices)),
                new_axes=self.new_axes,
                numblocks=self.numblocks,
                concatenate=self.concatenate
            )
        return self._cached_dict

    def __getitem__(self, key):
        return self._dict[key]

    def __iter__(self):
        return iter(self._dict)

    def __len__(self):
        # Number of output blocks: product of block counts per output axis.
        return int(np.prod(list(self._out_numblocks().values())))

    def _out_numblocks(self):
        # Map each output index to its block count, broadcasting via max().
        d = {}
        indices = {k: v for k, v in self.indices if v is not None}
        for k, v in self.numblocks.items():
            for a, b in zip(indices[k], v):
                d[a] = max(d.get(a, 0), b)
        return {k: v for k, v in d.items() if k in self.output_indices}
def top(func, output, out_indices, *arrind_pairs, **kwargs):
    """Materialize the full task dict for a blocked tensor operation.

    Given ``func``, the output name/indices, and alternating
    ``(array_name, index_string)`` pairs, emits one task per output block.
    Indices shared with the output align blocks one-to-one; indices absent
    from ``out_indices`` are "dummy" (contracted) dimensions that fan out
    into lists of keys, optionally wrapped in ``concatenate_axes``.
    """
    from .core import broadcast_dimensions, zero_broadcast_dimensions, concatenate_axes
    numblocks = kwargs.pop('numblocks')
    concatenate = kwargs.pop('concatenate', None)
    new_axes = kwargs.pop('new_axes', {})
    argpairs = list(toolz.partition(2, arrind_pairs))
    # Every indexed argument must come with a known block structure.
    assert set(numblocks) == {name for name, ind in argpairs if ind is not None}
    all_indices = {x for _, ind in argpairs if ind for x in ind}
    dummy_indices = all_indices - set(out_indices)
    # Number of blocks along every index, after broadcasting inputs.
    dims = broadcast_dimensions(argpairs, numblocks)
    for k in new_axes:
        dims[k] = 1
    # One output key (hence one task) per combination of output block ids.
    keytups = list(itertools.product(*[range(dims[i]) for i in out_indices]))
    keydicts = [dict(zip(out_indices, tup)) for tup in keytups]
    dummies = dict((i, list(range(dims[i]))) for i in dummy_indices)
    dsk = {}
    valtups = []
    for kd in keydicts:
        args = []
        for arg, ind in argpairs:
            if ind is None:
                # Index-less arguments are passed through unchanged.
                args.append(arg)
            else:
                tups = lol_tuples((arg,), ind, kd, dummies)
                # Size-one axes broadcast: always read block 0 there.
                if any(nb == 1 for nb in numblocks[arg]):
                    tups2 = zero_broadcast_dimensions(tups, numblocks[arg])
                else:
                    tups2 = tups
                if concatenate and isinstance(tups2, list):
                    axes = [n for n, i in enumerate(ind) if i in dummies]
                    tups2 = (concatenate_axes, tups2, axes)
                args.append(tups2)
        valtups.append(args)
    if not kwargs:
        valtups = [tuple(vt) for vt in valtups]
    keys = [(output,) + kt for kt in keytups]
    if kwargs:
        # Unpack any dask collections hiding in kwargs into the graph.
        task, dsk2 = to_task_dask(kwargs)
        if dsk2:
            dsk.update(utils.ensure_dict(dsk2))
            kwargs2 = task
        else:
            kwargs2 = kwargs
        vals = [(apply, func, vt, kwargs2) for vt in valtups]
    else:
        vals = [(func,) + vt for vt in valtups]
    dsk.update(dict(zip(keys, vals)))
    return dsk
def atop(func, out_ind, *args, **kwargs):
    """Array entry point for blocked tensor operations.

    ``args`` alternate dask arrays with their index strings; ``out_ind``
    names the output dimensions.  Recognized kwargs: ``name``/``token``
    (output key naming), ``dtype`` (required), ``adjust_chunks`` (per-index
    chunk adjustment), ``new_axes`` (new broadcast dimensions with their
    size).  All remaining kwargs are forwarded to ``func``.
    """
    out = kwargs.pop('name', None)
    token = kwargs.pop('token', None)
    dtype = kwargs.pop('dtype', None)
    adjust_chunks = kwargs.pop('adjust_chunks', None)
    new_axes = kwargs.get('new_axes', {})
    # Input validation: output indices must be unique and each must come
    # from some input or be declared through new_axes.
    if len(set(out_ind)) != len(out_ind):
        raise ValueError("Repeated elements not allowed in output index",
                         [k for k, v in toolz.frequencies(out_ind).items() if v > 1])
    new = (set(out_ind)
           - {a for arg in args[1::2] if arg is not None for a in arg}
           - set(new_axes or ()))
    if new:
        raise ValueError("Unknown dimension", new)
    from .core import Array, unify_chunks, normalize_arg
    if dtype is None:
        raise ValueError("Must specify dtype of output array")
    # Align the chunking of all inputs along shared indices.
    chunkss, arrays = unify_chunks(*args)
    for k, v in new_axes.items():
        chunkss[k] = (v,)
    arginds = list(zip(arrays, args[1::2]))
    for arg, ind in arginds:
        if hasattr(arg, 'ndim') and hasattr(ind, '__len__') and arg.ndim != len(ind):
            raise ValueError("Index string %s does not match array dimension %d"
                             % (ind, arg.ndim))
    numblocks = {a.name: a.numblocks for a, ind in arginds if ind is not None}
    argindsstr = list(toolz.concat([(normalize_arg(a) if ind is None else a.name, ind)
                                    for a, ind in arginds]))
    # Finish up the name: deterministic token over the full recipe.
    if not out:
        out = '%s-%s' % (token or utils.funcname(func).strip('_'),
                         base.tokenize(func, out_ind, argindsstr, dtype, **kwargs))
    kwargs2 = {k: normalize_arg(v) for k, v in kwargs.items()}
    dsk = _top(func, out, out_ind, *argindsstr, numblocks=numblocks, **kwargs2)
    dsks = [a.dask for a, ind in arginds if ind is not None]
    # Output chunks follow the unified input chunks, optionally adjusted.
    chunks = [chunkss[i] for i in out_ind]
    if adjust_chunks:
        for i, ind in enumerate(out_ind):
            if ind in adjust_chunks:
                if callable(adjust_chunks[ind]):
                    chunks[i] = tuple(map(adjust_chunks[ind], chunks[i]))
                elif isinstance(adjust_chunks[ind], numbers.Integral):
                    chunks[i] = tuple(adjust_chunks[ind] for _ in chunks[i])
                elif isinstance(adjust_chunks[ind], (tuple, list)):
                    chunks[i] = tuple(adjust_chunks[ind])
                else:
                    raise NotImplementedError(
                        "adjust_chunks values must be callable, int, or tuple")
    chunks = tuple(chunks)
    return Array(sharedict.merge((out, dsk), *dsks,
                                 dependencies={out: {a.name for a, ind in arginds if ind is not None}}),
                 out, chunks, dtype=dtype)
def lol_tuples(head, ind, values, dummies):
    """Build (possibly nested lists of) dask keys from index characters.

    Each character of ``ind`` either appends its concrete value from
    ``values`` to the key prefix ``head``, or — when it is a dummy —
    expands into one list level per candidate value in ``dummies``.
    """
    for pos, char in enumerate(ind):
        if char in dummies:
            rest = ind[pos + 1:]
            return [lol_tuples(head + (v,), rest, values, dummies)
                    for v in dummies[char]]
        head = head + (values[char],)
    return head
def optimize_atop(full_graph, keys=()):
    """Fuse chains of stacked TOP layers in ``full_graph`` into single layers.

    Walks the layer graph from its roots, greedily gathering groups of
    compatible TOP layers and handing each group to ``rewrite_atop``.
    Layers named in ``keys`` (collection outputs) are never fused away.
    Returns a new ``sharedict.ShareDict``.
    """
    # Layer names backing collection outputs; these must survive unfused.
    keep = {k[0] if type(k) is tuple else k for k in keys}
    layers = full_graph.dicts
    dependents = core.reverse_dict(full_graph.dependencies)
    # Layers nothing else depends on: starting points for the walk.
    roots = {k for k in full_graph.dicts
             if not dependents.get(k)}
    stack = list(roots)
    out = {}
    dependencies = {}
    seen = set()
    while stack:
        layer = stack.pop()
        if layer in seen or layer not in layers:
            continue
        seen.add(layer)
        if isinstance(layers[layer], TOP):
            # Gather every fusable TOP layer reachable below this one.
            top_layers = {layer}
            deps = set(top_layers)
            while deps:
                dep = deps.pop()
                if dep not in layers:
                    stack.append(dep)
                    continue
                # Only TOP layers can participate in the fusion.
                if not isinstance(layers[dep], TOP):
                    stack.append(dep)
                    continue
                # Never fuse away a layer a collection output points at.
                if (dep != layer and dep in keep):
                    stack.append(dep)
                    continue
                # Mismatched concatenate settings cannot share one layer.
                if layers[dep].concatenate != layers[layer].concatenate:
                    stack.append(dep)
                    continue
                top_layers.add(dep)
                for d in full_graph.dependencies.get(dep, ()):
                    # Don't allow reductions to proceed
                    output_indices = set(layers[dep].output_indices)
                    input_indices = {i for _, ind in layers[dep].indices if ind for i in ind}
                    # Fuse through ``d`` only when ``dep`` is its sole
                    # dependent and ``dep`` contracts no dimension.
                    if len(dependents[d]) <= 1 and output_indices.issuperset(input_indices):
                        deps.add(d)
                    else:
                        stack.append(d)
            # Merge the gathered TOP layers into a single layer.
            new_layer = rewrite_atop([layers[l] for l in top_layers])
            out[layer] = new_layer
            dependencies[layer] = {k for k, v in new_layer.indices if v is not None}
        else:
            # Non-TOP layers are copied through untouched.
            out[layer] = layers[layer]
            dependencies[layer] = full_graph.dependencies.get(layer, set())
            stack.extend(full_graph.dependencies.get(layer, ()))
    return sharedict.ShareDict(out, dependencies)
def rewrite_atop(inputs):
    """Combine a group of TOP layers into one equivalent TOP layer.

    ``inputs`` is a list of TOP layers that ``optimize_atop`` has already
    judged safe to fuse; exactly one of them (the root) is depended on by
    none of the others.  Returns the fused TOP layer rooted there.
    """
    inputs = {inp.output: inp for inp in inputs}
    # Dependencies *within this group of layers* only.
    dependencies = {inp.output: {d for d, v in inp.indices
                                 if v is not None and d in inputs}
                    for inp in inputs.values()}
    dependents = core.reverse_dict(dependencies)
    # Fresh index names: A, B, ..., Z, A1, B1, ...
    new_index_iter = (c + (str(d) if d else '')
                      for d in itertools.count()
                      for c in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ')
    # The root is the single layer nothing else in the group depends on.
    [root] = [k for k, v in dependents.items() if not v]
    # Working state, mutated as inner layers are folded into the root.
    indices = list(inputs[root].indices)
    new_axes = inputs[root].new_axes
    concatenate = inputs[root].concatenate
    dsk = dict(inputs[root].dsk)
    changed = True
    while changed:
        changed = False
        for i, (dep, ind) in enumerate(indices):
            if ind is None:
                continue
            if dep not in inputs:
                continue
            changed = True
            # Replace the placeholder _i with the inner layer's name.
            dsk = {k: subs(v, {atop_token(i): dep}) for k, v in dsk.items()}
            # Remove the fused input and renumber the placeholders after it.
            _, current_dep_indices = indices.pop(i)
            sub = {atop_token(i): atop_token(i - 1) for i in range(i + 1, len(indices) + 1)}
            dsk = subs(dsk, sub)
            # Rename the inner layer's indices to line up with ours; indices
            # contracted inside ``dep`` get fresh names to avoid collisions.
            new_indices = inputs[dep].indices
            sub = dict(zip(inputs[dep].output_indices, current_dep_indices))
            contracted = {x for _, j in new_indices
                          if j is not None
                          for x in j
                          if x not in inputs[dep].output_indices}
            extra = dict(zip(contracted, new_index_iter))
            sub.update(extra)
            new_indices = [(x, index_subs(j, sub)) for x, j in new_indices]
            # Update new_axes
            for k, v in inputs[dep].new_axes.items():
                new_axes[sub[k]] = v
            # Bump new inputs up in list
            sub = {}
            for i, index in enumerate(new_indices):
                try:
                    contains = index in indices
                except (ValueError, TypeError):
                    contains = False
                if contains:  # use old inputs if available
                    sub[atop_token(i)] = atop_token(indices.index(index))
                else:
                    sub[atop_token(i)] = atop_token(len(indices))
                    indices.append(index)
            new_dsk = subs(inputs[dep].dsk, sub)
            # indices.extend(new_indices)
            dsk.update(new_dsk)
    indices = [(a, tuple(b) if isinstance(b, list) else b)
               for a, b in indices]
    # De-duplicate indices like [(a, ij), (b, i), (a, ij)] -> [(a, ij), (b, i)]
    # Make sure that we map everything else appropriately as we remove inputs
    new_indices = []
    seen = {}
    sub = {}  # like {_0: _0, _1: _0, _2: _1}
    for i, x in enumerate(indices):
        if x[1] is not None and x in seen:
            sub[i] = seen[x]
        else:
            if x[1] is not None:
                seen[x] = len(new_indices)
            sub[i] = len(new_indices)
            new_indices.append(x)
    sub = {atop_token(k): atop_token(v) for k, v in sub.items()}
    dsk = {k: subs(v, sub) for k, v in dsk.items()}
    # Drop numblocks entries for inputs that were fused away entirely.
    indices_check = {k for k, v in indices if v is not None}
    numblocks = toolz.merge([inp.numblocks for inp in inputs.values()])
    numblocks = {k: v for k, v in numblocks.items()
                 if v is None or k in indices_check}
    out = TOP(root, inputs[root].output_indices, dsk, new_indices,
              numblocks=numblocks, new_axes=new_axes, concatenate=concatenate)
    return out
| true | true |
f71f58c1649fd2690611e738744d6c22a955fdf0 | 4,419 | py | Python | sherpa_client/models/http_service_metadata.py | kairntech/sherpa-client | cd259c87b7291eeec3f3ea025e368f2f069a06cd | [
"Apache-2.0"
] | null | null | null | sherpa_client/models/http_service_metadata.py | kairntech/sherpa-client | cd259c87b7291eeec3f3ea025e368f2f069a06cd | [
"Apache-2.0"
] | null | null | null | sherpa_client/models/http_service_metadata.py | kairntech/sherpa-client | cd259c87b7291eeec3f3ea025e368f2f069a06cd | [
"Apache-2.0"
] | null | null | null | from typing import Any, Dict, Type, TypeVar, Union
import attr
from ..models.http_service_metadata_operations import HttpServiceMetadataOperations
from ..types import UNSET, Unset
# Return-type placeholder so ``from_dict`` deserializes as the class it
# is called on (including subclasses).
T = TypeVar("T", bound="HttpServiceMetadata")
@attr.s(auto_attribs=True)
class HttpServiceMetadata:
    """Metadata advertised by an HTTP annotation service.

    ``api``, ``compatibility`` and ``version`` are required; every other
    field is optional and stays ``UNSET`` when the service does not
    report it.
    """

    api: str
    compatibility: str
    version: str
    annotators: Union[Unset, str] = UNSET
    converters: Union[Unset, str] = UNSET
    engine: Union[Unset, str] = UNSET
    extensions: Union[Unset, str] = UNSET
    formatters: Union[Unset, str] = UNSET
    functions: Union[Unset, str] = UNSET
    languages: Union[Unset, str] = UNSET
    natures: Union[Unset, str] = UNSET
    operations: Union[Unset, HttpServiceMetadataOperations] = UNSET
    processors: Union[Unset, str] = UNSET
    term_importers: Union[Unset, str] = UNSET
    trigger: Union[Unset, str] = UNSET

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting optional fields left UNSET."""
        operations: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.operations, Unset):
            operations = self.operations.to_dict()

        field_dict: Dict[str, Any] = {
            "api": self.api,
            "compatibility": self.compatibility,
            "version": self.version,
        }
        # Optional fields in serialization order; note the camelCase wire
        # name for ``term_importers``.
        optional_fields = (
            ("annotators", self.annotators),
            ("converters", self.converters),
            ("engine", self.engine),
            ("extensions", self.extensions),
            ("formatters", self.formatters),
            ("functions", self.functions),
            ("languages", self.languages),
            ("natures", self.natures),
            ("operations", operations),
            ("processors", self.processors),
            ("termImporters", self.term_importers),
            ("trigger", self.trigger),
        )
        for key, value in optional_fields:
            if value is not UNSET:
                field_dict[key] = value
        return field_dict

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict (inverse of ``to_dict``)."""
        d = src_dict.copy()
        # Required fields: a missing key raises KeyError here.
        api = d.pop("api")
        compatibility = d.pop("compatibility")
        version = d.pop("version")

        _operations = d.pop("operations", UNSET)
        operations: Union[Unset, HttpServiceMetadataOperations]
        if isinstance(_operations, Unset):
            operations = UNSET
        else:
            operations = HttpServiceMetadataOperations.from_dict(_operations)

        return cls(
            api=api,
            compatibility=compatibility,
            version=version,
            annotators=d.pop("annotators", UNSET),
            converters=d.pop("converters", UNSET),
            engine=d.pop("engine", UNSET),
            extensions=d.pop("extensions", UNSET),
            formatters=d.pop("formatters", UNSET),
            functions=d.pop("functions", UNSET),
            languages=d.pop("languages", UNSET),
            natures=d.pop("natures", UNSET),
            operations=operations,
            processors=d.pop("processors", UNSET),
            term_importers=d.pop("termImporters", UNSET),
            trigger=d.pop("trigger", UNSET),
        )
| 30.902098 | 83 | 0.604435 | from typing import Any, Dict, Type, TypeVar, Union
import attr
from ..models.http_service_metadata_operations import HttpServiceMetadataOperations
from ..types import UNSET, Unset
# Return-type placeholder so ``from_dict`` deserializes as the class it
# is called on (including subclasses).
T = TypeVar("T", bound="HttpServiceMetadata")
@attr.s(auto_attribs=True)
class HttpServiceMetadata:
    """Metadata advertised by an HTTP annotation service.

    ``api``, ``compatibility`` and ``version`` are required; every other
    field is optional and stays ``UNSET`` when the service does not
    report it.
    """
    api: str
    compatibility: str
    version: str
    # Optional capability descriptors; UNSET means "not reported".
    annotators: Union[Unset, str] = UNSET
    converters: Union[Unset, str] = UNSET
    engine: Union[Unset, str] = UNSET
    extensions: Union[Unset, str] = UNSET
    formatters: Union[Unset, str] = UNSET
    functions: Union[Unset, str] = UNSET
    languages: Union[Unset, str] = UNSET
    natures: Union[Unset, str] = UNSET
    operations: Union[Unset, HttpServiceMetadataOperations] = UNSET
    processors: Union[Unset, str] = UNSET
    term_importers: Union[Unset, str] = UNSET
    trigger: Union[Unset, str] = UNSET
    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict, omitting optional fields left UNSET."""
        api = self.api
        compatibility = self.compatibility
        version = self.version
        annotators = self.annotators
        converters = self.converters
        engine = self.engine
        extensions = self.extensions
        formatters = self.formatters
        functions = self.functions
        languages = self.languages
        natures = self.natures
        # ``operations`` serializes through its own to_dict when present.
        operations: Union[Unset, Dict[str, Any]] = UNSET
        if not isinstance(self.operations, Unset):
            operations = self.operations.to_dict()
        processors = self.processors
        term_importers = self.term_importers
        trigger = self.trigger
        field_dict: Dict[str, Any] = {}
        field_dict.update(
            {
                "api": api,
                "compatibility": compatibility,
                "version": version,
            }
        )
        # Only fields that were actually set make it onto the wire; note
        # the camelCase wire name for ``term_importers``.
        if annotators is not UNSET:
            field_dict["annotators"] = annotators
        if converters is not UNSET:
            field_dict["converters"] = converters
        if engine is not UNSET:
            field_dict["engine"] = engine
        if extensions is not UNSET:
            field_dict["extensions"] = extensions
        if formatters is not UNSET:
            field_dict["formatters"] = formatters
        if functions is not UNSET:
            field_dict["functions"] = functions
        if languages is not UNSET:
            field_dict["languages"] = languages
        if natures is not UNSET:
            field_dict["natures"] = natures
        if operations is not UNSET:
            field_dict["operations"] = operations
        if processors is not UNSET:
            field_dict["processors"] = processors
        if term_importers is not UNSET:
            field_dict["termImporters"] = term_importers
        if trigger is not UNSET:
            field_dict["trigger"] = trigger
        return field_dict
    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a plain dict (inverse of ``to_dict``).

        Raises KeyError if a required key (api/compatibility/version) is
        missing from ``src_dict``.
        """
        d = src_dict.copy()
        api = d.pop("api")
        compatibility = d.pop("compatibility")
        version = d.pop("version")
        annotators = d.pop("annotators", UNSET)
        converters = d.pop("converters", UNSET)
        engine = d.pop("engine", UNSET)
        extensions = d.pop("extensions", UNSET)
        formatters = d.pop("formatters", UNSET)
        functions = d.pop("functions", UNSET)
        languages = d.pop("languages", UNSET)
        natures = d.pop("natures", UNSET)
        # ``operations`` deserializes through its own from_dict when present.
        _operations = d.pop("operations", UNSET)
        operations: Union[Unset, HttpServiceMetadataOperations]
        if isinstance(_operations, Unset):
            operations = UNSET
        else:
            operations = HttpServiceMetadataOperations.from_dict(_operations)
        processors = d.pop("processors", UNSET)
        term_importers = d.pop("termImporters", UNSET)
        trigger = d.pop("trigger", UNSET)
        http_service_metadata = cls(
            api=api,
            compatibility=compatibility,
            version=version,
            annotators=annotators,
            converters=converters,
            engine=engine,
            extensions=extensions,
            formatters=formatters,
            functions=functions,
            languages=languages,
            natures=natures,
            operations=operations,
            processors=processors,
            term_importers=term_importers,
            trigger=trigger,
        )
        return http_service_metadata
| true | true |
f71f5c880c576a98b3a2c7865445d8aef1babbe3 | 5,734 | py | Python | ivy/func_wrapper.py | sert121/ivy | 286f86e487b0c83d46a3ef8d30aa96316337db32 | [
"Apache-2.0"
] | 1 | 2022-02-15T02:07:07.000Z | 2022-02-15T02:07:07.000Z | ivy/func_wrapper.py | sert121/ivy | 286f86e487b0c83d46a3ef8d30aa96316337db32 | [
"Apache-2.0"
] | null | null | null | ivy/func_wrapper.py | sert121/ivy | 286f86e487b0c83d46a3ef8d30aa96316337db32 | [
"Apache-2.0"
] | null | null | null | import ivy
import inspect
import importlib
import numpy as np
from types import ModuleType
# Registry of modules/classes already visited during a wrap/unwrap
# traversal; guards against revisiting them (see _wrap_or_unwrap_methods).
wrapped_modules_n_classes = []

# ivy API functions excluded from wrapping by _wrap_method.
# NOTE: the original list contained 'dev' twice; the duplicate was
# redundant for membership tests and has been removed.
NON_WRAPPED_METHODS = ['current_framework', 'current_framework_str', 'set_framework', 'get_framework',
                       'unset_framework', 'set_debug_mode', 'set_breakpoint_debug_mode', 'set_exception_debug_mode',
                       'unset_debug_mode', 'debug_mode', 'nested_map', 'to_ivy', 'args_to_ivy', 'to_native',
                       'args_to_native', 'default', 'exists', 'set_min_base', 'get_min_base', 'set_min_denominator',
                       'get_min_denominator', 'split_func_call_across_gpus', 'cache_fn', 'split_func_call',
                       'compile', 'compile_graph', 'dev', 'dev_to_str', 'dev_from_str', 'memory_on_dev',
                       'gpu_is_available', 'num_gpus', 'tpu_is_available', 'dtype', 'dtype_to_str', 'cprint',
                       'to_ivy_module', 'tree_flatten', 'tree_unflatten', 'start_compiling', 'stop_compiling',
                       'get_compiled', 'index_nest', 'set_nest_at_index', 'map_nest_at_index', 'multi_index_nest',
                       'set_nest_at_indices', 'map_nest_at_indices', 'nested_indices_where', 'map',
                       'unset_default_device', 'closest_valid_dtype', 'default_dtype', 'dtype_from_str']

# Methods whose results _wrap_method returns without converting back to
# ivy arrays (non-array returns, or nested containers of arrays).
ARRAYLESS_RET_METHODS = ['to_numpy', 'to_list', 'to_scalar', 'shape', 'get_num_dims', 'is_array', 'is_variable']
NESTED_ARRAY_RET_METHODS = ['unstack', 'split']

# Extra substrings identifying framework-owned modules (see _invalid_fn).
FW_FN_KEYWORDS = {'numpy': [],
                  'jax': [],
                  'tensorflow': [],
                  'torch': [],
                  'mxnet': ['ndarray']}

# Native attribute names skipped when traversing a framework namespace.
NATIVE_KEYS_TO_SKIP = {'numpy': [],
                       'jax': [],
                       'tensorflow': [],
                       'torch': ['classes', 'torch', 'is_grad_enabled', 'get_default_dtype', 'numel', 'clone', 'cpu',
                                 'set_', 'type', 'requires_grad_'],
                       'mxnet': []}
# Methods #
def _wrap_method(fn):
    """Return a version of ``fn`` that converts ivy arrays to native arrays
    before calling and converts the result back to ivy arrays afterwards.

    Private functions, names listed in NON_WRAPPED_METHODS, and functions
    that are already wrapped pass through unchanged.  The wrapper is
    flagged with ``wrapped = True`` and keeps the original as ``inner_fn``.
    """
    name = getattr(fn, '__name__', None)
    if name is not None and (name[0] == '_' or name in NON_WRAPPED_METHODS):
        return fn
    if getattr(fn, 'wrapped', False):
        return fn

    def _method_wrapped(*args, **kwargs):
        native_args, native_kwargs = ivy.args_to_native(*args, **kwargs)
        native_ret = fn(*native_args, **native_kwargs)
        # Results that are not plain single arrays are returned as-is.
        if fn.__name__ in ARRAYLESS_RET_METHODS + NESTED_ARRAY_RET_METHODS:
            return native_ret
        return ivy.to_ivy(native_ret, nested=True)

    if name is not None:
        _method_wrapped.__name__ = name
    _method_wrapped.wrapped = True
    _method_wrapped.inner_fn = fn
    return _method_wrapped
def _unwrap_method(method_wrapped):
if not hasattr(method_wrapped, 'wrapped') or not method_wrapped.wrapped:
return method_wrapped
return method_wrapped.inner_fn
def _invalid_fn(fn, fs=None):
    """Return True when ``fn`` belongs neither to ivy nor to the framework
    ``fs`` (judged by its ``__module__``) and so must not be wrapped."""
    if fs is None:
        fs = ivy.current_framework_str()
    # numpy ufuncs are always considered valid framework functions.
    if isinstance(fn, np.ufunc):
        return False
    module = getattr(fn, '__module__', None)
    if not module:
        return True
    keywords = ['ivy', fs] + FW_FN_KEYWORDS[fs]
    return not any(kw in module for kw in keywords)
def _wrap_or_unwrap_methods(wrap_or_unwrap_fn, val=None, fs=None, classes_to_wrap=None, native=False, depth=0):
    """Recursively apply ``wrap_or_unwrap_fn`` to every callable reachable
    from ``val`` (a module, class, or callable), mutating modules and
    classes in place.

    Parameters
    ----------
    wrap_or_unwrap_fn
        Transformation applied to each callable (e.g. ``_wrap_method``).
    val
        Module/class/callable to traverse.  Defaults to the ivy module,
        or to the native framework module when ``native`` is True.
    fs
        Framework string; defaults to the current framework.
    classes_to_wrap
        Extra classes whose attributes should also be traversed.
    native
        Traverse the native framework namespace instead of ivy's.
    depth
        Recursion depth; the visited registry is cleared whenever a call
        at depth 0 returns.
    """
    classes_to_wrap = [] if classes_to_wrap is None else classes_to_wrap
    if fs is None:
        fs = ivy.current_framework_str()
    if val is None:
        val = importlib.import_module(ivy.current_framework_str()) if native else ivy
    str_to_check = fs if native else 'ivy'
    is_class = inspect.isclass(val)
    if isinstance(val, ModuleType) or (val in classes_to_wrap):
        # Skip modules already visited, and foreign modules whose file path
        # does not belong to the namespace being traversed.
        if val in wrapped_modules_n_classes or (('__file__' not in val.__dict__ or
                (str_to_check not in val.__file__) or 'framework_handler' in val.__file__) and not is_class):
            return val
        wrapped_modules_n_classes.append(val)
        if is_class:
            for k in dir(val):
                if native and (k in NATIVE_KEYS_TO_SKIP[fs]):
                    continue
                v = getattr(val, k)
                if v is not None:
                    # noinspection PyBroadException
                    try:
                        setattr(val, k, _wrap_or_unwrap_methods(
                            wrap_or_unwrap_fn, v, fs, classes_to_wrap, native, depth + 1))
                    except Exception:
                        # Read-only attributes are left untouched.
                        pass
        else:
            for k, v in val.__dict__.items():
                if native and (k in NATIVE_KEYS_TO_SKIP[fs] or k[0] == '_'):
                    continue
                if v is None:
                    val.__dict__[k] = v
                else:
                    # noinspection PyBroadException
                    try:
                        val.__dict__[k] = _wrap_or_unwrap_methods(
                            wrap_or_unwrap_fn, v, fs, classes_to_wrap, native, depth + 1)
                    except Exception:
                        # Entries that cannot be rebound are left untouched.
                        pass
        if depth == 0:
            # Top-level traversal finished: reset the visited registry.
            wrapped_modules_n_classes.clear()
        return val
    elif callable(val) and not is_class:
        if depth == 0:
            wrapped_modules_n_classes.clear()
        # Leave functions that belong neither to ivy nor the framework alone.
        if (hasattr(val, 'inner_fn') and (_invalid_fn(val.inner_fn) and not native))\
                or (_invalid_fn(val) and not native):
            return val
        return wrap_or_unwrap_fn(val)
    if depth == 0:
        wrapped_modules_n_classes.clear()
    return val
def _wrap_methods():
    """Recursively wrap every reachable ivy function for native-array
    conversion (see ``_wrap_method``)."""
    return _wrap_or_unwrap_methods(wrap_or_unwrap_fn=_wrap_method)
def _unwrap_methods():
    """Recursively restore the original, unwrapped ivy functions
    (see ``_unwrap_method``)."""
    return _wrap_or_unwrap_methods(wrap_or_unwrap_fn=_unwrap_method)
| 40.380282 | 117 | 0.592431 | import ivy
import inspect
import importlib
import numpy as np
from types import ModuleType
wrapped_modules_n_classes = []
NON_WRAPPED_METHODS = ['current_framework', 'current_framework_str', 'set_framework', 'get_framework',
'unset_framework', 'set_debug_mode', 'set_breakpoint_debug_mode', 'set_exception_debug_mode',
'unset_debug_mode', 'debug_mode', 'nested_map', 'to_ivy', 'args_to_ivy', 'to_native',
'args_to_native', 'default', 'exists', 'set_min_base', 'get_min_base', 'set_min_denominator',
'get_min_denominator', 'split_func_call_across_gpus', 'cache_fn', 'split_func_call',
'compile', 'compile_graph', 'dev', 'dev', 'dev_to_str', 'dev_from_str', 'memory_on_dev',
'gpu_is_available', 'num_gpus', 'tpu_is_available', 'dtype', 'dtype_to_str', 'cprint',
'to_ivy_module', 'tree_flatten', 'tree_unflatten', 'start_compiling', 'stop_compiling',
'get_compiled', 'index_nest', 'set_nest_at_index', 'map_nest_at_index', 'multi_index_nest',
'set_nest_at_indices', 'map_nest_at_indices', 'nested_indices_where', 'map',
'unset_default_device', 'closest_valid_dtype', 'default_dtype', 'dtype_from_str']
ARRAYLESS_RET_METHODS = ['to_numpy', 'to_list', 'to_scalar', 'shape', 'get_num_dims', 'is_array', 'is_variable']
NESTED_ARRAY_RET_METHODS = ['unstack', 'split']
FW_FN_KEYWORDS = {'numpy': [],
'jax': [],
'tensorflow': [],
'torch': [],
'mxnet': ['ndarray']}
NATIVE_KEYS_TO_SKIP = {'numpy': [],
'jax': [],
'tensorflow': [],
'torch': ['classes', 'torch', 'is_grad_enabled', 'get_default_dtype', 'numel', 'clone', 'cpu',
'set_', 'type', 'requires_grad_'],
'mxnet': []}
def _wrap_method(fn):
    """Return a wrapper around ``fn`` that converts ivy arrays to native arrays
    on the way in and converts native results back to ivy arrays on the way out.

    Private functions, names in ``NON_WRAPPED_METHODS``, and already-wrapped
    functions are returned unchanged. The wrapper is marked with ``wrapped`` and
    keeps the original callable on ``inner_fn`` so it can be unwrapped later.
    """
    if hasattr(fn, '__name__') and (fn.__name__[0] == '_' or fn.__name__ in NON_WRAPPED_METHODS):
        return fn
    if hasattr(fn, 'wrapped') and fn.wrapped:
        # Idempotent: never double-wrap.
        return fn
    def _method_wrapped(*args, **kwargs):
        native_args, native_kwargs = ivy.args_to_native(*args, **kwargs)
        native_ret = fn(*native_args, **native_kwargs)
        if fn.__name__ in ARRAYLESS_RET_METHODS + NESTED_ARRAY_RET_METHODS:
            # These functions return non-array values (or nested structures)
            # that should not be converted back to ivy arrays.
            return native_ret
        return ivy.to_ivy(native_ret, nested=True)
    if hasattr(fn, '__name__'):
        _method_wrapped.__name__ = fn.__name__
    _method_wrapped.wrapped = True
    _method_wrapped.inner_fn = fn
    return _method_wrapped
def _unwrap_method(method_wrapped):
if not hasattr(method_wrapped, 'wrapped') or not method_wrapped.wrapped:
return method_wrapped
return method_wrapped.inner_fn
def _invalid_fn(fn, fs=None):
if fs is None:
fs = ivy.current_framework_str()
if isinstance(fn, np.ufunc):
return False
if not hasattr(fn, '__module__') or not fn.__module__:
return True
fw_fn_keywords = ['ivy', fs] + FW_FN_KEYWORDS[fs]
for kw in fw_fn_keywords:
if kw in fn.__module__:
return False
return True
def _wrap_or_unwrap_methods(wrap_or_unwrap_fn, val=None, fs=None, classes_to_wrap=None, native=False, depth=0):
    """Recursively apply ``wrap_or_unwrap_fn`` to every callable reachable from ``val``.

    Modules and whitelisted classes are mutated in place; plain callables are
    transformed. The module-level ``wrapped_modules_n_classes`` list prevents
    infinite recursion through already-visited modules/classes and is cleared
    at every depth-0 exit.
    """
    classes_to_wrap = [] if classes_to_wrap is None else classes_to_wrap
    if fs is None:
        fs = ivy.current_framework_str()
    if val is None:
        # Default to the backend framework module (native) or the ivy namespace.
        val = importlib.import_module(ivy.current_framework_str()) if native else ivy
    str_to_check = fs if native else 'ivy'
    is_class = inspect.isclass(val)
    if isinstance(val, ModuleType) or (val in classes_to_wrap):
        # Visit each module/class once, and only those belonging to the target package.
        if val in wrapped_modules_n_classes or (('__file__' not in val.__dict__ or
                (str_to_check not in val.__file__) or 'framework_handler' in val.__file__) and not is_class):
            return val
        wrapped_modules_n_classes.append(val)
        if is_class:
            for k in dir(val):
                if native and (k in NATIVE_KEYS_TO_SKIP[fs]):
                    continue
                v = getattr(val, k)
                if v is not None:
                    try:
                        setattr(val, k, _wrap_or_unwrap_methods(
                            wrap_or_unwrap_fn, v, fs, classes_to_wrap, native, depth + 1))
                    except Exception:
                        # Read-only / property attributes may refuse assignment.
                        pass
        else:
            for k, v in val.__dict__.items():
                if native and (k in NATIVE_KEYS_TO_SKIP[fs] or k[0] == '_'):
                    continue
                if v is None:
                    val.__dict__[k] = v
                else:
                    try:
                        val.__dict__[k] = _wrap_or_unwrap_methods(
                            wrap_or_unwrap_fn, v, fs, classes_to_wrap, native, depth + 1)
                    except Exception:
                        # Best-effort: leave un-wrappable attributes untouched.
                        pass
        if depth == 0:
            wrapped_modules_n_classes.clear()
        return val
    elif callable(val) and not is_class:
        if depth == 0:
            wrapped_modules_n_classes.clear()
        # Skip callables that belong neither to ivy nor to the current framework.
        if (hasattr(val, 'inner_fn') and (_invalid_fn(val.inner_fn) and not native))\
                or (_invalid_fn(val) and not native):
            return val
        return wrap_or_unwrap_fn(val)
    if depth == 0:
        wrapped_modules_n_classes.clear()
    return val
def _wrap_methods():
    """Apply the wrap transformation across the whole ivy namespace."""
    return _wrap_or_unwrap_methods(wrap_or_unwrap_fn=_wrap_method)
def _unwrap_methods():
    """Apply the unwrap transformation across the whole ivy namespace."""
    return _wrap_or_unwrap_methods(wrap_or_unwrap_fn=_unwrap_method)
| true | true |
f71f5cbb0f82e3b460895dc04351f46514cc35da | 1,549 | py | Python | idb/client/pid_saver.py | fakeNetflix/facebook-repo-idb | eb4ed5a7dc4a14b224a22e833294d7366fe4725e | [
"MIT"
] | 1 | 2021-03-09T07:29:18.000Z | 2021-03-09T07:29:18.000Z | idb/client/pid_saver.py | fakeNetflix/facebook-repo-idb | eb4ed5a7dc4a14b224a22e833294d7366fe4725e | [
"MIT"
] | 6 | 2021-05-10T08:32:56.000Z | 2022-02-26T01:41:09.000Z | idb/client/pid_saver.py | fakeNetflix/facebook-repo-idb | eb4ed5a7dc4a14b224a22e833294d7366fe4725e | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import json
import logging
import os
import signal
from typing import List
from idb.common.constants import IDB_PID_PATH
def save_pid(pid: int) -> None:
    """Append *pid* to the persisted list of daemon pids."""
    known_pids = _get_pids() + [pid]
    _write_pids(pids=known_pids)
    logging.debug(f"saved daemon pid {pid}")
def remove_pid(pid: int) -> None:
    """Drop the first occurrence of *pid* from the persisted pid list, if present."""
    known_pids = _get_pids()
    if pid in known_pids:
        known_pids.remove(pid)
    _write_pids(pids=known_pids)
    logging.debug(f"removed daemon pid {pid}")
def _write_pids(pids: List[int]) -> None:
    """Persist *pids* as a JSON array at IDB_PID_PATH, flushing immediately."""
    with open(IDB_PID_PATH, "w") as handle:
        json.dump(pids, handle)
        handle.flush()
def _has_saved_pids() -> bool:
    """Return True when at least one daemon pid is persisted on disk."""
    pids = _get_pids()
    logging.debug(f"has saved pids {pids}")
    return bool(pids)
def _get_pids() -> List[int]:
    """Load the persisted pid list; a missing or unreadable file yields []."""
    try:
        with open(IDB_PID_PATH) as handle:
            return json.load(handle)
    except Exception:
        # Deliberate best-effort: any read/parse failure means "no pids".
        return []
def _clear_saved_pids() -> None:
    """Truncate the pid file so no stale daemon pids remain recorded."""
    if not os.path.exists(IDB_PID_PATH):
        return
    # Opening for binary write truncates the file; nothing is written back.
    with open(IDB_PID_PATH, "wb", buffering=0) as handle:
        handle.flush()
async def kill_saved_pids() -> None:
    """Send SIGTERM to every persisted daemon pid, then clear the pid file.

    Pids that no longer exist or cannot be signalled are skipped silently.
    """
    if not _has_saved_pids():
        logging.debug("no daemon pid found")
        return
    for pid in _get_pids():
        try:
            os.kill(pid, signal.SIGTERM)
            logging.info(f"stopped daemon with pid {pid}")
        # Bug fix: `except OSError or ProcessLookupError` evaluated the `or`
        # first, so only OSError was ever caught. A tuple catches both intended
        # types (ProcessLookupError is an OSError subclass, so this also matches
        # the original author's intent exactly).
        except (OSError, ProcessLookupError):
            pass
    _clear_saved_pids()
| 23.469697 | 71 | 0.632666 |
import json
import logging
import os
import signal
from typing import List
from idb.common.constants import IDB_PID_PATH
def save_pid(pid: int) -> None:
    """Record *pid* in the on-disk daemon pid list."""
    updated = _get_pids()
    updated.append(pid)
    _write_pids(pids=updated)
    logging.debug(f"saved daemon pid {pid}")
def remove_pid(pid: int) -> None:
    """Remove one occurrence of *pid* from the on-disk daemon pid list."""
    updated = _get_pids()
    if pid in updated:
        updated.remove(pid)
    _write_pids(pids=updated)
    logging.debug(f"removed daemon pid {pid}")
def _write_pids(pids: List[int]) -> None:
    """Serialize *pids* to the pid file as JSON and flush to disk."""
    with open(IDB_PID_PATH, "w") as out:
        json.dump(pids, out)
        out.flush()
def _has_saved_pids() -> bool:
    """True when the pid file records at least one daemon pid."""
    pids = _get_pids()
    logging.debug(f"has saved pids {pids}")
    return bool(pids)
def _get_pids() -> List[int]:
    """Read the JSON pid list; treat any failure as an empty list."""
    try:
        with open(IDB_PID_PATH) as source:
            return json.load(source)
    except Exception:
        return []
def _clear_saved_pids() -> None:
    """Empty the pid file (open-for-write truncates it)."""
    if not os.path.exists(IDB_PID_PATH):
        return
    with open(IDB_PID_PATH, "wb", buffering=0) as out:
        out.flush()
async def kill_saved_pids() -> None:
    """SIGTERM every recorded daemon pid, then truncate the pid file.

    Unsignallable or already-gone processes are skipped silently.
    """
    if not _has_saved_pids():
        logging.debug("no daemon pid found")
        return
    for pid in _get_pids():
        try:
            os.kill(pid, signal.SIGTERM)
            logging.info(f"stopped daemon with pid {pid}")
        # Bug fix: `except OSError or ProcessLookupError` only ever caught
        # OSError (the `or` picked the first truthy class). Use a tuple.
        except (OSError, ProcessLookupError):
            pass
    _clear_saved_pids()
| true | true |
f71f5d1fb7366ad5808529f520d04d12bd1805b1 | 12,476 | py | Python | bot/bot.py | mudkipdev/pydis-bot | 234fba49e039fc4c5c8421162e803b1be3d0d33c | [
"MIT",
"BSD-3-Clause"
] | null | null | null | bot/bot.py | mudkipdev/pydis-bot | 234fba49e039fc4c5c8421162e803b1be3d0d33c | [
"MIT",
"BSD-3-Clause"
] | null | null | null | bot/bot.py | mudkipdev/pydis-bot | 234fba49e039fc4c5c8421162e803b1be3d0d33c | [
"MIT",
"BSD-3-Clause"
] | null | null | null | import asyncio
import logging
import socket
import warnings
from collections import defaultdict
from typing import Dict, Optional
import aiohttp
import discord
from async_rediscache import RedisSession
from discord.ext import commands
from sentry_sdk import push_scope
from bot import api, constants
from bot.async_stats import AsyncStatsClient
log = logging.getLogger('bot')
LOCALHOST = "127.0.0.1"
class Bot(commands.Bot):
    """A subclass of `discord.ext.commands.Bot` with an aiohttp session and an API client."""
    def __init__(self, *args, redis_session: RedisSession, **kwargs):
        if "connector" in kwargs:
            warnings.warn(
                "If login() is called (or the bot is started), the connector will be overwritten "
                "with an internal one"
            )
        super().__init__(*args, **kwargs)
        self.http_session: Optional[aiohttp.ClientSession] = None
        self.redis_session = redis_session
        self.api_client = api.APIClient(loop=self.loop)
        # Maps "<type>.<allowed>" -> {content: metadata dict} for site filter lists.
        self.filter_list_cache = defaultdict(dict)
        self._connector = None
        self._resolver = None
        self._statsd_timerhandle: Optional[asyncio.TimerHandle] = None
        self._guild_available = asyncio.Event()
        statsd_url = constants.Stats.statsd_host
        if constants.DEBUG_MODE:
            # Since statsd is UDP, there are no errors for sending to a down port.
            # For this reason, setting the statsd host to 127.0.0.1 for development
            # will effectively disable stats.
            statsd_url = LOCALHOST
        self.stats = AsyncStatsClient(self.loop, LOCALHOST)
        self._connect_statsd(statsd_url)
    def _connect_statsd(self, statsd_url: str, retry_after: int = 2, attempt: int = 1) -> None:
        """Callback used to retry a connection to statsd if it should fail."""
        if attempt >= 8:
            log.error("Reached 8 attempts trying to reconnect AsyncStatsClient. Aborting")
            return
        try:
            self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot")
        except socket.gaierror:
            log.warning(f"Statsd client failed to connect (Attempt(s): {attempt})")
            # Use a fallback strategy for retrying, up to 8 times.
            self._statsd_timerhandle = self.loop.call_later(
                retry_after,
                self._connect_statsd,
                statsd_url,
                retry_after * 2,
                attempt + 1
            )
    async def cache_filter_list_data(self) -> None:
        """Cache all the data in the FilterList on the site."""
        full_cache = await self.api_client.get('bot/filter-lists')
        for item in full_cache:
            self.insert_item_into_filter_list_cache(item)
    def _recreate(self) -> None:
        """Re-create the connector, aiohttp session, the APIClient and the Redis session."""
        # Use asyncio for DNS resolution instead of threads so threads aren't spammed.
        # Doesn't seem to have any state with regards to being closed, so no need to worry?
        self._resolver = aiohttp.AsyncResolver()
        # Its __del__ does send a warning but it doesn't always show up for some reason.
        if self._connector and not self._connector._closed:
            log.warning(
                "The previous connector was not closed; it will remain open and be overwritten"
            )
        if self.redis_session.closed:
            # If the RedisSession was somehow closed, we try to reconnect it
            # here. Normally, this shouldn't happen.
            self.loop.create_task(self.redis_session.connect())
        # Use AF_INET as its socket family to prevent HTTPS related problems both locally
        # and in production.
        self._connector = aiohttp.TCPConnector(
            resolver=self._resolver,
            family=socket.AF_INET,
        )
        # Client.login() will call HTTPClient.static_login() which will create a session using
        # this connector attribute.
        self.http.connector = self._connector
        # Its __del__ does send a warning but it doesn't always show up for some reason.
        if self.http_session and not self.http_session.closed:
            log.warning(
                "The previous session was not closed; it will remain open and be overwritten"
            )
        self.http_session = aiohttp.ClientSession(connector=self._connector)
        self.api_client.recreate(force=True, connector=self._connector)
        # Build the FilterList cache
        self.loop.create_task(self.cache_filter_list_data())
    @classmethod
    def create(cls) -> "Bot":
        """Create and return an instance of a Bot."""
        loop = asyncio.get_event_loop()
        allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES]
        # Start from all intents, then drop the ones the bot does not need.
        intents = discord.Intents().all()
        intents.presences = False
        intents.dm_typing = False
        intents.dm_reactions = False
        intents.invites = False
        intents.webhooks = False
        intents.integrations = False
        return cls(
            redis_session=_create_redis_session(loop),
            loop=loop,
            command_prefix=commands.when_mentioned_or(constants.Bot.prefix),
            activity=discord.Game(name=f"Commands: {constants.Bot.prefix}help"),
            case_insensitive=True,
            max_messages=10_000,
            allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles),
            intents=intents,
        )
    def load_extensions(self) -> None:
        """Load all enabled extensions."""
        # Must be done here to avoid a circular import.
        from bot.utils.extensions import EXTENSIONS
        extensions = set(EXTENSIONS)  # Create a mutable copy.
        if not constants.HelpChannels.enable:
            extensions.remove("bot.exts.help_channels")
        for extension in extensions:
            self.load_extension(extension)
    def add_cog(self, cog: commands.Cog) -> None:
        """Adds a "cog" to the bot and logs the operation."""
        super().add_cog(cog)
        log.info(f"Cog loaded: {cog.qualified_name}")
    def add_command(self, command: commands.Command) -> None:
        """Add `command` as normal and then add its root aliases to the bot."""
        super().add_command(command)
        self._add_root_aliases(command)
    def remove_command(self, name: str) -> Optional[commands.Command]:
        """
        Remove a command/alias as normal and then remove its root aliases from the bot.
        Individual root aliases cannot be removed by this function.
        To remove them, either remove the entire command or manually edit `bot.all_commands`.
        """
        command = super().remove_command(name)
        if command is None:
            # Even if it's a root alias, there's no way to get the Bot instance to remove the alias.
            return
        self._remove_root_aliases(command)
        return command
    def clear(self) -> None:
        """
        Clears the internal state of the bot and recreates the connector and sessions.
        Will cause a DeprecationWarning if called outside a coroutine.
        """
        # Because discord.py recreates the HTTPClient session, may as well follow suit and recreate
        # our own stuff here too.
        self._recreate()
        super().clear()
    async def close(self) -> None:
        """Close the Discord connection and the aiohttp session, connector, statsd client, and resolver."""
        await super().close()
        await self.api_client.close()
        if self.http_session:
            await self.http_session.close()
        if self._connector:
            await self._connector.close()
        if self._resolver:
            await self._resolver.close()
        if self.stats._transport:
            self.stats._transport.close()
        if self.redis_session:
            await self.redis_session.close()
        if self._statsd_timerhandle:
            # Cancel any pending statsd reconnection attempt.
            self._statsd_timerhandle.cancel()
    def insert_item_into_filter_list_cache(self, item: Dict[str, str]) -> None:
        """Add an item to the bots filter_list_cache."""
        type_ = item["type"]
        allowed = item["allowed"]
        content = item["content"]
        self.filter_list_cache[f"{type_}.{allowed}"][content] = {
            "id": item["id"],
            "comment": item["comment"],
            "created_at": item["created_at"],
            "updated_at": item["updated_at"],
        }
    async def login(self, *args, **kwargs) -> None:
        """Re-create the connector and set up sessions before logging into Discord."""
        self._recreate()
        await self.stats.create_socket()
        await super().login(*args, **kwargs)
    async def on_guild_available(self, guild: discord.Guild) -> None:
        """
        Set the internal guild available event when constants.Guild.id becomes available.
        If the cache appears to still be empty (no members, no channels, or no roles), the event
        will not be set.
        """
        if guild.id != constants.Guild.id:
            return
        if not guild.roles or not guild.members or not guild.channels:
            msg = "Guild available event was dispatched but the cache appears to still be empty!"
            log.warning(msg)
            try:
                webhook = await self.fetch_webhook(constants.Webhooks.dev_log)
            except discord.HTTPException as e:
                log.error(f"Failed to fetch webhook to send empty cache warning: status {e.status}")
            else:
                await webhook.send(f"<@&{constants.Roles.admin}> {msg}")
            return
        self._guild_available.set()
    async def on_guild_unavailable(self, guild: discord.Guild) -> None:
        """Clear the internal guild available event when constants.Guild.id becomes unavailable."""
        if guild.id != constants.Guild.id:
            return
        self._guild_available.clear()
    async def wait_until_guild_available(self) -> None:
        """
        Wait until the constants.Guild.id guild is available (and the cache is ready).
        The on_ready event is inadequate because it only waits 2 seconds for a GUILD_CREATE
        gateway event before giving up and thus not populating the cache for unavailable guilds.
        """
        await self._guild_available.wait()
    async def on_error(self, event: str, *args, **kwargs) -> None:
        """Log errors raised in event listeners rather than printing them to stderr."""
        self.stats.incr(f"errors.event.{event}")
        with push_scope() as scope:
            scope.set_tag("event", event)
            scope.set_extra("args", args)
            scope.set_extra("kwargs", kwargs)
            log.exception(f"Unhandled exception in {event}.")
    def _add_root_aliases(self, command: commands.Command) -> None:
        """Recursively add root aliases for `command` and any of its subcommands."""
        if isinstance(command, commands.Group):
            for subcommand in command.commands:
                self._add_root_aliases(subcommand)
        for alias in getattr(command, "root_aliases", ()):
            if alias in self.all_commands:
                raise commands.CommandRegistrationError(alias, alias_conflict=True)
            self.all_commands[alias] = command
    def _remove_root_aliases(self, command: commands.Command) -> None:
        """Recursively remove root aliases for `command` and any of its subcommands."""
        if isinstance(command, commands.Group):
            for subcommand in command.commands:
                self._remove_root_aliases(subcommand)
        for alias in getattr(command, "root_aliases", ()):
            self.all_commands.pop(alias, None)
def _create_redis_session(loop: asyncio.AbstractEventLoop) -> RedisSession:
    """Build a RedisSession and block on *loop* until it is connected.

    Connecting eagerly here prevents race conditions; the Bot must use the
    same event loop that performed the connection.
    """
    session = RedisSession(
        address=(constants.Redis.host, constants.Redis.port),
        password=constants.Redis.password,
        minsize=1,
        maxsize=20,
        use_fakeredis=constants.Redis.use_fakeredis,
        global_namespace="bot",
    )
    loop.run_until_complete(session.connect())
    return session
| 38.152905 | 107 | 0.64083 | import asyncio
import logging
import socket
import warnings
from collections import defaultdict
from typing import Dict, Optional
import aiohttp
import discord
from async_rediscache import RedisSession
from discord.ext import commands
from sentry_sdk import push_scope
from bot import api, constants
from bot.async_stats import AsyncStatsClient
log = logging.getLogger('bot')
LOCALHOST = "127.0.0.1"
class Bot(commands.Bot):
def __init__(self, *args, redis_session: RedisSession, **kwargs):
if "connector" in kwargs:
warnings.warn(
"If login() is called (or the bot is started), the connector will be overwritten "
"with an internal one"
)
super().__init__(*args, **kwargs)
self.http_session: Optional[aiohttp.ClientSession] = None
self.redis_session = redis_session
self.api_client = api.APIClient(loop=self.loop)
self.filter_list_cache = defaultdict(dict)
self._connector = None
self._resolver = None
self._statsd_timerhandle: asyncio.TimerHandle = None
self._guild_available = asyncio.Event()
statsd_url = constants.Stats.statsd_host
if constants.DEBUG_MODE:
statsd_url = LOCALHOST
self.stats = AsyncStatsClient(self.loop, LOCALHOST)
self._connect_statsd(statsd_url)
def _connect_statsd(self, statsd_url: str, retry_after: int = 2, attempt: int = 1) -> None:
if attempt >= 8:
log.error("Reached 8 attempts trying to reconnect AsyncStatsClient. Aborting")
return
try:
self.stats = AsyncStatsClient(self.loop, statsd_url, 8125, prefix="bot")
except socket.gaierror:
log.warning(f"Statsd client failed to connect (Attempt(s): {attempt})")
self._statsd_timerhandle = self.loop.call_later(
retry_after,
self._connect_statsd,
statsd_url,
retry_after * 2,
attempt + 1
)
async def cache_filter_list_data(self) -> None:
full_cache = await self.api_client.get('bot/filter-lists')
for item in full_cache:
self.insert_item_into_filter_list_cache(item)
def _recreate(self) -> None:
# Doesn't seem to have any state with regards to being closed, so no need to worry?
self._resolver = aiohttp.AsyncResolver()
if self._connector and not self._connector._closed:
log.warning(
"The previous connector was not closed; it will remain open and be overwritten"
)
if self.redis_session.closed:
# If the RedisSession was somehow closed, we try to reconnect it
# here. Normally, this shouldn't happen.
self.loop.create_task(self.redis_session.connect())
self._connector = aiohttp.TCPConnector(
resolver=self._resolver,
family=socket.AF_INET,
)
self.http.connector = self._connector
if self.http_session and not self.http_session.closed:
log.warning(
"The previous session was not closed; it will remain open and be overwritten"
)
self.http_session = aiohttp.ClientSession(connector=self._connector)
self.api_client.recreate(force=True, connector=self._connector)
# Build the FilterList cache
self.loop.create_task(self.cache_filter_list_data())
@classmethod
def create(cls) -> "Bot":
loop = asyncio.get_event_loop()
allowed_roles = [discord.Object(id_) for id_ in constants.MODERATION_ROLES]
intents = discord.Intents().all()
intents.presences = False
intents.dm_typing = False
intents.dm_reactions = False
intents.invites = False
intents.webhooks = False
intents.integrations = False
return cls(
redis_session=_create_redis_session(loop),
loop=loop,
command_prefix=commands.when_mentioned_or(constants.Bot.prefix),
activity=discord.Game(name=f"Commands: {constants.Bot.prefix}help"),
case_insensitive=True,
max_messages=10_000,
allowed_mentions=discord.AllowedMentions(everyone=False, roles=allowed_roles),
intents=intents,
)
def load_extensions(self) -> None:
# Must be done here to avoid a circular import.
from bot.utils.extensions import EXTENSIONS
extensions = set(EXTENSIONS) # Create a mutable copy.
if not constants.HelpChannels.enable:
extensions.remove("bot.exts.help_channels")
for extension in extensions:
self.load_extension(extension)
def add_cog(self, cog: commands.Cog) -> None:
super().add_cog(cog)
log.info(f"Cog loaded: {cog.qualified_name}")
def add_command(self, command: commands.Command) -> None:
super().add_command(command)
self._add_root_aliases(command)
def remove_command(self, name: str) -> Optional[commands.Command]:
command = super().remove_command(name)
if command is None:
# Even if it's a root alias, there's no way to get the Bot instance to remove the alias.
return
self._remove_root_aliases(command)
return command
def clear(self) -> None:
# Because discord.py recreates the HTTPClient session, may as well follow suit and recreate
# our own stuff here too.
self._recreate()
super().clear()
async def close(self) -> None:
await super().close()
await self.api_client.close()
if self.http_session:
await self.http_session.close()
if self._connector:
await self._connector.close()
if self._resolver:
await self._resolver.close()
if self.stats._transport:
self.stats._transport.close()
if self.redis_session:
await self.redis_session.close()
if self._statsd_timerhandle:
self._statsd_timerhandle.cancel()
def insert_item_into_filter_list_cache(self, item: Dict[str, str]) -> None:
type_ = item["type"]
allowed = item["allowed"]
content = item["content"]
self.filter_list_cache[f"{type_}.{allowed}"][content] = {
"id": item["id"],
"comment": item["comment"],
"created_at": item["created_at"],
"updated_at": item["updated_at"],
}
async def login(self, *args, **kwargs) -> None:
self._recreate()
await self.stats.create_socket()
await super().login(*args, **kwargs)
async def on_guild_available(self, guild: discord.Guild) -> None:
if guild.id != constants.Guild.id:
return
if not guild.roles or not guild.members or not guild.channels:
msg = "Guild available event was dispatched but the cache appears to still be empty!"
log.warning(msg)
try:
webhook = await self.fetch_webhook(constants.Webhooks.dev_log)
except discord.HTTPException as e:
log.error(f"Failed to fetch webhook to send empty cache warning: status {e.status}")
else:
await webhook.send(f"<@&{constants.Roles.admin}> {msg}")
return
self._guild_available.set()
async def on_guild_unavailable(self, guild: discord.Guild) -> None:
if guild.id != constants.Guild.id:
return
self._guild_available.clear()
async def wait_until_guild_available(self) -> None:
await self._guild_available.wait()
async def on_error(self, event: str, *args, **kwargs) -> None:
self.stats.incr(f"errors.event.{event}")
with push_scope() as scope:
scope.set_tag("event", event)
scope.set_extra("args", args)
scope.set_extra("kwargs", kwargs)
log.exception(f"Unhandled exception in {event}.")
def _add_root_aliases(self, command: commands.Command) -> None:
if isinstance(command, commands.Group):
for subcommand in command.commands:
self._add_root_aliases(subcommand)
for alias in getattr(command, "root_aliases", ()):
if alias in self.all_commands:
raise commands.CommandRegistrationError(alias, alias_conflict=True)
self.all_commands[alias] = command
def _remove_root_aliases(self, command: commands.Command) -> None:
if isinstance(command, commands.Group):
for subcommand in command.commands:
self._remove_root_aliases(subcommand)
for alias in getattr(command, "root_aliases", ()):
self.all_commands.pop(alias, None)
def _create_redis_session(loop: asyncio.AbstractEventLoop) -> RedisSession:
redis_session = RedisSession(
address=(constants.Redis.host, constants.Redis.port),
password=constants.Redis.password,
minsize=1,
maxsize=20,
use_fakeredis=constants.Redis.use_fakeredis,
global_namespace="bot",
)
loop.run_until_complete(redis_session.connect())
return redis_session
| true | true |
f71f5dc2484d87171414c6d905bc5a1656c3625b | 4,043 | py | Python | encoders/audio/Wav2VecSpeechEncoder/__init__.py | akurniawan/jina-hub | d89bc5e8f527f1212c3228a15775e222983c0087 | [
"Apache-2.0"
] | null | null | null | encoders/audio/Wav2VecSpeechEncoder/__init__.py | akurniawan/jina-hub | d89bc5e8f527f1212c3228a15775e222983c0087 | [
"Apache-2.0"
] | null | null | null | encoders/audio/Wav2VecSpeechEncoder/__init__.py | akurniawan/jina-hub | d89bc5e8f527f1212c3228a15775e222983c0087 | [
"Apache-2.0"
] | null | null | null | __copyright__ = "Copyright (c) 2020 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import os
from typing import Optional
import numpy as np
from jina.executors.decorators import batching, as_ndarray
from jina.executors.encoders import BaseAudioEncoder
from jina.executors.encoders.frameworks import BaseTorchEncoder
from jina.excepts import PretrainedModelFileDoesNotExist
from jina.helper import cached_property
class Wav2VecSpeechEncoder(BaseTorchEncoder, BaseAudioEncoder):
    """
    Use a pre-trained model (`wav2vec`) to encode audio signal.
    :class:`Wav2VecSpeechEncoder` is a speech encoder based on `wav2vec`,
    an unsupervised pre-trained model for speech recognition presented and implemented
    by Facebook: https://github.com/pytorch/fairseq/tree/master/examples/wav2vec
    It uses a pre-trained model to encode an audio signal from
    a `Batch x Signal Length` ndarray into a `Batch x Concatenated Features` ndarray,
    and produces a representation for each time step at a rate of 100 Hz.
    :param model_path: the path of the pre-trained model.
        The pre-trained model can be downloaded at
        https://github.com/pytorch/fairseq/tree/master/examples/wav2vec/README.md#wav2vec
    :param input_sample_rate: input sampling rate in Hz (22050 by default)
    """
    def __init__(self,
                 model_path: Optional[str] = '/tmp/wav2vec_large.pt',
                 input_sample_rate: int = 22050,
                 *args,
                 **kwargs):
        """Store the model path and the sampling rate of incoming signals."""
        super().__init__(*args, **kwargs)
        self.model_path = model_path
        self.input_sample_rate = input_sample_rate
    def post_init(self):
        """Load the pre-trained wav2vec checkpoint and move it to the target device.

        :raises PretrainedModelFileDoesNotExist: if `model_path` is unset or missing.
        """
        super().post_init()
        if self.model_path and os.path.exists(self.model_path):
            import torch
            from fairseq.models.wav2vec import Wav2VecModel
            # Load on CPU first; to_device() moves the model to GPU if configured.
            cp = torch.load(self.model_path, map_location=torch.device('cpu'))
            self.model = Wav2VecModel.build_model(cp['args'], task=None)
            self.model.load_state_dict(cp['model'])
            self.model.eval()
            self.to_device(self.model)
            self._tensor_func = torch.tensor
        else:
            raise PretrainedModelFileDoesNotExist(f'model at {self.model_path} does not exist')
    @batching
    @as_ndarray
    def encode(self, data: np.ndarray, *args, **kwargs) -> np.ndarray:
        """
        Resample input audio signal to 16kHz.
        Segments the resampled signal of each Doc into `wav2vec` frames,
        encodes the frames and concatenates Doc frame embeddings into a
        single Doc embedding.
        :param data: A `Batch x Signal Length` ndarray, where
            `Signal Length` is a number of samples
        :return: A `Batch x Concatenated Features` ndarray,
            where `Concatenated Features` is a 512-dimensional feature
            vector times the number of the wav2vec frames.
        """
        # wav2vec's feature extractor needs a minimum receptive field of samples.
        assert data.shape[1] >= 465, 'the signal must have at least 465 samples'
        from librosa import resample
        embeds = []
        with self.session():
            for chunk_data in data:
                resampled_signal = resample(chunk_data, self.input_sample_rate, 16000)
                signal_tensor = self.array2tensor(resampled_signal.reshape(1, -1))
                features = self.model.feature_extractor(signal_tensor)
                embed_tensor = self.model.feature_aggregator(features)[0]
                # Transpose to (time, feature) then flatten into one Doc embedding.
                chunk_embed = self.tensor2array(embed_tensor).T.flatten()
                embeds.append(chunk_embed)
        return embeds
    def array2tensor(self, array):
        """Convert a numpy array to a torch tensor on the configured device."""
        tensor = self._tensor_func(array)
        return tensor.cuda() if self.on_gpu else tensor
    def tensor2array(self, tensor):
        """Convert a torch tensor back to a numpy array.

        Bug fix: a CUDA tensor must be copied to host memory with ``.cpu()``
        before ``.numpy()``; the original called ``.cuda()`` here, which raises
        ``TypeError: can't convert cuda:0 device type tensor to numpy``.
        """
        return tensor.cpu().numpy() if self.on_gpu else tensor.numpy()
    @cached_property
    def session(self):
        """Cached no-grad context factory used around inference."""
        return self.get_session()
    def get_session(self):
        """Return torch's ``no_grad`` context manager class for inference."""
        from torch import no_grad
        return no_grad
__license__ = "Apache-2.0"
import os
from typing import Optional
import numpy as np
from jina.executors.decorators import batching, as_ndarray
from jina.executors.encoders import BaseAudioEncoder
from jina.executors.encoders.frameworks import BaseTorchEncoder
from jina.excepts import PretrainedModelFileDoesNotExist
from jina.helper import cached_property
class Wav2VecSpeechEncoder(BaseTorchEncoder, BaseAudioEncoder):
    """Encode raw speech waveforms with a pretrained fairseq wav2vec model.

    :param model_path: path to the pretrained wav2vec checkpoint
        (e.g. ``wav2vec_large.pt``)
    :param input_sample_rate: sample rate (Hz) of the incoming signals;
        they are resampled to the 16 kHz rate that wav2vec expects
    """

    def __init__(self,
                 model_path: Optional[str] = '/tmp/wav2vec_large.pt',
                 input_sample_rate: int = 22050,
                 *args,
                 **kwargs):
        super().__init__(*args, **kwargs)
        self.model_path = model_path
        self.input_sample_rate = input_sample_rate

    def post_init(self):
        """Load the checkpoint and move the model to the configured device.

        :raises PretrainedModelFileDoesNotExist: if ``model_path`` is unset
            or the file is missing.
        """
        super().post_init()
        if self.model_path and os.path.exists(self.model_path):
            import torch
            from fairseq.models.wav2vec import Wav2VecModel
            # Load on CPU first; to_device() moves the model to GPU when
            # the executor is configured with on_gpu=True.
            cp = torch.load(self.model_path, map_location=torch.device('cpu'))
            self.model = Wav2VecModel.build_model(cp['args'], task=None)
            self.model.load_state_dict(cp['model'])
            self.model.eval()
            self.to_device(self.model)
            self._tensor_func = torch.tensor
        else:
            raise PretrainedModelFileDoesNotExist(f'model at {self.model_path} does not exist')

    @batching
    @as_ndarray
    def encode(self, data: np.ndarray, *args, **kwargs) -> np.ndarray:
        """Encode a batch of 1D signals into flattened wav2vec features.

        :param data: batch with shape ``(batch, n_samples)``; wav2vec's
            feature extractor needs at least 465 samples per signal.
        :return: one flattened feature vector per input signal.
        """
        assert data.shape[1] >= 465, 'the signal must have at least 465 samples'
        from librosa import resample
        embeds = []
        with self.session():
            for chunk_data in data:
                # Keyword arguments keep this compatible with librosa >= 0.10,
                # where the positional sample-rate form was removed.
                resampled_signal = resample(chunk_data, orig_sr=self.input_sample_rate, target_sr=16000)
                signal_tensor = self.array2tensor(resampled_signal.reshape(1, -1))
                features = self.model.feature_extractor(signal_tensor)
                embed_tensor = self.model.feature_aggregator(features)[0]
                chunk_embed = self.tensor2array(embed_tensor).T.flatten()
                embeds.append(chunk_embed)
        return embeds

    def array2tensor(self, array):
        """Convert a numpy array to a torch tensor on the active device."""
        tensor = self._tensor_func(array)
        return tensor.cuda() if self.on_gpu else tensor

    def tensor2array(self, tensor):
        """Convert a torch tensor back to a numpy array.

        Bug fix: a CUDA tensor must be moved to host memory with
        ``.cpu()`` before ``.numpy()`` can be called; the original used
        ``.cuda()`` here, which raises a TypeError on GPU.
        """
        return tensor.cpu().numpy() if self.on_gpu else tensor.numpy()

    @cached_property
    def session(self):
        """Cached factory for the inference (no-grad) context."""
        return self.get_session()

    def get_session(self):
        # torch.no_grad is itself a context-manager class; calling the
        # returned object (``self.session()``) creates a fresh context.
        from torch import no_grad
        return no_grad
f71f5e67663079678fe379004ccba2d635f29cd6 | 3,572 | py | Python | cp_spider/cp_spider/settings.py | zachariah-chow/mas-cp-scrapy | 7c3cd8bcb9d6fc248a325621337da40398452cdb | [
"MIT"
] | null | null | null | cp_spider/cp_spider/settings.py | zachariah-chow/mas-cp-scrapy | 7c3cd8bcb9d6fc248a325621337da40398452cdb | [
"MIT"
] | null | null | null | cp_spider/cp_spider/settings.py | zachariah-chow/mas-cp-scrapy | 7c3cd8bcb9d6fc248a325621337da40398452cdb | [
"MIT"
] | null | null | null | # Scrapy settings for cp_spider project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
# Scrapy settings for the cp_spider project.
#
# Only the settings this project actually overrides are spelled out here;
# everything else falls back to Scrapy's defaults. Full reference:
# https://docs.scrapy.org/en/latest/topics/settings.html

BOT_NAME = 'cp_spider'

SPIDER_MODULES = ['cp_spider.spiders']
NEWSPIDER_MODULE = 'cp_spider.spiders'

# The target sites' robots.txt rules are deliberately not obeyed.
ROBOTSTXT_OBEY = False

# --- scrapy-splash integration (JavaScript rendering) -----------------
# Endpoint of the local Splash daemon.
SPLASH_URL = 'http://localhost:8050'

# Splash middlewares, at the priorities recommended by scrapy-splash.
DOWNLOADER_MIDDLEWARES = {
    'scrapy_splash.SplashCookiesMiddleware': 723,
    'scrapy_splash.SplashMiddleware': 725,
    'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': 810,
}

SPIDER_MIDDLEWARES = {
    'scrapy_splash.SplashDeduplicateArgsMiddleware': 100,
}

# Splash-aware request fingerprinting and HTTP cache storage.
DUPEFILTER_CLASS = 'scrapy_splash.SplashAwareDupeFilter'
HTTPCACHE_STORAGE = 'scrapy_splash.SplashAwareFSCacheStorage'
| 33.698113 | 103 | 0.779395 |
BOT_NAME = 'cp_spider'
SPIDER_MODULES = ['cp_spider.spiders']
NEWSPIDER_MODULE = 'cp_spider.spiders'
ROBOTSTXT_OBEY = False
ocalhost:8050'
DOWNLOADER_MIDDLEWARES = {
'scrapy_splash.SplashCookiesMiddleware': 723,
'scrapy_splash.SplashMiddleware': 725,
'scrapy.downloadermiddlewares.httpcompression.HttpCompressionMiddleware': 810,
}
SPIDER_MIDDLEWARES = {
'scrapy_splash.SplashDeduplicateArgsMiddleware': 100,
}
DUPEFILTER_CLASS = 'scrapy_splash.SplashAwareDupeFilter'
HTTPCACHE_STORAGE = 'scrapy_splash.SplashAwareFSCacheStorage'
| true | true |
f71f5e75839cd04c172644fd22b384312e83d690 | 6,499 | py | Python | grid_world.py | vigneshyaadav27/Grid-world | a5c4cab46cdafc6458526593ae31ac19a152001d | [
"MIT"
] | null | null | null | grid_world.py | vigneshyaadav27/Grid-world | a5c4cab46cdafc6458526593ae31ac19a152001d | [
"MIT"
] | null | null | null | grid_world.py | vigneshyaadav27/Grid-world | a5c4cab46cdafc6458526593ae31ac19a152001d | [
"MIT"
] | null | null | null | #######################################################################
# Copyright (C) #
# 2016-2018 Shangtong Zhang(zhangshangtong.cpp@gmail.com) #
# 2016 Kenta Shimada(hyperkentakun@gmail.com) #
# Permission given to modify the code as long as you keep this #
# declaration at the top #
#######################################################################
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.table import Table
matplotlib.use('Agg')
# Grid-world configuration (Sutton & Barto, Example 3.5).
WORLD_SIZE = 5
A_POS = [0, 1]            # special state A
A_PRIME_POS = [4, 1]      # every action in A jumps here with reward +10
B_POS = [0, 3]            # special state B
B_PRIME_POS = [2, 3]      # every action in B jumps here with reward +5
DISCOUNT = 0.9

# Action offsets in (row, col) order: left, up, right, down.
ACTIONS = [np.array([0, -1]),
           np.array([-1, 0]),
           np.array([0, 1]),
           np.array([1, 0])]
ACTIONS_FIGS = ['←', '↑', '→', '↓']


ACTION_PROB = 0.25  # equiprobable random policy


def step(state, action):
    """Apply *action* in *state* and return ``(next_state, reward)``.

    The special states A and B teleport regardless of the action; moves
    that would leave the grid keep the state unchanged and cost -1.
    """
    if state == A_POS:
        return A_PRIME_POS, 10
    if state == B_POS:
        return B_PRIME_POS, 5

    row, col = (np.array(state) + action).tolist()
    if 0 <= row < WORLD_SIZE and 0 <= col < WORLD_SIZE:
        return [row, col], 0
    # Off-grid move: stay put, pay the wall penalty.
    return state, -1.0
def draw_image(image):
    """Render a 2D array of state values as a matplotlib table.

    Cells that correspond to the special states A/A'/B/B' get a text
    label appended. The table is added to a fresh figure; callers are
    expected to save/close it via ``plt.savefig``/``plt.close``.
    """
    fig, ax = plt.subplots()
    ax.set_axis_off()
    # One table cell per grid state, normalized to fill the axes.
    tb = Table(ax, bbox=[0, 0, 1, 1])
    nrows, ncols = image.shape
    width, height = 1.0 / ncols, 1.0 / nrows
    # Add cells
    for (i, j), val in np.ndenumerate(image):
        # add state labels
        if [i, j] == A_POS:
            val = str(val) + " (A)"
        if [i, j] == A_PRIME_POS:
            val = str(val) + " (A')"
        if [i, j] == B_POS:
            val = str(val) + " (B)"
        if [i, j] == B_PRIME_POS:
            val = str(val) + " (B')"
        tb.add_cell(i, j, width, height, text=val,
                    loc='center', facecolor='white')
    # Row and column labels (1-based, drawn outside the grid proper).
    for i in range(len(image)):
        tb.add_cell(i, -1, width, height, text=i+1, loc='right',
                    edgecolor='none', facecolor='none')
        tb.add_cell(-1, i, width, height/2, text=i+1, loc='center',
                    edgecolor='none', facecolor='none')
    ax.add_table(tb)
def draw_policy(optimal_values):
    """Render the greedy policy implied by *optimal_values* as a table.

    Each cell shows arrow(s) for every action that is greedy with
    respect to the given state-value array (ties show multiple arrows).
    """
    fig, ax = plt.subplots()
    ax.set_axis_off()
    tb = Table(ax, bbox=[0, 0, 1, 1])
    nrows, ncols = optimal_values.shape
    width, height = 1.0 / ncols, 1.0 / nrows
    # Add cells
    for (i, j), val in np.ndenumerate(optimal_values):
        # One-step lookahead: value of the successor state per action.
        next_vals=[]
        for action in ACTIONS:
            next_state, _ = step([i, j], action)
            next_vals.append(optimal_values[next_state[0],next_state[1]])
        # All actions tied for the best successor value are greedy.
        best_actions=np.where(next_vals == np.max(next_vals))[0]
        val=''
        for ba in best_actions:
            val+=ACTIONS_FIGS[ba]
        # add state labels
        if [i, j] == A_POS:
            val = str(val) + " (A)"
        if [i, j] == A_PRIME_POS:
            val = str(val) + " (A')"
        if [i, j] == B_POS:
            val = str(val) + " (B)"
        if [i, j] == B_PRIME_POS:
            val = str(val) + " (B')"
        tb.add_cell(i, j, width, height, text=val,
                loc='center', facecolor='white')
    # Row and column labels (1-based).
    for i in range(len(optimal_values)):
        tb.add_cell(i, -1, width, height, text=i+1, loc='right',
                    edgecolor='none', facecolor='none')
        tb.add_cell(-1, i, width, height/2, text=i+1, loc='center',
                    edgecolor='none', facecolor='none')
    ax.add_table(tb)
def figure_3_2():
    """Evaluate the equiprobable random policy by iterative policy
    evaluation and save the resulting value table to
    ``../images/figure_3_2.png``.
    """
    state_values = np.zeros((WORLD_SIZE, WORLD_SIZE))
    while True:
        # One synchronous sweep of the Bellman expectation backup.
        updated = np.zeros_like(state_values)
        for row in range(WORLD_SIZE):
            for col in range(WORLD_SIZE):
                for action in ACTIONS:
                    (nr, nc), reward = step([row, col], action)
                    updated[row, col] += ACTION_PROB * (
                        reward + DISCOUNT * state_values[nr, nc])
        if np.sum(np.abs(state_values - updated)) < 1e-4:
            # Converged: draw the table and write it out.
            draw_image(np.round(updated, decimals=2))
            plt.savefig('../images/figure_3_2.png')
            plt.close()
            return
        state_values = updated
def figure_3_2_linear_system():
    '''
    Here we solve the linear system of equations to find the exact solution.
    We do this by filling the coefficients for each of the states with their respective right side constant.
    '''
    # Bellman expectation equations rearranged to A v = b, one row per
    # state. Start from -I for the -v(s) term on each row's diagonal.
    A = -1 * np.eye(WORLD_SIZE * WORLD_SIZE)
    b = np.zeros(WORLD_SIZE * WORLD_SIZE)
    for i in range(WORLD_SIZE):
        for j in range(WORLD_SIZE):
            s = [i, j]  # current state
            # Flatten the 2D state coordinate into a row index.
            index_s = np.ravel_multi_index(s, (WORLD_SIZE, WORLD_SIZE))
            for a in ACTIONS:
                s_, r = step(s, a)
                index_s_ = np.ravel_multi_index(s_, (WORLD_SIZE, WORLD_SIZE))

                # Accumulate the discounted transition coefficient for the
                # successor state and move the reward term to the RHS.
                A[index_s, index_s_] += ACTION_PROB * DISCOUNT
                b[index_s] -= ACTION_PROB * r

    x = np.linalg.solve(A, b)
    draw_image(np.round(x.reshape(WORLD_SIZE, WORLD_SIZE), decimals=2))
    plt.savefig('../images/figure_3_2_linear_system.png')
    plt.close()
def figure_3_5():
    """Run value iteration to the optimal state-value function; save the
    value table and its greedy policy under ``../images/``.
    """
    state_values = np.zeros((WORLD_SIZE, WORLD_SIZE))
    while True:
        # Synchronous value-iteration sweep: back up the best action value.
        updated = np.zeros_like(state_values)
        for row in range(WORLD_SIZE):
            for col in range(WORLD_SIZE):
                action_returns = []
                for action in ACTIONS:
                    (nr, nc), reward = step([row, col], action)
                    action_returns.append(reward + DISCOUNT * state_values[nr, nc])
                updated[row, col] = np.max(action_returns)
        if np.sum(np.abs(updated - state_values)) < 1e-4:
            draw_image(np.round(updated, decimals=2))
            plt.savefig('../images/figure_3_5.png')
            plt.close()
            draw_policy(updated)
            plt.savefig('../images/figure_3_5_policy.png')
            plt.close()
            return
        state_values = updated
if __name__ == '__main__':
    # Regenerate all of the chapter-3 grid-world figures.
    figure_3_2_linear_system()
    figure_3_2()
    figure_3_5()
| 33.158163 | 109 | 0.507617 | 4:
draw_image(np.round(new_value, decimals=2))
plt.savefig('../images/figure_3_5.png')
plt.close()
draw_policy(new_value)
plt.savefig('../images/figure_3_5_policy.png')
plt.close()
break
value = new_value
if __name__ == '__main__':
figure_3_2_linear_system()
figure_3_2()
figure_3_5()
| true | true |
f71f5f797ad336b6fedd52f0f7c38c754c946db7 | 245 | py | Python | mundo 2/aula 12/exer38.py | jonatan098/cursopython | 6e4cbaef6229e230fdbc66d80ec1b5a089887b0d | [
"MIT"
] | null | null | null | mundo 2/aula 12/exer38.py | jonatan098/cursopython | 6e4cbaef6229e230fdbc66d80ec1b5a089887b0d | [
"MIT"
] | null | null | null | mundo 2/aula 12/exer38.py | jonatan098/cursopython | 6e4cbaef6229e230fdbc66d80ec1b5a089887b0d | [
"MIT"
] | 1 | 2020-02-22T17:21:05.000Z | 2020-02-22T17:21:05.000Z | num1 = int(input('digite o primeiro valor: '))
num2 = int(input('digite o segundo valor: '))
if num1 > num2:
print('o primeiro numero e maior')
elif num2 > num1:
print('o segundo numero e maior')
else:
print('os numeros são iguais')
| 27.222222 | 46 | 0.665306 | num1 = int(input('digite o primeiro valor: '))
num2 = int(input('digite o segundo valor: '))
if num1 > num2:
print('o primeiro numero e maior')
elif num2 > num1:
print('o segundo numero e maior')
else:
print('os numeros são iguais')
| true | true |
f71f603e3e2b9119bf19c27949a553a350de4dbb | 3,576 | py | Python | nowcast/workers/ping_erddap.py | SalishSeaCast/SalishSeaNowcast | 947ba6fbb8952c7ae989a3aa96614b900748f55d | [
"Apache-2.0"
] | 4 | 2020-02-06T01:10:13.000Z | 2021-12-11T01:06:10.000Z | nowcast/workers/ping_erddap.py | SalishSeaCast/SalishSeaNowcast | 947ba6fbb8952c7ae989a3aa96614b900748f55d | [
"Apache-2.0"
] | 30 | 2020-02-03T23:54:10.000Z | 2022-03-18T18:50:31.000Z | nowcast/workers/ping_erddap.py | SalishSeaCast/SalishSeaNowcast | 947ba6fbb8952c7ae989a3aa96614b900748f55d | [
"Apache-2.0"
] | null | null | null | # Copyright 2013-2021 The Salish Sea MEOPAR contributors
# and The University of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SalishSeaCast worker that creates flag files to tell the ERDDAP server
to reload datasets for which new results have been downloaded.
"""
import logging
from pathlib import Path
from nemo_nowcast import NowcastWorker
NAME = "ping_erddap"  # worker name; doubles as the logger channel name
logger = logging.getLogger(NAME)
def main():
    """Set up and run the worker.

    For command-line usage see:

    :command:`python -m nowcast.workers.ping_erddap --help`
    """
    worker = NowcastWorker(NAME, description=__doc__)
    worker.init_cli()
    # Single positional argument: the dataset type whose ERDDAP flag
    # file(s) should be touched; the choices set is the closed list of
    # dataset types this worker knows how to map to datasetIDs.
    worker.cli.add_argument(
        "dataset",
        choices={
            "weather",
            "SCVIP-CTD",
            "SEVIP-CTD",
            "USDDL-CTD",
            "TWDP-ferry",
            "VFPA-HADCP",
            "nowcast-green",
            "nemo-forecast",
            "wwatch3-forecast",
            "fvcom-x2-nowcast",
            "fvcom-r12-nowcast",
            "fvcom-forecast",
        },
        help="""
        Type of dataset to notify ERDDAP of:
        'weather' means atmospheric forcing downloaded & processed,
        'SCVIP-CTD' means ONC SCVIP node CTD T&S observations downloaded &
        processed,
        'SEVIP-CTD' means ONC SEVIP node CTD T&S observations downloaded &
        processed,
        'USDDL-CTD' means ONC USDDL node CTD T&S observations downloaded &
        processed,
        'TWDP-ferry' means ONC Tsawwassen/Duke Pt. ferry observations
        downloaded & processed,
        'VFPA-HADCP' means VFPA 2nd Narrows Rail Bridge HADCP observations processed,
        'nowcast-green' means nowcast green ocean run,
        'nemo-forecast' means updated NEMO rolling forecast,
        'wwatch3-forecast' means updated WaveWatch3 rolling forecast,
        'fvcom-x2-nowcast' means updated VHFR FVCOM x2 nowcast run,
        'fvcom-r12-nowcast' means updated VHFR FVCOM r12 nowcast run,
        'fvcom-forecast' means updated VHFR FVCOM x2 rolling forecast
        """,
    )
    # success/failure build the message types reported to the manager.
    worker.run(ping_erddap, success, failure)
    return worker
def success(parsed_args):
    """Log completion and return the per-dataset success message type."""
    logger.info(f"{parsed_args.dataset} ERDDAP dataset flag file(s) created")
    return f"success {parsed_args.dataset}"
def failure(parsed_args):
    """Log the failure and return the per-dataset failure message type."""
    logger.critical(f"{parsed_args.dataset} ERDDAP dataset flag file(s) creation failed")
    return f"failure {parsed_args.dataset}"
def ping_erddap(parsed_args, config, *args):
    """Touch one flag file per datasetID so ERDDAP reloads those datasets.

    Returns a checklist mapping the dataset type to the list of
    datasetIDs that were flagged (empty when none are registered).
    """
    dataset = parsed_args.dataset
    flag_path = Path(config["erddap"]["flag dir"])
    checklist = {dataset: []}
    try:
        dataset_ids = config["erddap"]["datasetIDs"][dataset]
    except KeyError:
        # This run type has no registered ERDDAP datasets; nothing to flag.
        dataset_ids = []
    for dataset_id in dataset_ids:
        (flag_path / dataset_id).touch()
        logger.debug(f"{flag_path / dataset_id} touched")
        checklist[dataset].append(dataset_id)
    return checklist
# Allow running the worker directly as a script.
if __name__ == "__main__":
    main()  # pragma: no cover
| 33.420561 | 85 | 0.661074 |
import logging
from pathlib import Path
from nemo_nowcast import NowcastWorker
NAME = "ping_erddap"
logger = logging.getLogger(NAME)
def main():
worker = NowcastWorker(NAME, description=__doc__)
worker.init_cli()
worker.cli.add_argument(
"dataset",
choices={
"weather",
"SCVIP-CTD",
"SEVIP-CTD",
"USDDL-CTD",
"TWDP-ferry",
"VFPA-HADCP",
"nowcast-green",
"nemo-forecast",
"wwatch3-forecast",
"fvcom-x2-nowcast",
"fvcom-r12-nowcast",
"fvcom-forecast",
},
help="""
Type of dataset to notify ERDDAP of:
'weather' means atmospheric forcing downloaded & processed,
'SCVIP-CTD' means ONC SCVIP node CTD T&S observations downloaded &
processed,
'SEVIP-CTD' means ONC SEVIP node CTD T&S observations downloaded &
processed,
'USDDL-CTD' means ONC USDDL node CTD T&S observations downloaded &
processed,
'TWDP-ferry' means ONC Tsawwassen/Duke Pt. ferry observations
downloaded & processed,
'VFPA-HADCP' means VFPA 2nd Narrows Rail Bridge HADCP observations processed,
'nowcast-green' means nowcast green ocean run,
'nemo-forecast' means updated NEMO rolling forecast,
'wwatch3-forecast' means updated WaveWatch3 rolling forecast,
'fvcom-x2-nowcast' means updated VHFR FVCOM x2 nowcast run,
'fvcom-r12-nowcast' means updated VHFR FVCOM r12 nowcast run,
'fvcom-forecast' means updated VHFR FVCOM x2 rolling forecast
""",
)
worker.run(ping_erddap, success, failure)
return worker
def success(parsed_args):
logger.info(f"{parsed_args.dataset} ERDDAP dataset flag file(s) created")
msg_type = f"success {parsed_args.dataset}"
return msg_type
def failure(parsed_args):
logger.critical(
f"{parsed_args.dataset} ERDDAP dataset flag file(s) creation failed"
)
msg_type = f"failure {parsed_args.dataset}"
return msg_type
def ping_erddap(parsed_args, config, *args):
dataset = parsed_args.dataset
flag_path = Path(config["erddap"]["flag dir"])
checklist = {dataset: []}
try:
for dataset_id in config["erddap"]["datasetIDs"][dataset]:
(flag_path / dataset_id).touch()
logger.debug(f"{flag_path / dataset_id} touched")
checklist[dataset].append(dataset_id)
except KeyError:
pass
return checklist
if __name__ == "__main__":
main()
| true | true |
f71f6077b391c331cf27b94831a1bdac9c70c7a6 | 1,120 | py | Python | Numbers/Key/happy_numbers.py | CicadaMikoto/Projects | ccc3de5184a8dc9fcd108c3ddbe6fd72d6aa380a | [
"MIT"
] | 1 | 2021-01-22T07:50:30.000Z | 2021-01-22T07:50:30.000Z | Numbers/Key/happy_numbers.py | CicadaMikoto/Projects | ccc3de5184a8dc9fcd108c3ddbe6fd72d6aa380a | [
"MIT"
] | null | null | null | Numbers/Key/happy_numbers.py | CicadaMikoto/Projects | ccc3de5184a8dc9fcd108c3ddbe6fd72d6aa380a | [
"MIT"
] | null | null | null | """
Happy Numbers - A happy number is defined by the
following process. Starting with any positive integer,
replace the number by the sum of the squares of its
digits, and repeat the process until the number equals
1 (where it will stay), or it loops endlessly in a
cycle which does not include 1. Those numbers for which
this process ends in 1 are happy numbers, while those
that do not end in 1 are unhappy numbers. Take an input
number from user, and find first 8 happy numbers from
that input.
"""
NUMBERS_REQUIRED = 8  # number of happy numbers required


def is_happy_number(num):
    """Return True if *num* is a happy number.

    Repeatedly replace the number with the sum of the squares of its
    decimal digits; the number is happy if this process reaches 1, and
    unhappy if it falls into a repeating cycle that excludes 1.
    """
    seen = set()  # sums already visited; revisiting one means a cycle
    while True:
        digit_square_sum = sum(int(digit) ** 2 for digit in str(num))
        if digit_square_sum == 1:
            return True
        if digit_square_sum in seen:
            return False
        num = digit_square_sum
        seen.add(num)


if __name__ == '__main__':
    happies = []  # list of happy numbers found
    # Ported from Python 2: `input()` no longer evaluates to int and
    # `print` is a function, so convert explicitly.
    num = int(input('Start at: '))
    while len(happies) != NUMBERS_REQUIRED:
        if is_happy_number(num):
            happies.append(num)
        num += 1
    print(happies)
| 27.317073 | 63 | 0.658036 | """
Happy Numbers - A happy number is defined by the
following process. Starting with any positive integer,
replace the number by the sum of the squares of its
digits, and repeat the process until the number equals
1 (where it will stay), or it loops endlessly in a
cycle which does not include 1. Those numbers for which
this process ends in 1 are happy numbers, while those
that do not end in 1 are unhappy numbers. Take an input
number from user, and find first 8 happy numbers from
that input.
"""
NUMBERS_REQUIRED = 8
def is_happy_number(num):
seen = []
while True:
sum_digits = sum(int(digit) ** 2 for digit in str(num))
if sum_digits == 1:
return True
elif sum_digits in seen:
return False
else:
num = sum_digits
seen.append(num)
if __name__ == '__main__':
happies = []
num = input('Start at: ')
while len(happies) != NUMBERS_REQUIRED:
if is_happy_number(num):
happies.append(num)
num += 1
print happies
| false | true |
f71f61276a4576ec17d6d55cf5e8e0be9bdbeab7 | 918 | py | Python | FILE/file_merge.py | AceCoooool/python-example | 1d0068627210f08d31f027b6a333118d9f743956 | [
"MIT"
] | 2 | 2019-02-15T09:19:44.000Z | 2019-02-15T09:21:01.000Z | FILE/file_merge.py | AceCoooool/python-example | 1d0068627210f08d31f027b6a333118d9f743956 | [
"MIT"
] | null | null | null | FILE/file_merge.py | AceCoooool/python-example | 1d0068627210f08d31f027b6a333118d9f743956 | [
"MIT"
] | null | null | null | import os
import argparse
def file_merge(folder, out_file, ext):
    """Concatenate every ``ext`` file in *folder* into *out_file*.

    Files are merged in sorted name order so the result is deterministic
    (``os.listdir`` order is arbitrary); each file's contents are
    followed by a single newline separator.
    """
    files = sorted(os.path.join(folder, name)
                   for name in os.listdir(folder) if name.endswith(ext))
    # Context manager guarantees the output handle is flushed and closed
    # even if reading one of the inputs fails.
    with open(out_file, 'w') as f:
        for file in files:
            with open(file, 'r') as rf:
                print('File {} read.'.format(file))
                f.write(rf.read() + '\n')
    print('\n File {} written.'.format(out_file))
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='File merge')
    parser.add_argument('--folder', type=str, default='../data/txt')
    parser.add_argument('--out_file', type=str, default='../data/results.txt')
    parser.add_argument('--ext', type=str, default='txt')
    config = parser.parse_args()
    # Create/truncate the output file before merging.
    # NOTE(review): if out_file lives inside folder and matches ext, it
    # is itself picked up by the merge (empty at that point) — confirm
    # this is intended.
    with open(config.out_file, 'w+'):
        pass
    # Normalise the extension so str.endswith() matches '.txt', not 'txt'.
    if config.ext[0] != '.':
        config.ext = '.' + config.ext
    file_merge(config.folder, config.out_file, config.ext)
| 31.655172 | 93 | 0.61329 | import os
import argparse
def file_merge(folder, out_file, ext):
files = [os.path.join(folder, file) for file in os.listdir(folder) if file.endswith(ext)]
with open(out_file, 'w') as f:
for file in files:
with open(file, 'r') as rf:
print('File {} readed.'.format(file))
f.write(rf.read() + '\n')
print('\n File {} wrote.'.format(out_file))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='File merge')
parser.add_argument('--folder', type=str, default='../data/txt')
parser.add_argument('--out_file', type=str, default='../data/results.txt')
parser.add_argument('--ext', type=str, default='txt')
config = parser.parse_args()
with open(config.out_file, 'w+'):
pass
if config.ext[0] != '.':
config.ext = '.' + config.ext
file_merge(config.folder, config.out_file, config.ext)
| true | true |
f71f61538bc51d86e628c8573f4d9f8d27add351 | 5,304 | py | Python | ROMS/ROMS_rotate_compare.py | petercunning/notebook | 5b26f2dc96bcb36434542b397de6ca5fa3b61a0a | [
"MIT"
] | 32 | 2015-01-07T01:48:05.000Z | 2022-03-02T07:07:42.000Z | ROMS/ROMS_rotate_compare.py | petercunning/notebook | 5b26f2dc96bcb36434542b397de6ca5fa3b61a0a | [
"MIT"
] | 1 | 2015-04-13T21:00:18.000Z | 2015-04-13T21:00:18.000Z | ROMS/ROMS_rotate_compare.py | petercunning/notebook | 5b26f2dc96bcb36434542b397de6ca5fa3b61a0a | [
"MIT"
] | 30 | 2015-01-28T09:31:29.000Z | 2022-03-07T03:08:28.000Z | # -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
from pylab import *
import netCDF4
# <codecell>
tidx = 0  # just get the final frame, for now.
scale = 0.03  # quiver arrow scale factor (larger -> longer arrows)
isub = 3  # subsampling stride for quiver arrows
url = 'http://comt.sura.org/thredds/dodsC/comt_2_full/testing/ucsc2.nc'  # ROMS output via OPeNDAP
# <codecell>
def shrink(a, b):
    """Return array shrunk to fit a specified shape by trimming or averaging.

    a = shrink(array, shape)

    array is a numpy ndarray, and shape is a tuple (e.g., from
    array.shape). a is the input array shrunk such that its maximum
    dimensions are given by shape. If shape has more dimensions than
    array, the last dimensions of shape are fit.

    as, bs = shrink(a, b)

    If the second argument is also an array, both a and b are shrunk to
    the dimensions of each other. The input arrays must have the same
    number of dimensions, and the resulting arrays will have the same
    shape.

    Example
    -------

    >>> shrink(rand(10, 10), (5, 9, 18)).shape
    (9, 10)
    """
    if isinstance(b, np.ndarray):
        if not len(a.shape) == len(b.shape):
            # Fixed: Python 3 raise syntax (original used the
            # Python-2-only ``raise Exception, msg`` form).
            raise Exception('input arrays must have the same number of dimensions')
        a = shrink(a, b.shape)
        b = shrink(b, a.shape)
        return (a, b)

    if isinstance(b, int):
        b = (b,)

    if len(a.shape) == 1:  # 1D array is a special case
        dim = b[-1]
        while a.shape[0] > dim:              # only shrink a
            if (a.shape[0] - dim) >= 2:      # trim off edges evenly
                # BUG FIX: the original tested (dim - a.shape[0]) >= 2,
                # which is always negative inside this loop, so edges
                # were never trimmed; this now mirrors the 2D branch.
                a = a[1:-1]
            else:                            # or average adjacent cells
                a = 0.5 * (a[1:] + a[:-1])
    else:
        for dim_idx in range(-(len(a.shape)), 0):
            dim = b[dim_idx]
            a = a.swapaxes(0, dim_idx)       # put working dim first
            while a.shape[0] > dim:          # only shrink a
                if (a.shape[0] - dim) >= 2:  # trim off edges evenly
                    a = a[1:-1, :]
                if (a.shape[0] - dim) == 1:  # or average adjacent cells
                    a = 0.5 * (a[1:, :] + a[:-1, :])
            a = a.swapaxes(0, dim_idx)       # swap working dim back
    return a
# <codecell>
def rot2d(x, y, ang):
    """Rotate the vector(s) (x, y) counter-clockwise by *ang* radians."""
    cos_a = np.cos(ang)
    sin_a = np.sin(ang)
    return x * cos_a - y * sin_a, x * sin_a + y * cos_a
# <codecell>
# NOTE(review): these two exploratory cells reference `nc` before it is
# assigned below (netCDF4.Dataset is only opened in the next cell); as a
# linear script this raises NameError — the notebook cells appear to
# have been exported out of order.
u = nc.variables['u']
shape(u)
# <codecell>
itime=0
u = nc.variables['u'][tidx, itime, :, :]
# <codecell>
nc = netCDF4.Dataset(url)
mask = nc.variables['mask_rho'][:]  # land/sea mask on the rho grid
lon_rho = nc.variables['lon_rho'][:]
lat_rho = nc.variables['lat_rho'][:]
anglev = nc.variables['angle'][:]  # grid rotation angle (radians)
tidx=0
# Surface (last vertical level) velocity components on the staggered grid.
u = nc.variables['u'][tidx, -1, :, :]
v = nc.variables['v'][tidx, -1, :, :]
# Average u and v onto the interior rho points so they are co-located.
u = shrink(u, mask[1:-1, 1:-1].shape)
v = shrink(v, mask[1:-1, 1:-1].shape)
# Rotate from grid-relative to geographic (east/north) components.
u, v = rot2d(u, v, anglev[1:-1, 1:-1])
# <codecell>
spd=sqrt(u*u+v*v)  # speed magnitude for the background colour map
spd=ma.masked_invalid(spd)
# <codecell>
# Interior rho-point coordinates matching the shrunken u/v arrays.
lon_c = lon_rho[1:-1, 1:-1]
lat_c = lat_rho[1:-1, 1:-1]
# <markdowncell>
# ## Now we will make a *plot*
# <codecell>
# Plot 1: speed field + velocity arrows rotated locally from the
# staggered-grid file.
figure = plt.figure(figsize=(12,12))
# Aspect ratio approximates equal x/y distances at this latitude.
subplot(111,aspect=(1.0/cos(mean(lat_c)*pi/180.0)))
pcolormesh(lon_c,lat_c,spd)
q = quiver( lon_c[::isub,::isub], lat_c[::isub,::isub], u[::isub,::isub], v[::isub,::isub],
        scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
plt.quiverkey(q, 0.85, 0.07, 1.0, label=r'1 m s$^{-1}$', coordinates='figure');
# <codecell>
# Second dataset: same model output with velocities already averaged to
# rho points and rotated (u_rho/v_rho), for comparison.
url2='http://comt.sura.org/thredds/dodsC/comt_2_full/testing/newer_ucsc2.nc'
nc = netCDF4.Dataset(url2)
mask = nc.variables['mask_rho'][:]
lon_rho = nc.variables['lon_rho'][:]
lat_rho = nc.variables['lat_rho'][:]
anglev = nc.variables['angle'][:]
tidx=0
u2 = nc.variables['u_rho'][tidx, -1, :, :]
v2 = nc.variables['v_rho'][tidx, -1, :, :]
spd2=sqrt(u2*u2+v2*v2)
spd2=ma.masked_invalid(spd2)
# <codecell>
# Plot 2: same style of plot from the pre-rotated dataset.
figure = plt.figure(figsize=(12,12))
subplot(111,aspect=(1.0/cos(mean(lat_rho)*pi/180.0)))
pcolormesh(lon_rho,lat_rho,spd2)
q = quiver( lon_rho[::isub,::isub], lat_rho[::isub,::isub], u2[::isub,::isub], v2[::isub,::isub],
        scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
plt.quiverkey(q, 0.85, 0.07, 1.0, label=r'1 m s$^{-1}$', coordinates='figure');
# <codecell>
# Plots 3 & 4: overlay both vector fields (black = pre-rotated file,
# red = locally rotated) at full resolution over a zoomed region, with
# the draw order swapped between the two figures.
figure = plt.figure(figsize=(12,12))
subplot(111,aspect=(1.0/cos(mean(lat_rho)*pi/180.0)))
isub=1
q = quiver( lon_rho[::isub,::isub], lat_rho[::isub,::isub], u2[::isub,::isub], v2[::isub,::isub],
        scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
q2 = quiver( lon_c[::isub,::isub], lat_c[::isub,::isub], u[::isub,::isub], v[::isub,::isub],
        scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003,color='red')
axis([-135, -130,30, 32]);
# <codecell>
figure = plt.figure(figsize=(12,12))
subplot(111,aspect=(1.0/cos(mean(lat_rho)*pi/180.0)))
isub=1
q2 = quiver( lon_c[::isub,::isub], lat_c[::isub,::isub], u[::isub,::isub], v[::isub,::isub],
        scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003,color='red')
q = quiver( lon_rho[::isub,::isub], lat_rho[::isub,::isub], u2[::isub,::isub], v2[::isub,::isub],
        scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
axis([-135, -130,30, 32]);
# <codecell>
| 28.826087 | 98 | 0.575792 |
from pylab import *
import netCDF4
tidx = 0
scale = 0.03
isub = 3
url = 'http://comt.sura.org/thredds/dodsC/comt_2_full/testing/ucsc2.nc'
def shrink(a,b):
"""Return array shrunk to fit a specified shape by triming or averaging.
a = shrink(array, shape)
array is an numpy ndarray, and shape is a tuple (e.g., from
array.shape). a is the input array shrunk such that its maximum
dimensions are given by shape. If shape has more dimensions than
array, the last dimensions of shape are fit.
as, bs = shrink(a, b)
If the second argument is also an array, both a and b are shrunk to
the dimensions of each other. The input arrays must have the same
number of dimensions, and the resulting arrays will have the same
shape.
Example
-------
>>> shrink(rand(10, 10), (5, 9, 18)).shape
(9, 10)
>>> map(shape, shrink(rand(10, 10, 10), rand(5, 9, 18)))
[(5, 9, 10), (5, 9, 10)]
"""
if isinstance(b, np.ndarray):
if not len(a.shape) == len(b.shape):
raise Exception, \
'input arrays must have the same number of dimensions'
a = shrink(a,b.shape)
b = shrink(b,a.shape)
return (a, b)
if isinstance(b, int):
b = (b,)
if len(a.shape) == 1:
dim = b[-1]
while a.shape[0] > dim:
if (dim - a.shape[0]) >= 2:
a = a[1:-1]
else:
a = 0.5*(a[1:] + a[:-1])
else:
for dim_idx in range(-(len(a.shape)),0):
dim = b[dim_idx]
a = a.swapaxes(0,dim_idx)
while a.shape[0] > dim:
if (a.shape[0] - dim) >= 2:
a = a[1:-1,:]
if (a.shape[0] - dim) == 1:
a = 0.5*(a[1:,:] + a[:-1,:])
a = a.swapaxes(0,dim_idx)
return a
def rot2d(x, y, ang):
'''rotate vectors by geometric angle'''
xr = x*np.cos(ang) - y*np.sin(ang)
yr = x*np.sin(ang) + y*np.cos(ang)
return xr, yr
u = nc.variables['u']
shape(u)
itime=0
u = nc.variables['u'][tidx, itime, :, :]
nc = netCDF4.Dataset(url)
mask = nc.variables['mask_rho'][:]
lon_rho = nc.variables['lon_rho'][:]
lat_rho = nc.variables['lat_rho'][:]
anglev = nc.variables['angle'][:]
tidx=0
u = nc.variables['u'][tidx, -1, :, :]
v = nc.variables['v'][tidx, -1, :, :]
u = shrink(u, mask[1:-1, 1:-1].shape)
v = shrink(v, mask[1:-1, 1:-1].shape)
u, v = rot2d(u, v, anglev[1:-1, 1:-1])
spd=sqrt(u*u+v*v)
spd=ma.masked_invalid(spd)
lon_c = lon_rho[1:-1, 1:-1]
lat_c = lat_rho[1:-1, 1:-1]
aspect=(1.0/cos(mean(lat_c)*pi/180.0)))
pcolormesh(lon_c,lat_c,spd)
q = quiver( lon_c[::isub,::isub], lat_c[::isub,::isub], u[::isub,::isub], v[::isub,::isub],
scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
plt.quiverkey(q, 0.85, 0.07, 1.0, label=r'1 m s$^{-1}$', coordinates='figure');
url2='http://comt.sura.org/thredds/dodsC/comt_2_full/testing/newer_ucsc2.nc'
nc = netCDF4.Dataset(url2)
mask = nc.variables['mask_rho'][:]
lon_rho = nc.variables['lon_rho'][:]
lat_rho = nc.variables['lat_rho'][:]
anglev = nc.variables['angle'][:]
tidx=0
u2 = nc.variables['u_rho'][tidx, -1, :, :]
v2 = nc.variables['v_rho'][tidx, -1, :, :]
spd2=sqrt(u2*u2+v2*v2)
spd2=ma.masked_invalid(spd2)
figure = plt.figure(figsize=(12,12))
subplot(111,aspect=(1.0/cos(mean(lat_rho)*pi/180.0)))
pcolormesh(lon_rho,lat_rho,spd2)
q = quiver( lon_rho[::isub,::isub], lat_rho[::isub,::isub], u2[::isub,::isub], v2[::isub,::isub],
scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
plt.quiverkey(q, 0.85, 0.07, 1.0, label=r'1 m s$^{-1}$', coordinates='figure');
figure = plt.figure(figsize=(12,12))
subplot(111,aspect=(1.0/cos(mean(lat_rho)*pi/180.0)))
isub=1
q = quiver( lon_rho[::isub,::isub], lat_rho[::isub,::isub], u2[::isub,::isub], v2[::isub,::isub],
scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
q2 = quiver( lon_c[::isub,::isub], lat_c[::isub,::isub], u[::isub,::isub], v[::isub,::isub],
scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003,color='red')
axis([-135, -130,30, 32]);
figure = plt.figure(figsize=(12,12))
subplot(111,aspect=(1.0/cos(mean(lat_rho)*pi/180.0)))
isub=1
q2 = quiver( lon_c[::isub,::isub], lat_c[::isub,::isub], u[::isub,::isub], v[::isub,::isub],
scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003,color='red')
q = quiver( lon_rho[::isub,::isub], lat_rho[::isub,::isub], u2[::isub,::isub], v2[::isub,::isub],
scale=1.0/scale, pivot='middle', zorder=1e35, width=0.003)
axis([-135, -130,30, 32]);
| false | true |
f71f61bbd250bef9d676ace26f835628c544adaa | 2,169 | py | Python | api/generated/python/azure-iiot-opc-twin/models/value_write_request_api_model.py | jaz230/Industrial-IoT | bd4c5abfe579cbb7086a621e8381978e6c70a563 | [
"MIT"
] | 1 | 2020-01-22T12:03:08.000Z | 2020-01-22T12:03:08.000Z | api/generated/python/azure-iiot-opc-twin/models/value_write_request_api_model.py | likithadt/Industrial-IoT | d4ea7b330eff08455ca0556fed76aa74d2034da5 | [
"MIT"
] | null | null | null | api/generated/python/azure-iiot-opc-twin/models/value_write_request_api_model.py | likithadt/Industrial-IoT | d4ea7b330eff08455ca0556fed76aa74d2034da5 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator 2.3.33.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ValueWriteRequestApiModel(Model):
    """Request payload describing a value write to an OPC UA node.

    :param value: Value to write. The system attempts to convert the value
        to the node's data type, e.g. comma separated value strings become
        arrays. (Mandatory)
    :type value: object
    :param node_id: Node id to write the value to.
    :type node_id: str
    :param browse_path: Optional path from the NodeId instance to the
        actual node.
    :type browse_path: list[str]
    :param data_type: Built in datatype for the value; may come from browse
        or be a built in type. (default: best effort)
    :type data_type: str
    :param index_range: Index range to write.
    :type index_range: str
    :param header:
    :type header: ~azure-iiot-opc-twin.models.RequestHeaderApiModel
    """

    # msrest validation rules: only ``value`` is mandatory.
    _validation = {
        'value': {'required': True},
    }

    # msrest serialization map: attribute name -> wire key and wire type.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'browse_path': {'key': 'browsePath', 'type': '[str]'},
        'value': {'key': 'value', 'type': 'object'},
        'data_type': {'key': 'dataType', 'type': 'str'},
        'index_range': {'key': 'indexRange', 'type': 'str'},
        'header': {'key': 'header', 'type': 'RequestHeaderApiModel'},
    }

    def __init__(self, value, node_id=None, browse_path=None, data_type=None, index_range=None, header=None):
        super(ValueWriteRequestApiModel, self).__init__()
        # The mandatory payload first, then the optional addressing and
        # formatting attributes.
        self.value = value
        self.node_id = node_id
        self.browse_path = browse_path
        self.data_type = data_type
        self.index_range = index_range
        self.header = header
| 36.15 | 109 | 0.608575 |
from msrest.serialization import Model
class ValueWriteRequestApiModel(Model):
    """Value write request model.

    Describes a write of ``value`` to the OPC UA node identified by
    ``node_id`` (optionally refined through ``browse_path``). Only
    ``value`` is required; see ``_validation`` below.
    """
    # msrest validation rules: ``value`` must always be supplied.
    _validation = {
        'value': {'required': True},
    }
    # msrest serialization map: attribute name -> wire key and wire type.
    _attribute_map = {
        'node_id': {'key': 'nodeId', 'type': 'str'},
        'browse_path': {'key': 'browsePath', 'type': '[str]'},
        'value': {'key': 'value', 'type': 'object'},
        'data_type': {'key': 'dataType', 'type': 'str'},
        'index_range': {'key': 'indexRange', 'type': 'str'},
        'header': {'key': 'header', 'type': 'RequestHeaderApiModel'},
    }
    def __init__(self, value, node_id=None, browse_path=None, data_type=None, index_range=None, header=None):
        super(ValueWriteRequestApiModel, self).__init__()
        self.node_id = node_id
        self.browse_path = browse_path
        self.value = value
        self.data_type = data_type
        self.index_range = index_range
        self.header = header
| true | true |
f71f61e720b4ca7b2e7ace2c709ec4297289840e | 130,096 | py | Python | salt/states/pkg.py | waynegemmell/salt | 88056db3589cccab8956c2ae4f9b733acce89461 | [
"Apache-2.0"
] | 1 | 2020-09-10T07:38:20.000Z | 2020-09-10T07:38:20.000Z | salt/states/pkg.py | waynegemmell/salt | 88056db3589cccab8956c2ae4f9b733acce89461 | [
"Apache-2.0"
] | 4 | 2016-05-10T22:05:34.000Z | 2016-05-20T18:10:13.000Z | salt/states/pkg.py | waynegemmell/salt | 88056db3589cccab8956c2ae4f9b733acce89461 | [
"Apache-2.0"
] | 1 | 2020-12-02T01:20:28.000Z | 2020-12-02T01:20:28.000Z | """
Installation of packages using OS package managers such as yum or apt-get
=========================================================================
.. note::
On minions running systemd>=205, as of version 2015.8.12, 2016.3.3, and
2016.11.0, `systemd-run(1)`_ is now used to isolate commands which modify
installed packages from the ``salt-minion`` daemon's control group. This is
done to keep systemd from killing the package manager commands spawned by
Salt, when Salt updates itself (see ``KillMode`` in the `systemd.kill(5)`_
manpage for more information). If desired, usage of `systemd-run(1)`_ can
be suppressed by setting a :mod:`config option <salt.modules.config.get>`
called ``systemd.use_scope``, with a value of ``False`` (no quotes).
.. _`systemd-run(1)`: https://www.freedesktop.org/software/systemd/man/systemd-run.html
.. _`systemd.kill(5)`: https://www.freedesktop.org/software/systemd/man/systemd.kill.html
Salt can manage software packages via the pkg state module, packages can be
set up to be installed, latest, removed and purged. Package management
declarations are typically rather simple:
.. code-block:: yaml
vim:
pkg.installed
A more involved example involves pulling from a custom repository.
.. code-block:: yaml
base:
pkgrepo.managed:
- name: ppa:wolfnet/logstash
- dist: precise
- file: /etc/apt/sources.list.d/logstash.list
- keyid: 28B04E4A
- keyserver: keyserver.ubuntu.com
logstash:
pkg.installed:
- fromrepo: ppa:wolfnet/logstash
Multiple packages can also be installed with the use of the pkgs
state module
.. code-block:: yaml
dotdeb.repo:
pkgrepo.managed:
- name: deb http://packages.dotdeb.org wheezy-php55 all
- dist: wheezy-php55
      - file: /etc/apt/sources.list.d/dotdeb.list
- keyid: 89DF5277
- keyserver: keys.gnupg.net
- refresh_db: true
php.packages:
pkg.installed:
- fromrepo: wheezy-php55
- pkgs:
- php5-fpm
- php5-cli
- php5-curl
.. warning::
Package names are currently case-sensitive. If the minion is using a
package manager which is not case-sensitive (such as :mod:`pkgng
<salt.modules.pkgng>`), then this state will fail if the proper case is not
used. This will be addressed in a future release of Salt.
"""
import fnmatch
import logging
import os
import re
import salt.utils.pkg
import salt.utils.platform
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
from salt.modules.pkg_resource import _repack_pkgs
from salt.output import nested
from salt.utils.functools import namespaced_function as _namespaced_function
from salt.utils.odict import OrderedDict as _OrderedDict
# pylint: disable=invalid-name
# Rebind the pkg_resource helper so it executes against this module's
# globals (and therefore sees this module's loader dunders).
_repack_pkgs = _namespaced_function(_repack_pkgs, globals())
if salt.utils.platform.is_windows():
    # pylint: disable=import-error,no-name-in-module,unused-import
    from urllib.parse import urlparse as _urlparse
    from salt.exceptions import SaltRenderError
    import collections
    import datetime
    import errno
    import time
    from functools import cmp_to_key
    # pylint: disable=import-error
    # pylint: enable=unused-import
    # Pull in the win_pkg execution-module helpers and re-namespace each one
    # so it runs with this state module's globals.
    from salt.modules.win_pkg import _get_package_info
    from salt.modules.win_pkg import get_repo_data
    from salt.modules.win_pkg import _get_repo_details
    from salt.modules.win_pkg import _refresh_db_conditional
    from salt.modules.win_pkg import refresh_db
    from salt.modules.win_pkg import genrepo
    from salt.modules.win_pkg import _repo_process_pkg_sls
    from salt.modules.win_pkg import _get_latest_pkg_version
    from salt.modules.win_pkg import _reverse_cmp_pkg_versions
    _get_package_info = _namespaced_function(_get_package_info, globals())
    get_repo_data = _namespaced_function(get_repo_data, globals())
    _get_repo_details = _namespaced_function(_get_repo_details, globals())
    _refresh_db_conditional = _namespaced_function(_refresh_db_conditional, globals())
    refresh_db = _namespaced_function(refresh_db, globals())
    genrepo = _namespaced_function(genrepo, globals())
    _repo_process_pkg_sls = _namespaced_function(_repo_process_pkg_sls, globals())
    _get_latest_pkg_version = _namespaced_function(_get_latest_pkg_version, globals())
    _reverse_cmp_pkg_versions = _namespaced_function(
        _reverse_cmp_pkg_versions, globals()
    )
    # The following imports are used by the namespaced win_pkg funcs
    # and need to be included in their globals.
    # pylint: disable=import-error,unused-import
    import salt.utils.msgpack as msgpack
    from salt.utils.versions import LooseVersion
    # pylint: enable=import-error,unused-import
# pylint: enable=invalid-name
log = logging.getLogger(__name__)
def __virtual__():
    """
    Gate these states on the presence of a pkg execution module (detected
    or assigned) for this minion.
    """
    if "pkg.install" not in __salt__:
        return (False, "pkg module could not be loaded")
    return True
def _get_comparison_spec(pkgver):
    """
    Split ``pkgver`` into an ``(operator, version)`` tuple. When no operator
    (or a bare ``=``) is present the comparison is treated as equality and
    ``==`` is returned as the operator.
    """
    operator, version = salt.utils.pkg.split_comparison(pkgver.strip())
    return ("==" if operator in ("=", "") else operator, version)
def _check_ignore_epoch(oper, desired_version, ignore_epoch=None):
"""
Conditionally ignore epoch, but only under all of the following
circumstances:
1. No value for ignore_epoch passed to state
2. desired_version has no epoch
3. oper does not contain a "<" or ">"
"""
if ignore_epoch is not None:
return ignore_epoch
return "<" not in oper and ">" not in oper and ":" not in desired_version
def _parse_version_string(version_conditions_string):
    """
    Parse a comma-separated string of version conditions into a list of
    ``(operator, version)`` two-tuples. An empty (or whitespace-only) string
    yields an empty list.
    """
    stripped = version_conditions_string.strip()
    if not stripped:
        return []
    return [_get_comparison_spec(condition) for condition in stripped.split(",")]
def _fulfills_version_string(
    installed_versions,
    version_conditions_string,
    ignore_epoch=None,
    allow_updates=False,
):
    """
    Return True when at least one installed version satisfies every
    condition in ``version_conditions_string``, otherwise False.

    installed_versions
        The installed versions

    version_conditions_string
        The string containing all version conditions. E.G.
        1.2.3-4
        >=1.2.3-4
        >=1.2.3-4, <2.3.4-5
        >=1.2.3-4, <2.3.4-5, !=1.2.4-1

    ignore_epoch : None
        When a package version contains a non-zero epoch (e.g.
        ``1:3.14.159-2.el7``) and a specific version of a package is
        desired, set this option to ``True`` to ignore the epoch when
        comparing versions.

        .. versionchanged:: 3001
            If no value for this argument is passed to the state that calls
            this helper function, and ``version_conditions_string`` contains
            no epoch or greater-than/less-than, then the epoch will be
            ignored.

    allow_updates : False
        Allow the package to be updated outside Salt's control (e.g. auto
        updates on Windows). A package on the minion may then be newer than
        the latest available in the repository without forcing a reinstall.
        (Only applicable when a single strict version condition is given,
        e.g. version: 2.0.6~ubuntu3)
    """
    conditions = _parse_version_string(version_conditions_string)
    # A lone "==" condition is relaxed to ">=" when updates are allowed.
    relax_strict_equality = allow_updates and len(conditions) == 1

    def _satisfies_all(candidate):
        for operator, wanted_version in conditions:
            if relax_strict_equality and operator == "==":
                operator = ">="
            if not _fulfills_version_spec(
                [candidate], operator, wanted_version, ignore_epoch=ignore_epoch
            ):
                return False
        return True

    return any(_satisfies_all(version) for version in installed_versions)
def _fulfills_version_spec(versions, oper, desired_version, ignore_epoch=None):
    """
    Return True when any of the installed ``versions`` satisfies the single
    comparison ``oper``/``desired_version``, otherwise False. Equality
    comparisons also accept shell-style globs via fnmatch.
    """
    cmp_func = __salt__.get("pkg.version_cmp")
    # FreeBSD may hand us a "with_origin" dict wrapper; unwrap it first.
    if (
        salt.utils.platform.is_freebsd()
        and isinstance(versions, dict)
        and "version" in versions
    ):
        versions = versions["version"]
    for installed in versions:
        if oper == "==" and fnmatch.fnmatch(installed, desired_version):
            return True
        if salt.utils.versions.compare(
            ver1=installed,
            oper=oper,
            ver2=desired_version,
            cmp_func=cmp_func,
            ignore_epoch=_check_ignore_epoch(oper, desired_version, ignore_epoch),
        ):
            return True
    return False
def _find_unpurge_targets(desired, **kwargs):
    """
    Find packages which are marked to be purged but can't yet be removed
    because they are dependencies for other installed packages. These are the
    packages which will need to be 'unpurged' because they are part of
    pkg.installed states. This really just applies to Debian-based Linuxes.
    """
    # Hoist the pkg.list_pkgs call out of the comprehension: the previous
    # version evaluated it inside the ``if`` condition, issuing a package
    # database query once per desired package instead of once overall.
    listed = __salt__["pkg.list_pkgs"](purge_desired=True, **kwargs)
    return [x for x in desired if x in listed]
def _find_download_targets(
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    skip_suggestions=False,
    ignore_epoch=None,
    **kwargs
):
    """
    Inspect the arguments to pkg.downloaded and discover what packages need to
    be downloaded. Return a dict of packages to download.

    ``name``/``version`` or ``pkgs`` mirror the pkg.downloaded state
    arguments; remaining kwargs are passed through to the pkg module calls.
    When nothing needs downloading, or when the input is invalid, a complete
    state return dict is returned instead of a targets dict.
    """
    cur_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    if pkgs:
        # pylint: disable=not-callable
        to_download = _repack_pkgs(pkgs, normalize=normalize)
        # pylint: enable=not-callable
        if not to_download:
            # Badly-formatted SLS
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        # Single-package form: build a one-entry mapping, normalizing the
        # name if the platform's pkg module supports it.
        if normalize:
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            to_download = {_normalize_name(name): version}
        else:
            to_download = {name: version}
        cver = cur_pkgs.get(name, {})
        if name in to_download:
            # Package already downloaded, no need to download again
            if cver and version in cver:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": (
                        "Version {} of package '{}' is already downloaded".format(
                            version, name
                        )
                    ),
                }
            # if cver is not an empty string, the package is already downloaded
            elif cver and version is None:
                # The package is downloaded
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already downloaded".format(name),
                }
    version_spec = False
    if not skip_suggestions:
        # Best-effort repo lookup: a CommandExecutionError here is ignored
        # and processing simply continues without suggestions.
        try:
            problems = _preflight_check(to_download, **kwargs)
        except CommandExecutionError:
            pass
        else:
            comments = []
            if problems.get("no_suggest"):
                comments.append(
                    "The following package(s) were not found, and no "
                    "possible matches were found in the package db: "
                    "{}".format(", ".join(sorted(problems["no_suggest"])))
                )
            if problems.get("suggest"):
                for pkgname, suggestions in problems["suggest"].items():
                    comments.append(
                        "Package '{}' not found (possible matches: {})".format(
                            pkgname, ", ".join(suggestions)
                        )
                    )
            if comments:
                if len(comments) > 1:
                    comments.append("")
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": ". ".join(comments).rstrip(),
                }
    # Find out which packages will be targeted in the call to pkg.download
    # Check current downloaded versions against specified versions
    targets = {}
    problems = []
    for pkgname, pkgver in to_download.items():
        cver = cur_pkgs.get(pkgname, {})
        # Package not yet downloaded, so add to targets
        if not cver:
            targets[pkgname] = pkgver
            continue
        # No version specified but package is already downloaded
        elif cver and not pkgver:
            continue
        version_spec = True
        try:
            if not _fulfills_version_string(
                cver.keys(), pkgver, ignore_epoch=ignore_epoch
            ):
                targets[pkgname] = pkgver
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        # All specified packages are already downloaded
        msg = "All specified packages{} are already downloaded".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_advisory_targets(name=None, advisory_ids=None, **kwargs):
    """
    Work out which advisory patches from a pkg.patch_installed call still
    need to be installed. Returns either a list of advisory ids to install
    or, when there is nothing left to do, a complete state return dict.
    """
    installed_patches = __salt__["pkg.list_installed_patches"](**kwargs)
    if advisory_ids:
        wanted = advisory_ids
    else:
        # Single-advisory form: short-circuit when it is already present.
        if installed_patches.get(name, {}):
            return {
                "name": name,
                "changes": {},
                "result": True,
                "comment": "Advisory patch {} is already installed".format(name),
            }
        wanted = [name]
    # Only advisories that are not installed yet become targets.
    targets = [patch for patch in wanted if not installed_patches.get(patch, {})]
    if not targets:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "All specified advisory patches are already installed",
        }
    return targets
def _find_remove_targets(
    name=None, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs
):
    """
    Inspect the arguments to pkg.removed and discover what packages need to
    be removed. Return a dict of packages to remove.

    When every requested package is already absent (or the input is invalid)
    a complete state return dict is returned instead of a targets list.
    """
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True
    cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    if pkgs:
        # pylint: disable=not-callable
        to_remove = _repack_pkgs(pkgs, normalize=normalize)
        # pylint: enable=not-callable
        if not to_remove:
            # Badly-formatted SLS
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        _normalize_name = __salt__.get("pkg.normalize_name", lambda pkgname: pkgname)
        to_remove = {_normalize_name(name): version}
    version_spec = False
    # Find out which packages will be targeted in the call to pkg.remove
    # Check current versions against specified versions
    targets = []
    problems = []
    for pkgname, pkgver in to_remove.items():
        # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names
        origin = bool(re.search("/", pkgname))
        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == pkgname]
        else:
            cver = cur_pkgs.get(pkgname, [])
        # Package not installed, no need to remove
        if not cver:
            continue
        # No version specified and pkg is installed
        elif __salt__["pkg_resource.version_clean"](pkgver) is None:
            targets.append(pkgname)
            continue
        version_spec = True
        try:
            # Only remove when the installed version matches the requested
            # version specification.
            if _fulfills_version_string(cver, pkgver, ignore_epoch=ignore_epoch):
                targets.append(pkgname)
            else:
                log.debug(
                    "Current version (%s) did not match desired version "
                    "specification (%s), will not remove",
                    cver,
                    pkgver,
                )
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        # All specified packages are already absent
        msg = "All specified packages{} are already absent".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_install_targets(
    name=None,
    version=None,
    pkgs=None,
    sources=None,
    skip_suggestions=False,
    pkg_verify=False,
    normalize=True,
    ignore_epoch=None,
    reinstall=False,
    refresh=False,
    **kwargs
):
    """
    Inspect the arguments to pkg.installed and discover what packages need to
    be installed. Return a dict of desired packages

    On success this returns a 7-tuple:
    ``(desired, targets, to_unpurge, to_reinstall, altered_files, warnings,
    was_refreshed)``. When nothing needs doing, or an error occurred, a
    complete state return dict is returned instead.
    """
    was_refreshed = False
    # pkgs and sources are mutually exclusive inputs.
    if all((pkgs, sources)):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": 'Only one of "pkgs" and "sources" is permitted.',
        }
    # dict for packages that fail pkg.verify and their altered files
    altered_files = {}
    # Get the ignore_types list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("ignore_types") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "ignore_types" in x
    ):
        ignore_types = next(
            x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
        )
    else:
        ignore_types = []
    # Get the verify_options list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("verify_options") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "verify_options" in x
    ):
        verify_options = next(
            x.get("verify_options") for x in pkg_verify if "verify_options" in x
        )
    else:
        verify_options = []
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True
    if salt.utils.platform.is_windows():
        # Windows requires a refresh to establish a pkg db if refresh=True, so
        # add it to the kwargs.
        kwargs["refresh"] = refresh
    # Capability resolution is opt-in and requires pkg.list_provides.
    resolve_capabilities = (
        kwargs.get("resolve_capabilities", False) and "pkg.list_provides" in __salt__
    )
    try:
        cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
        # and/or idiom: cur_prov falls back to an empty dict when capability
        # resolution is disabled or list_provides returns nothing.
        cur_prov = (
            resolve_capabilities and __salt__["pkg.list_provides"](**kwargs) or dict()
        )
    except CommandExecutionError as exc:
        return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
    if salt.utils.platform.is_windows() and kwargs.pop("refresh", False):
        # We already refreshed when we called pkg.list_pkgs
        was_refreshed = True
        refresh = False
    if any((pkgs, sources)):
        if pkgs:
            # pylint: disable=not-callable
            desired = _repack_pkgs(pkgs, normalize=normalize)
            # pylint: enable=not-callable
        elif sources:
            desired = __salt__["pkg_resource.pack_sources"](
                sources,
                normalize=normalize,
            )
        if not desired:
            # Badly-formatted SLS
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted '{}' parameter. See minion log.".format(
                    "pkgs" if pkgs else "sources"
                ),
            }
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
    else:
        # Single-package form.
        if salt.utils.platform.is_windows():
            # pylint: disable=not-callable
            pkginfo = _get_package_info(name, saltenv=kwargs["saltenv"])
            # pylint: enable=not-callable
            if not pkginfo:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": "Package {} not found in the repository.".format(name),
                }
            if version is None:
                # pylint: disable=not-callable
                version = _get_latest_pkg_version(pkginfo)
                # pylint: enable=not-callable
        if normalize:
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            desired = {_normalize_name(name): version}
        else:
            desired = {name: version}
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
        # FreeBSD pkg supports `openjdk` and `java/openjdk7` package names
        origin = bool(re.search("/", name))
        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == name]
        else:
            cver = cur_pkgs.get(name, [])
        if name not in to_unpurge:
            if version and version in cver and not reinstall and not pkg_verify:
                # The package is installed and is the correct version
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Version {} of package '{}' is already installed".format(
                        version, name
                    ),
                }
            # if cver is not an empty string, the package is already installed
            elif cver and version is None and not reinstall and not pkg_verify:
                # The package is installed
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already installed".format(name),
                }
    version_spec = False
    if not sources:
        # Check for alternate package names if strict processing is not
        # enforced. Takes extra time. Disable for improved performance
        if not skip_suggestions:
            # Perform platform-specific pre-flight checks
            not_installed = {
                name: version
                for name, version in desired.items()
                if not (
                    name in cur_pkgs
                    and (
                        version is None
                        or _fulfills_version_string(
                            cur_pkgs[name], version, ignore_epoch=ignore_epoch
                        )
                    )
                )
            }
            if not_installed:
                try:
                    problems = _preflight_check(not_installed, **kwargs)
                except CommandExecutionError:
                    pass
                else:
                    comments = []
                    if problems.get("no_suggest"):
                        comments.append(
                            "The following package(s) were not found, and no "
                            "possible matches were found in the package db: "
                            "{}".format(", ".join(sorted(problems["no_suggest"])))
                        )
                    if problems.get("suggest"):
                        for pkgname, suggestions in problems["suggest"].items():
                            comments.append(
                                "Package '{}' not found (possible matches: {})".format(
                                    pkgname, ", ".join(suggestions)
                                )
                            )
                    if comments:
                        if len(comments) > 1:
                            comments.append("")
                        return {
                            "name": name,
                            "changes": {},
                            "result": False,
                            "comment": ". ".join(comments).rstrip(),
                        }
    # Resolve the latest package version for any packages with "latest" in the
    # package version
    wants_latest = [] if sources else [x for x, y in desired.items() if y == "latest"]
    if wants_latest:
        resolved_latest = __salt__["pkg.latest_version"](
            *wants_latest, refresh=refresh, **kwargs
        )
        if len(wants_latest) == 1:
            resolved_latest = {wants_latest[0]: resolved_latest}
        if refresh:
            was_refreshed = True
            refresh = False
        # pkg.latest_version returns an empty string when the package is
        # up-to-date. So check the currently-installed packages. If found, the
        # resolved latest version will be the currently installed one from
        # cur_pkgs. If not found, then the package doesn't exist and the
        # resolved latest version will be None.
        for key in resolved_latest:
            if not resolved_latest[key]:
                if key in cur_pkgs:
                    resolved_latest[key] = cur_pkgs[key][-1]
                else:
                    resolved_latest[key] = None
        # Update the desired versions with the ones we resolved
        desired.update(resolved_latest)
    # Find out which packages will be targeted in the call to pkg.install
    targets = {}
    to_reinstall = {}
    problems = []
    warnings = []
    failed_verify = False
    for package_name, version_string in desired.items():
        cver = cur_pkgs.get(package_name, [])
        if resolve_capabilities and not cver and package_name in cur_prov:
            # Fall back to the package that provides this capability.
            cver = cur_pkgs.get(cur_prov.get(package_name)[0], [])
        # Package not yet installed, so add to targets
        if not cver:
            targets[package_name] = version_string
            continue
        if sources:
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            elif "lowpkg.bin_pkg_info" not in __salt__:
                continue
            # Metadata parser is available, cache the file and derive the
            # package's name and version
            err = "Unable to cache {0}: {1}"
            try:
                cached_path = __salt__["cp.cache_file"](
                    version_string, saltenv=kwargs["saltenv"]
                )
            except CommandExecutionError as exc:
                problems.append(err.format(version_string, exc))
                continue
            if not cached_path:
                problems.append(err.format(version_string, "file not found"))
                continue
            elif not os.path.exists(cached_path):
                problems.append("{} does not exist on minion".format(version_string))
                continue
            source_info = __salt__["lowpkg.bin_pkg_info"](cached_path)
            if source_info is None:
                warnings.append(
                    "Failed to parse metadata for {}".format(version_string)
                )
                continue
            else:
                verstr = source_info["version"]
        else:
            verstr = version_string
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            if not __salt__["pkg_resource.check_extra_requirements"](
                package_name, version_string
            ):
                targets[package_name] = version_string
                continue
            # No version specified and pkg is installed
            elif __salt__["pkg_resource.version_clean"](version_string) is None:
                if (not reinstall) and pkg_verify:
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        # Verification found altered files; schedule reinstall.
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                continue
        version_fulfilled = False
        allow_updates = bool(not sources and kwargs.get("allow_updates"))
        try:
            version_fulfilled = _fulfills_version_string(
                cver, verstr, ignore_epoch=ignore_epoch, allow_updates=allow_updates
            )
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
        # Compare desired version against installed version.
        version_spec = True
        if not version_fulfilled:
            if reinstall:
                to_reinstall[package_name] = version_string
            else:
                version_conditions = _parse_version_string(version_string)
                if pkg_verify and any(
                    oper == "==" for oper, version in version_conditions
                ):
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                else:
                    log.debug(
                        "Current version (%s) did not match desired version "
                        "specification (%s), adding to installation targets",
                        cver,
                        version_string,
                    )
                    targets[package_name] = version_string
    if failed_verify:
        problems.append(failed_verify)
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not any((targets, to_unpurge, to_reinstall)):
        # All specified packages are installed
        msg = "All specified packages are already installed{0}"
        msg = msg.format(
            " and are at the desired version" if version_spec and not sources else ""
        )
        ret = {"name": name, "changes": {}, "result": True, "comment": msg}
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret
    return (
        desired,
        targets,
        to_unpurge,
        to_reinstall,
        altered_files,
        warnings,
        was_refreshed,
    )
def _verify_install(desired, new_pkgs, ignore_epoch=None, new_caps=None):
    """
    Compare the post-install package list against what the SLS requested.

    Returns a two-tuple of lists: names whose installed version satisfies
    the desired spec, and names that failed.
    """
    ok = []
    failed = []
    if not new_caps:
        new_caps = dict()
    for pkgname, pkgver in desired.items():
        # FreeBSD origins (`java/openjdk7`) and Homebrew tap-prefixed names
        # on macOS both embed a slash; look the package up accordingly.
        has_origin = "/" in pkgname
        if __grains__["os"] == "FreeBSD" and has_origin:
            cver = [k for k, v in new_pkgs.items() if v["origin"] == pkgname]
        elif __grains__["os"] == "MacOS" and has_origin:
            cver = new_pkgs.get(pkgname, new_pkgs.get(pkgname.split("/")[-1]))
        elif __grains__["os"] == "OpenBSD":
            cver = new_pkgs.get(pkgname.split("%")[0])
        elif __grains__["os_family"] == "Debian":
            cver = new_pkgs.get(pkgname.split("=")[0])
        else:
            cver = new_pkgs.get(pkgname)
        if not cver and pkgname in new_caps:
            # Fall back to whatever package provides the capability.
            cver = new_pkgs.get(new_caps.get(pkgname)[0])
        if not cver:
            failed.append(pkgname)
            continue
        # "latest", an empty/unspecified version, or a matching "*" glob
        # prefix are satisfied without a full version comparison.
        trivially_ok = (
            pkgver == "latest"
            or not __salt__["pkg_resource.version_clean"](pkgver)
            or (pkgver.endswith("*") and cver[0].startswith(pkgver[:-1]))
        )
        if trivially_ok or _fulfills_version_string(
            cver, pkgver, ignore_epoch=ignore_epoch
        ):
            ok.append(pkgname)
        else:
            failed.append(pkgname)
    return ok, failed
def _get_desired_pkg(name, desired):
"""
Helper function that retrieves and nicely formats the desired pkg (and
version if specified) so that helpful information can be printed in the
comment for the state.
"""
if not desired[name] or desired[name].startswith(("<", ">", "=")):
oper = ""
else:
oper = "="
return "{}{}{}".format(name, oper, "" if not desired[name] else desired[name])
def _preflight_check(desired, fromrepo, **kwargs):
    """
    Run the platform's ``pkg.check_db`` (when available) over the desired
    package names and report the ones that could not be found, split into
    those with name suggestions and those without.
    """
    if "pkg.check_db" not in __salt__:
        return {}
    findings = {"suggest": {}, "no_suggest": []}
    pkginfo = __salt__["pkg.check_db"](
        *list(desired.keys()), fromrepo=fromrepo, **kwargs
    )
    for pkgname, info in pkginfo.items():
        if info["found"] is not False:
            continue
        if info["suggestions"]:
            findings["suggest"][pkgname] = info["suggestions"]
        else:
            findings["no_suggest"].append(pkgname)
    return findings
def _nested_output(obj):
    """
    Serialize ``obj`` with Salt's nested outputter and strip trailing
    whitespace.
    """
    # The nested outputter reads its configuration from a module-level
    # __opts__, so hand it ours before rendering.
    nested.__opts__ = __opts__
    return nested.output(obj).rstrip()
def _resolve_capabilities(pkgs, refresh=False, **kwargs):
    """
    Exchange capability names in ``pkgs`` for real package names where the
    resolution is distinct. This feature is enabled by setting the parameter
    ``resolve_capabilities`` to True.

    Returns a two-tuple of the (possibly rewritten) package list and a bool
    telling the caller whether a refresh still needs to be run. When
    ``pkgs`` is empty, or the platform's pkg module does not implement
    ``pkg.resolve_capabilities``, the input is returned unchanged.
    """
    if not pkgs or "pkg.resolve_capabilities" not in __salt__:
        return pkgs, refresh
    resolved = __salt__["pkg.resolve_capabilities"](pkgs, refresh=refresh, **kwargs)
    # The refresh (if requested) has been consumed by the call above.
    return resolved, False
def installed(
    name,
    version=None,
    refresh=None,
    fromrepo=None,
    skip_verify=False,
    skip_suggestions=False,
    pkgs=None,
    sources=None,
    allow_updates=False,
    pkg_verify=False,
    normalize=True,
    ignore_epoch=None,
    reinstall=False,
    update_holds=False,
    **kwargs
):
    """
    Ensure that the package is installed, and that it is the correct version
    (if specified).
    .. note::
        Any argument which is either a) not explicitly defined for this state,
        or b) not a global state argument like ``saltenv``, or
        ``reload_modules``, will be passed through to the call to
        ``pkg.install`` to install the package(s). For example, you can include
        a ``disablerepo`` argument on platforms that use yum/dnf to disable
        that repo:
    .. code-block:: yaml
        mypkg:
          pkg.installed:
            - disablerepo: base,updates
    To see what is supported, check :ref:`this page <virtual-pkg>` to find
    the documentation for your platform's ``pkg`` module, then look at the
    documentation for the ``install`` function.
    Any argument that is passed through to the ``install`` function, which
    is not defined for that function, will be silently ignored.
    :param str name:
        The name of the package to be installed. This parameter is ignored if
        either "pkgs" or "sources" is used. Additionally, please note that this
        option can only be used to install packages from a software repository.
        To install a package file manually, use the "sources" option detailed
        below.
    :param str version:
        Install a specific version of a package. This option is ignored if
        "sources" is used. Currently, this option is supported
        for the following pkg providers: :mod:`apt <salt.modules.aptpkg>`,
        :mod:`ebuild <salt.modules.ebuild>`,
        :mod:`pacman <salt.modules.pacman>`,
        :mod:`pkgin <salt.modules.pkgin>`,
        :mod:`win_pkg <salt.modules.win_pkg>`,
        :mod:`yumpkg <salt.modules.yumpkg>`, and
        :mod:`zypper <salt.modules.zypper>`. The version number includes the
        release designation where applicable, to allow Salt to target a
        specific release of a given version. When in doubt, using the
        ``pkg.latest_version`` function for an uninstalled package will tell
        you the version available.
        .. code-block:: bash
            # salt myminion pkg.latest_version vim-enhanced
            myminion:
                2:7.4.160-1.el7
        .. important::
            As of version 2015.8.7, for distros which use yum/dnf, packages
            which have a version with a nonzero epoch (that is, versions which
            start with a number followed by a colon like in the
            ``pkg.latest_version`` output above) must have the epoch included
            when specifying the version number. For example:
            .. code-block:: yaml
                vim-enhanced:
                  pkg.installed:
                    - version: 2:7.4.160-1.el7
            In version 2015.8.9, an **ignore_epoch** argument has been added to
            :py:mod:`pkg.installed <salt.states.pkg.installed>`,
            :py:mod:`pkg.removed <salt.states.pkg.removed>`, and
            :py:mod:`pkg.purged <salt.states.pkg.purged>` states, which
            causes the epoch to be disregarded when the state checks to see if
            the desired version was installed.
        Also, while this function is not yet implemented for all pkg frontends,
        :mod:`pkg.list_repo_pkgs <salt.modules.yumpkg.list_repo_pkgs>` will
        show all versions available in the various repositories for a given
        package, irrespective of whether or not it is installed.
        .. code-block:: bash
            # salt myminion pkg.list_repo_pkgs bash
            myminion:
            ----------
                bash:
                    - 4.2.46-21.el7_3
                    - 4.2.46-20.el7_2
        This function was first added for :mod:`pkg.list_repo_pkgs
        <salt.modules.yumpkg.list_repo_pkgs>` in 2014.1.0, and was expanded to
        :py:func:`Debian/Ubuntu <salt.modules.aptpkg.list_repo_pkgs>` and
        :py:func:`Arch Linux <salt.modules.pacman.list_repo_pkgs>`-based
        distros in the 2017.7.0 release.
        The version strings returned by either of these functions can be used
        as version specifiers in pkg states.
        You can install a specific version when using the ``pkgs`` argument by
        including the version after the package:
        .. code-block:: yaml
            common_packages:
              pkg.installed:
                - pkgs:
                  - unzip
                  - dos2unix
                  - salt-minion: 2015.8.5-1.el6
        If the version given is the string ``latest``, the latest available
        package version will be installed à la ``pkg.latest``.
        **WILDCARD VERSIONS**
        As of the 2017.7.0 release, this state now supports wildcards in
        package versions for SUSE SLES/Leap/Tumbleweed, Debian/Ubuntu,
        RHEL/CentOS, Arch Linux, and their derivatives. Using wildcards can be
        useful for packages where the release name is built into the version in
        some way, such as for RHEL/CentOS which typically has version numbers
        like ``1.2.34-5.el7``. An example of the usage for this would be:
        .. code-block:: yaml
            mypkg:
              pkg.installed:
                - version: '1.2.34*'
        Keep in mind that using wildcard versions will result in a slower state
        run since Salt must gather the available versions of the specified
        packages and figure out which of them match the specified wildcard
        expression.
    :param bool refresh:
        This parameter controls whether or not the package repo database is
        updated prior to installing the requested package(s).
        If ``True``, the package database will be refreshed (``apt-get
        update`` or equivalent, depending on platform) before installing.
        If ``False``, the package database will *not* be refreshed before
        installing.
        If unset, then Salt treats package database refreshes differently
        depending on whether or not a ``pkg`` state has been executed already
        during the current Salt run. Once a refresh has been performed in a
        ``pkg`` state, for the remainder of that Salt run no other refreshes
        will be performed for ``pkg`` states which do not explicitly set
        ``refresh`` to ``True``. This prevents needless additional refreshes
        from slowing down the Salt run.
    :param str cache_valid_time:
        .. versionadded:: 2016.11.0
        This parameter sets the value in seconds after which the cache is
        marked as invalid, and a cache update is necessary. This overwrites
        the ``refresh`` parameter's default behavior.
        Example:
        .. code-block:: yaml
            httpd:
              pkg.installed:
                - fromrepo: mycustomrepo
                - skip_verify: True
                - skip_suggestions: True
                - version: 2.0.6~ubuntu3
                - refresh: True
                - cache_valid_time: 300
                - allow_updates: True
                - hold: False
        In this case, a refresh will not take place for 5 minutes since the last
        ``apt-get update`` was executed on the system.
        .. note::
            This parameter is available only on Debian based distributions and
            has no effect on the rest.
    :param str fromrepo:
        Specify a repository from which to install
        .. note::
            Distros which use APT (Debian, Ubuntu, etc.) do not have a concept
            of repositories, in the same way as YUM-based distros do. When a
            source is added, it is assigned to a given release. Consider the
            following source configuration:
            .. code-block:: text
                deb http://ppa.launchpad.net/saltstack/salt/ubuntu precise main
            The packages provided by this source would be made available via
            the ``precise`` release, therefore ``fromrepo`` would need to be
            set to ``precise`` for Salt to install the package from this
            source.
            Having multiple sources in the same release may result in the
            default install candidate being newer than what is desired. If this
            is the case, the desired version must be specified using the
            ``version`` parameter.
            If the ``pkgs`` parameter is being used to install multiple
            packages in the same state, then instead of using ``version``,
            use the method of version specification described in the **Multiple
            Package Installation Options** section below.
            Running the shell command ``apt-cache policy pkgname`` on a minion
            can help elucidate the APT configuration and aid in properly
            configuring states:
            .. code-block:: bash
                root@saltmaster:~# salt ubuntu01 cmd.run 'apt-cache policy ffmpeg'
                ubuntu01:
                    ffmpeg:
                      Installed: (none)
                      Candidate: 7:0.10.11-1~precise1
                      Version table:
                          7:0.10.11-1~precise1 0
                                500 http://ppa.launchpad.net/jon-severinsson/ffmpeg/ubuntu/ precise/main amd64 Packages
                          4:0.8.10-0ubuntu0.12.04.1 0
                                500 http://us.archive.ubuntu.com/ubuntu/ precise-updates/main amd64 Packages
                                500 http://security.ubuntu.com/ubuntu/ precise-security/main amd64 Packages
                          4:0.8.1-0ubuntu1 0
                                500 http://us.archive.ubuntu.com/ubuntu/ precise/main amd64 Packages
            The release is located directly after the source's URL. The actual
            release name is the part before the slash, so to install version
            **4:0.8.10-0ubuntu0.12.04.1** either ``precise-updates`` or
            ``precise-security`` could be used for the ``fromrepo`` value.
    :param bool skip_verify:
        Skip the GPG verification check for the package to be installed
    :param bool skip_suggestions:
        Force strict package naming. Disables lookup of package alternatives.
        .. versionadded:: 2014.1.1
    :param bool resolve_capabilities:
        Turn on resolving capabilities. This allow one to name "provides" or alias names for packages.
        .. versionadded:: 2018.3.0
    :param bool allow_updates:
        Allow the package to be updated outside Salt's control (e.g. auto
        updates on Windows). This means a package on the Minion can have a
        newer version than the latest available in the repository without
        enforcing a re-installation of the package.
        .. versionadded:: 2014.7.0
        Example:
        .. code-block:: yaml
            httpd:
              pkg.installed:
                - fromrepo: mycustomrepo
                - skip_verify: True
                - skip_suggestions: True
                - version: 2.0.6~ubuntu3
                - refresh: True
                - allow_updates: True
                - hold: False
    :param bool pkg_verify:
        .. versionadded:: 2014.7.0
        For requested packages that are already installed and would not be
        targeted for upgrade or downgrade, use pkg.verify to determine if any
        of the files installed by the package have been altered. If files have
        been altered, the reinstall option of pkg.install is used to force a
        reinstall. Types to ignore can be passed to pkg.verify. Additionally,
        ``verify_options`` can be used to modify further the behavior of
        pkg.verify. See examples below. Currently, this option is supported
        for the following pkg providers: :mod:`yumpkg <salt.modules.yumpkg>`.
        Examples:
        .. code-block:: yaml
            httpd:
              pkg.installed:
                - version: 2.2.15-30.el6.centos
                - pkg_verify: True
        .. code-block:: yaml
            mypkgs:
              pkg.installed:
                - pkgs:
                  - foo
                  - bar: 1.2.3-4
                  - baz
                - pkg_verify:
                  - ignore_types:
                    - config
                    - doc
        .. code-block:: yaml
            mypkgs:
              pkg.installed:
                - pkgs:
                  - foo
                  - bar: 1.2.3-4
                  - baz
                - pkg_verify:
                  - ignore_types:
                    - config
                    - doc
                  - verify_options:
                    - nodeps
                    - nofiledigest
    :param list ignore_types:
        List of types to ignore when verifying the package
        .. versionadded:: 2014.7.0
    :param list verify_options:
        List of additional options to pass when verifying the package. These
        options will be added to the ``rpm -V`` command, prepended with ``--``
        (for example, when ``nodeps`` is passed in this option, ``rpm -V`` will
        be run with ``--nodeps``).
        .. versionadded:: 2016.11.0
    :param bool normalize:
        Normalize the package name by removing the architecture, if the
        architecture of the package is different from the architecture of the
        operating system. The ability to disable this behavior is useful for
        poorly-created packages which include the architecture as an actual
        part of the name, such as kernel modules which match a specific kernel
        version.
        .. versionadded:: 2014.7.0
        Example:
        .. code-block:: yaml
            gpfs.gplbin-2.6.32-279.31.1.el6.x86_64:
              pkg.installed:
                - normalize: False
    :param bool ignore_epoch:
        If this option is not explicitly set, and there is no epoch in the
        desired package version, the epoch will be implicitly ignored. Set this
        argument to ``True`` to explicitly ignore the epoch, and ``False`` to
        strictly enforce it.
        .. versionadded:: 2015.8.9
        .. versionchanged:: 3001
            In prior releases, the default behavior was to strictly enforce
            epochs unless this argument was set to ``True``.
    |
    **MULTIPLE PACKAGE INSTALLATION OPTIONS: (not supported in pkgng)**
    :param list pkgs:
        A list of packages to install from a software repository. All packages
        listed under ``pkgs`` will be installed via a single command.
        .. code-block:: yaml
            mypkgs:
              pkg.installed:
                - pkgs:
                  - foo
                  - bar
                  - baz
                - hold: True
        ``NOTE:`` For :mod:`apt <salt.modules.aptpkg>`,
        :mod:`ebuild <salt.modules.ebuild>`,
        :mod:`pacman <salt.modules.pacman>`,
        :mod:`winrepo <salt.modules.win_pkg>`,
        :mod:`yumpkg <salt.modules.yumpkg>`, and
        :mod:`zypper <salt.modules.zypper>`,
        version numbers can be specified
        in the ``pkgs`` argument. For example:
        .. code-block:: yaml
            mypkgs:
              pkg.installed:
                - pkgs:
                  - foo
                  - bar: 1.2.3-4
                  - baz
        Additionally, :mod:`ebuild <salt.modules.ebuild>`, :mod:`pacman
        <salt.modules.pacman>`, :mod:`zypper <salt.modules.zypper>`,
        :mod:`yum/dnf <salt.modules.yumpkg>`, and :mod:`apt
        <salt.modules.aptpkg>` support the ``<``, ``<=``, ``>=``, and ``>``
        operators for more control over what versions will be installed. For
        example:
        .. code-block:: yaml
            mypkgs:
              pkg.installed:
                - pkgs:
                  - foo
                  - bar: '>=1.2.3-4'
                  - baz
        ``NOTE:`` When using comparison operators, the expression must be enclosed
        in quotes to avoid a YAML render error.
        With :mod:`ebuild <salt.modules.ebuild>` is also possible to specify a
        use flag list and/or if the given packages should be in
        package.accept_keywords file and/or the overlay from which you want the
        package to be installed. For example:
        .. code-block:: yaml
            mypkgs:
              pkg.installed:
                - pkgs:
                  - foo: '~'
                  - bar: '~>=1.2:slot::overlay[use,-otheruse]'
                  - baz
    :param list sources:
        A list of packages to install, along with the source URI or local path
        from which to install each package. In the example below, ``foo``,
        ``bar``, ``baz``, etc. refer to the name of the package, as it would
        appear in the output of the ``pkg.version`` or ``pkg.list_pkgs`` salt
        CLI commands.
        .. code-block:: yaml
            mypkgs:
              pkg.installed:
                - sources:
                  - foo: salt://rpms/foo.rpm
                  - bar: http://somesite.org/bar.rpm
                  - baz: ftp://someothersite.org/baz.rpm
                  - qux: /minion/path/to/qux.rpm
    **PLATFORM-SPECIFIC ARGUMENTS**
    These are specific to each OS. If it does not apply to the execution
    module for your OS, it is ignored.
    :param bool hold:
        Force the package to be held at the current installed version.
        Supported on YUM/DNF & APT based systems.
        .. versionadded:: 2014.7.0
        Supported on Zypper-based systems.
        .. versionadded:: 3003
    :param bool update_holds:
        If ``True``, and this function would update the package version, any
        packages which are being held will be temporarily unheld so that they
        can be updated. Otherwise, if this function attempts to update a held
        package, the held package(s) will be skipped and the state will fail.
        By default, this parameter is set to ``False``.
        Supported on YUM/DNF & APT based systems.
        .. versionadded:: 2016.11.0
        Supported on Zypper-based systems.
        .. versionadded:: 3003
    :param list names:
        A list of packages to install from a software repository. Each package
        will be installed individually by the package manager.
        .. warning::
            Unlike ``pkgs``, the ``names`` parameter cannot specify a version.
            In addition, it makes a separate call to the package management
            frontend to install each package, whereas ``pkgs`` makes just a
            single call. It is therefore recommended to use ``pkgs`` instead of
            ``names`` to install multiple packages, both for the additional
            features and the performance improvement that it brings.
    :param bool install_recommends:
        Whether to install the packages marked as recommended. Default is
        ``True``. Currently only works with APT-based systems.
        .. versionadded:: 2015.5.0
        .. code-block:: yaml
            httpd:
              pkg.installed:
                - install_recommends: False
    :param bool only_upgrade:
        Only upgrade the packages, if they are already installed. Default is
        ``False``. Currently only works with APT-based systems.
        .. versionadded:: 2015.5.0
        .. code-block:: yaml
            httpd:
              pkg.installed:
                - only_upgrade: True
        .. note::
            If this parameter is set to True and the package is not already
            installed, the state will fail.
    :param bool report_reboot_exit_codes:
        If the installer exits with a recognized exit code indicating that
        a reboot is required, the module function
        *win_system.set_reboot_required_witnessed*
        will be called, preserving the knowledge of this event
        for the remainder of the current boot session. For the time being,
        ``3010`` is the only recognized exit code,
        but this is subject to future refinement.
        The value of this param
        defaults to ``True``. This parameter has no effect
        on non-Windows systems.
        .. versionadded:: 2016.11.0
        .. code-block:: yaml
            ms vcpp installed:
              pkg.installed:
                - name: ms-vcpp
                - version: 10.0.40219
                - report_reboot_exit_codes: False
    :return:
        A dictionary containing the state of the software installation
    :rtype dict:
    .. note::
        The ``pkg.installed`` state supports the usage of ``reload_modules``.
        This functionality allows you to force Salt to reload all modules. In
        many cases, Salt is clever enough to transparently reload the modules.
        For example, if you install a package, Salt reloads modules because some
        other module or state might require the package which was installed.
        However, there are some edge cases where this may not be the case, which
        is what ``reload_modules`` is meant to resolve.
        You should only use ``reload_modules`` if your ``pkg.installed`` does some
        sort of installation where if you do not reload the modules future items
        in your state which rely on the software being installed will fail. Please
        see the :ref:`Reloading Modules <reloading-modules>` documentation for more
        information.
    .. seealso:: unless and onlyif
        If running pkg commands together with :ref:`aggregate <mod-aggregate-state>`
        isn't an option, you can use the :ref:`creates <creates-requisite>`,
        :ref:`unless <unless-requisite>`, or :ref:`onlyif <onlyif-requisite>`
        syntax to skip a full package run. This can be helpful in large environments
        with multiple states that include requisites for packages to be installed.
        .. code-block:: yaml
            # Using creates for a simple single-factor check
            install_nginx:
              pkg.installed:
                - name: nginx
                - creates:
                  - /etc/nginx/nginx.conf
        .. code-block:: yaml
            # Using file.file_exists for a single-factor check
            install_nginx:
              pkg.installed:
                - name: nginx
                - unless:
                  - fun: file.file_exists
                    args:
                      - /etc/nginx/nginx.conf
            # Using unless with a shell test
            install_nginx:
              pkg.installed:
                - name: nginx
                - unless: test -f /etc/nginx/nginx.conf
        .. code-block:: yaml
            # Using file.search for a two-factor check
            install_nginx:
              pkg.installed:
                - name: nginx
                - unless:
                  - fun: file.search
                    args:
                      - /etc/nginx/nginx.conf
                      - 'user www-data;'
        The above examples use different methods to reasonably ensure
        that a package has already been installed. First, with checking for a
        file that would be created with the package. Second, by checking for
        specific text within a file that would be created or managed by salt.
        With these requisists satisfied, creates/unless will return ``True`` and the
        ``pkg.installed`` state will be skipped.
        .. code-block:: bash
            # Example of state run without unless used
            salt 'saltdev' state.apply nginx
            saltdev:
            ----------
                      ID: install_nginx
                Function: pkg.installed
                    Name: nginx
                  Result: True
                 Comment: All specified packages are already installed
                 Started: 20:11:56.388331
                Duration: 4290.0 ms
                 Changes:
            # Example of state run using unless requisite
            salt 'saltdev' state.apply nginx
            saltdev:
            ----------
                      ID: install_nginx
                Function: pkg.installed
                    Name: nginx
                  Result: True
                 Comment: unless condition is true
                 Started: 20:10:50.659215
                Duration: 1530.0 ms
                 Changes:
        The result is a reduction of almost 3 seconds. In larger environments,
        small reductions in waiting time can add up.
        :ref:`Unless Requisite <unless-requisite>`
    """
    # An explicitly empty ``pkgs`` list means there is nothing to do; report
    # success without calling into the pkg execution module.
    if isinstance(pkgs, list) and len(pkgs) == 0:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "No packages to install provided",
        }
    # If just a name (and optionally a version) is passed, just pack them into
    # the pkgs argument.
    if name and not any((pkgs, sources)):
        if version:
            pkgs = [{name: version}]
            version = None
        else:
            pkgs = [name]
    # Propagate the current saltenv to all execution-module calls below.
    kwargs["saltenv"] = __env__
    refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
    # check if capabilities should be checked and modify the requested packages
    # accordingly.
    if pkgs:
        pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
    # Normalize pkg_verify: anything that is not a list (or exactly True)
    # collapses to a plain bool.
    if not isinstance(pkg_verify, list):
        pkg_verify = pkg_verify is True
    if (pkg_verify or isinstance(pkg_verify, list)) and "pkg.verify" not in __salt__:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "pkg.verify not implemented",
        }
    # Version numbers can arrive from YAML as ints/floats; coerce to str so
    # downstream comparisons behave consistently.
    if not isinstance(version, str) and version is not None:
        version = str(version)
    kwargs["allow_updates"] = allow_updates
    result = _find_install_targets(
        name,
        version,
        pkgs,
        sources,
        fromrepo=fromrepo,
        skip_suggestions=skip_suggestions,
        pkg_verify=pkg_verify,
        normalize=normalize,
        ignore_epoch=ignore_epoch,
        reinstall=reinstall,
        refresh=refresh,
        **kwargs
    )
    try:
        # On success, _find_install_targets returns a 7-tuple; on failure (or
        # nothing to do) it returns a finished state dict, making this unpack
        # raise ValueError (handled below).
        (
            desired,
            targets,
            to_unpurge,
            to_reinstall,
            altered_files,
            warnings,
            was_refreshed,
        ) = result
        if was_refreshed:
            refresh = False
    except ValueError:
        # _find_install_targets() found no targets or encountered an error,
        # so ``result`` is already a complete state return dict.
        # check that the hold function is available
        if "pkg.hold" in __salt__ and "hold" in kwargs:
            try:
                action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
                hold_ret = __salt__[action](name=name, pkgs=pkgs, sources=sources)
            except (CommandExecutionError, SaltInvocationError) as exc:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": str(exc),
                }
            if "result" in hold_ret and not hold_ret["result"]:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": (
                        "An error was encountered while "
                        "holding/unholding package(s): {}".format(hold_ret["comment"])
                    ),
                }
            else:
                # Partition per-package hold results and merge them into the
                # state return dict produced by _find_install_targets.
                modified_hold = [
                    hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
                ]
                not_modified_hold = [
                    hold_ret[x]
                    for x in hold_ret
                    if not hold_ret[x]["changes"] and hold_ret[x]["result"]
                ]
                failed_hold = [
                    hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
                ]
                for i in modified_hold:
                    result["comment"] += ".\n{}".format(i["comment"])
                    result["result"] = i["result"]
                    result["changes"][i["name"]] = i["changes"]
                for i in not_modified_hold:
                    result["comment"] += ".\n{}".format(i["comment"])
                    result["result"] = i["result"]
                for i in failed_hold:
                    result["comment"] += ".\n{}".format(i["comment"])
                    result["result"] = i["result"]
        return result
    if to_unpurge and "lowpkg.unpurge" not in __salt__:
        ret = {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "lowpkg.unpurge not implemented",
        }
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret
    # Remove any targets not returned by _find_install_targets
    if pkgs:
        pkgs = [dict([(x, y)]) for x, y in targets.items()]
        pkgs.extend([dict([(x, y)]) for x, y in to_reinstall.items()])
    elif sources:
        oldsources = sources
        sources = [x for x in oldsources if next(iter(list(x.keys()))) in targets]
        sources.extend(
            [x for x in oldsources if next(iter(list(x.keys()))) in to_reinstall]
        )
    comment = []
    changes = {"installed": {}}
    # Test mode: report what would happen without calling pkg.install.
    if __opts__["test"]:
        if targets:
            if sources:
                _targets = targets
            else:
                _targets = [_get_desired_pkg(x, targets) for x in targets]
            # NOTE(review): _targets is computed but unused below; the summary
            # joins the raw target names instead — confirm this is intended.
            summary = ", ".join(targets)
            changes["installed"].update(
                {x: {"new": "installed", "old": ""} for x in targets}
            )
            comment.append(
                "The following packages would be installed/updated: {}".format(summary)
            )
        if to_unpurge:
            comment.append(
                "The following packages would have their selection status "
                "changed from 'purge' to 'install': {}".format(", ".join(to_unpurge))
            )
            changes["installed"].update(
                {x: {"new": "installed", "old": ""} for x in to_unpurge}
            )
        if to_reinstall:
            # Add a comment for each package in to_reinstall with its
            # pkg.verify output
            if reinstall:
                reinstall_targets = []
                for reinstall_pkg in to_reinstall:
                    if sources:
                        reinstall_targets.append(reinstall_pkg)
                    else:
                        reinstall_targets.append(
                            _get_desired_pkg(reinstall_pkg, to_reinstall)
                        )
                changes["installed"].update(
                    {x: {"new": "installed", "old": ""} for x in reinstall_targets}
                )
                msg = "The following packages would be reinstalled: "
                msg += ", ".join(reinstall_targets)
                comment.append(msg)
            else:
                for reinstall_pkg in to_reinstall:
                    if sources:
                        pkgstr = reinstall_pkg
                    else:
                        pkgstr = _get_desired_pkg(reinstall_pkg, to_reinstall)
                    comment.append(
                        "Package '{}' would be reinstalled because the "
                        "following files have been altered:".format(pkgstr)
                    )
                    changes["installed"].update({reinstall_pkg: {}})
                    comment.append(_nested_output(altered_files[reinstall_pkg]))
        ret = {
            "name": name,
            "changes": changes,
            "result": None,
            "comment": "\n".join(comment),
        }
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret
    modified_hold = None
    not_modified_hold = None
    failed_hold = None
    # Perform the actual install/reinstall in a single pkg.install call.
    if targets or to_reinstall:
        try:
            pkg_ret = __salt__["pkg.install"](
                name=None,
                refresh=refresh,
                version=version,
                fromrepo=fromrepo,
                skip_verify=skip_verify,
                pkgs=pkgs,
                sources=sources,
                reinstall=bool(to_reinstall),
                normalize=normalize,
                update_holds=update_holds,
                ignore_epoch=ignore_epoch,
                **kwargs
            )
        except CommandExecutionError as exc:
            ret = {"name": name, "result": False}
            if exc.info:
                # Get information for state return from the exception.
                ret["changes"] = exc.info.get("changes", {})
                ret["comment"] = exc.strerror_without_changes
            else:
                ret["changes"] = {}
                ret[
                    "comment"
                ] = "An error was encountered while installing package(s): {}".format(
                    exc
                )
            if warnings:
                ret.setdefault("warnings", []).extend(warnings)
            return ret
        if refresh:
            refresh = False
        if isinstance(pkg_ret, dict):
            changes["installed"].update(pkg_ret)
        elif isinstance(pkg_ret, str):
            comment.append(pkg_ret)
            # Code below will be looking for a dictionary. If this is a string
            # it means that there was an exception raised and that no packages
            # changed, so now that we have added this error to the comments we
            # set this to an empty dictionary so that the code below which
            # checks reinstall targets works.
            pkg_ret = {}
    # Apply/remove the hold on all desired packages after installation.
    if "pkg.hold" in __salt__ and "hold" in kwargs:
        try:
            action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
            hold_ret = __salt__[action](name=name, pkgs=desired)
        except (CommandExecutionError, SaltInvocationError) as exc:
            comment.append(str(exc))
            ret = {
                "name": name,
                "changes": changes,
                "result": False,
                "comment": "\n".join(comment),
            }
            if warnings:
                ret.setdefault("warnings", []).extend(warnings)
            return ret
        else:
            if "result" in hold_ret and not hold_ret["result"]:
                ret = {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": (
                        "An error was encountered while "
                        "holding/unholding package(s): {}".format(hold_ret["comment"])
                    ),
                }
                if warnings:
                    ret.setdefault("warnings", []).extend(warnings)
                return ret
            else:
                # Partition per-package hold results for reporting below.
                modified_hold = [
                    hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
                ]
                not_modified_hold = [
                    hold_ret[x]
                    for x in hold_ret
                    if not hold_ret[x]["changes"] and hold_ret[x]["result"]
                ]
                failed_hold = [
                    hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
                ]
    if to_unpurge:
        changes["purge_desired"] = __salt__["lowpkg.unpurge"](*to_unpurge)
    # Analyze pkg.install results for packages in targets
    if sources:
        modified = [x for x in changes["installed"] if x in targets]
        not_modified = [
            x for x in desired if x not in targets and x not in to_reinstall
        ]
        failed = [x for x in targets if x not in modified]
    else:
        if __grains__["os"] == "FreeBSD":
            kwargs["with_origin"] = True
        new_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
        if (
            kwargs.get("resolve_capabilities", False)
            and "pkg.list_provides" in __salt__
        ):
            new_caps = __salt__["pkg.list_provides"](**kwargs)
        else:
            new_caps = {}
        _ok, failed = _verify_install(
            desired, new_pkgs, ignore_epoch=ignore_epoch, new_caps=new_caps
        )
        modified = [x for x in _ok if x in targets]
        not_modified = [x for x in _ok if x not in targets and x not in to_reinstall]
        failed = [x for x in failed if x in targets]
    # If there was nothing unpurged, just set the changes dict to the contents
    # of changes['installed'].
    if not changes.get("purge_desired"):
        changes = changes["installed"]
    if modified:
        if sources:
            summary = ", ".join(modified)
        else:
            summary = ", ".join([_get_desired_pkg(x, desired) for x in modified])
        # Short summaries list the packages; longer ones just give a count.
        if len(summary) < 20:
            comment.append(
                "The following packages were installed/updated: {}".format(summary)
            )
        else:
            comment.append(
                "{} targeted package{} {} installed/updated.".format(
                    len(modified),
                    "s" if len(modified) > 1 else "",
                    "were" if len(modified) > 1 else "was",
                )
            )
    if modified_hold:
        for i in modified_hold:
            change_name = i["name"]
            if change_name in changes:
                comment.append(i["comment"])
                if len(changes[change_name]["new"]) > 0:
                    changes[change_name]["new"] += "\n"
                changes[change_name]["new"] += "{}".format(i["changes"]["new"])
                if len(changes[change_name]["old"]) > 0:
                    changes[change_name]["old"] += "\n"
                changes[change_name]["old"] += "{}".format(i["changes"]["old"])
            else:
                comment.append(i["comment"])
                changes[change_name] = {}
                changes[change_name]["new"] = "{}".format(i["changes"]["new"])
    # Any requested packages that were not targeted for install or reinstall
    if not_modified:
        if sources:
            summary = ", ".join(not_modified)
        else:
            summary = ", ".join([_get_desired_pkg(x, desired) for x in not_modified])
        if len(not_modified) <= 20:
            comment.append(
                "The following packages were already installed: {}".format(summary)
            )
        else:
            comment.append(
                "{} targeted package{} {} already installed".format(
                    len(not_modified),
                    "s" if len(not_modified) > 1 else "",
                    "were" if len(not_modified) > 1 else "was",
                )
            )
    if not_modified_hold:
        for i in not_modified_hold:
            comment.append(i["comment"])
    result = True
    if failed:
        if sources:
            summary = ", ".join(failed)
        else:
            summary = ", ".join([_get_desired_pkg(x, desired) for x in failed])
        comment.insert(
            0, "The following packages failed to install/update: {}".format(summary)
        )
        result = False
    if failed_hold:
        for i in failed_hold:
            comment.append(i["comment"])
        result = False
    # Get the ignore_types list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("ignore_types") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "ignore_types" in x
    ):
        ignore_types = next(
            x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
        )
    else:
        ignore_types = []
    # Get the verify_options list if any from the pkg_verify argument
    if isinstance(pkg_verify, list) and any(
        x.get("verify_options") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "verify_options" in x
    ):
        verify_options = next(
            x.get("verify_options") for x in pkg_verify if "verify_options" in x
        )
    else:
        verify_options = []
    # Rerun pkg.verify for packages in to_reinstall to determine failed
    modified = []
    failed = []
    for reinstall_pkg in to_reinstall:
        if reinstall:
            if reinstall_pkg in pkg_ret:
                modified.append(reinstall_pkg)
            else:
                failed.append(reinstall_pkg)
        elif pkg_verify:
            # No need to wrap this in a try/except because we would already
            # have caught invalid arguments earlier.
            verify_result = __salt__["pkg.verify"](
                reinstall_pkg,
                ignore_types=ignore_types,
                verify_options=verify_options,
                **kwargs
            )
            if verify_result:
                failed.append(reinstall_pkg)
                altered_files[reinstall_pkg] = verify_result
            else:
                modified.append(reinstall_pkg)
    if modified:
        # Add a comment for each package in modified with its pkg.verify output
        for modified_pkg in modified:
            if sources:
                pkgstr = modified_pkg
            else:
                pkgstr = _get_desired_pkg(modified_pkg, desired)
            msg = "Package {} was reinstalled.".format(pkgstr)
            if modified_pkg in altered_files:
                msg += " The following files were remediated:"
                comment.append(msg)
                comment.append(_nested_output(altered_files[modified_pkg]))
            else:
                comment.append(msg)
    if failed:
        # Add a comment for each package in failed with its pkg.verify output
        for failed_pkg in failed:
            if sources:
                pkgstr = failed_pkg
            else:
                pkgstr = _get_desired_pkg(failed_pkg, desired)
            msg = "Reinstall was not successful for package {}.".format(pkgstr)
            if failed_pkg in altered_files:
                msg += " The following files could not be remediated:"
                comment.append(msg)
                comment.append(_nested_output(altered_files[failed_pkg]))
            else:
                comment.append(msg)
            result = False
    ret = {
        "name": name,
        "changes": changes,
        "result": result,
        "comment": "\n".join(comment),
    }
    if warnings:
        ret.setdefault("warnings", []).extend(warnings)
    return ret
def downloaded(
    name, version=None, pkgs=None, fromrepo=None, ignore_epoch=None, **kwargs
):
    """
    .. versionadded:: 2017.7.0
    Ensure that the package is downloaded, and that it is the correct version
    (if specified).
    .. note::
        Any argument which is either a) not explicitly defined for this state,
        or b) not a global state argument like ``saltenv``, or
        ``reload_modules``, will be passed through to the call to
        ``pkg.install`` to download the package(s). For example, you can include
        a ``disablerepo`` argument on platforms that use yum/dnf to disable
        that repo:
    .. code-block:: yaml
        mypkg:
          pkg.downloaded:
            - disablerepo: base,updates
    To see what is supported, check :ref:`this page <virtual-pkg>` to find
    the documentation for your platform's ``pkg`` module, then look at the
    documentation for the ``install`` function.
    Any argument that is passed through to the ``install`` function, which
    is not defined for that function, will be silently ignored.
    Currently supported for the following pkg providers:
    :mod:`yumpkg <salt.modules.yumpkg>`, :mod:`zypper <salt.modules.zypper>` and :mod:`apt <salt.modules.aptpkg>`
    :param str name:
        The name of the package to be downloaded. This parameter is ignored if
        either "pkgs" is used. Additionally, please note that this option can
        only be used to download packages from a software repository.
    :param str version:
        Download a specific version of a package.
        .. important::
            As of version 2015.8.7, for distros which use yum/dnf, packages
            which have a version with a nonzero epoch (that is, versions which
            start with a number followed by a colon must have the epoch included
            when specifying the version number. For example:
    .. code-block:: yaml
        vim-enhanced:
          pkg.downloaded:
            - version: 2:7.4.160-1.el7
    An **ignore_epoch** argument has been added to which causes the
    epoch to be disregarded when the state checks to see if the desired
    version was installed.
    You can install a specific version when using the ``pkgs`` argument by
    including the version after the package:
    .. code-block:: yaml
        common_packages:
          pkg.downloaded:
            - pkgs:
              - unzip
              - dos2unix
              - salt-minion: 2015.8.5-1.el6
    :param bool resolve_capabilities:
        Turn on resolving capabilities. This allow one to name "provides" or alias names for packages.
        .. versionadded:: 2018.3.0
    CLI Example:
    .. code-block:: yaml
        zsh:
          pkg.downloaded:
            - version: 5.0.5-4.63
            - fromrepo: "myrepository"
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}
    # pkg.list_downloaded is required later to verify the download result;
    # without it, this state cannot work on the current platform.
    if "pkg.list_downloaded" not in __salt__:
        ret["result"] = False
        ret["comment"] = "The pkg.downloaded state is not available on this platform"
        return ret
    # An explicitly empty pkgs list is treated as a successful no-op.
    if isinstance(pkgs, list) and len(pkgs) == 0:
        ret["result"] = True
        ret["comment"] = "No packages to download provided"
        return ret
    # If just a name (and optionally a version) is passed, just pack them into
    # the pkgs argument.
    if name and not pkgs:
        if version:
            pkgs = [{name: version}]
            version = None
        else:
            pkgs = [name]
    # It doesn't make sense here to received 'downloadonly' as kwargs
    # as we're explicitly passing 'downloadonly=True' to execution module.
    if "downloadonly" in kwargs:
        del kwargs["downloadonly"]
    # Translate capability/"provides" names into concrete package names when
    # resolve_capabilities is in effect; the refresh hint is unused here.
    pkgs, _refresh = _resolve_capabilities(pkgs, **kwargs)
    # Only downloading not yet downloaded packages
    targets = _find_download_targets(
        name, version, pkgs, fromrepo=fromrepo, ignore_epoch=ignore_epoch, **kwargs
    )
    # A dict containing "result" is a complete state return (e.g. nothing to
    # do, or a pre-check failure) and is passed straight back to the caller.
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, dict):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret
    # Test mode: report what would happen; result stays None per convention.
    if __opts__["test"]:
        summary = ", ".join(targets)
        ret["comment"] = "The following packages would be downloaded: {}".format(
            summary
        )
        return ret
    try:
        # downloadonly=True makes pkg.install fetch the packages without
        # actually installing them.
        pkg_ret = __salt__["pkg.install"](
            name=name,
            pkgs=pkgs,
            version=version,
            downloadonly=True,
            fromrepo=fromrepo,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
        ret["result"] = True
        ret["changes"].update(pkg_ret)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # Get information for state return from the exception.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(exc)
        return ret
    # Re-query the downloaded-package cache and confirm each target is present.
    new_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    _ok, failed = _verify_install(targets, new_pkgs, ignore_epoch=ignore_epoch)
    if failed:
        summary = ", ".join([_get_desired_pkg(x, targets) for x in failed])
        ret["result"] = False
        ret["comment"] = "The following packages failed to download: {}".format(summary)
    # No changes reported and no failure comment: report targets as downloaded.
    if not ret["changes"] and not ret["comment"]:
        ret["result"] = True
        ret["comment"] = "Packages downloaded: {}".format(", ".join(targets))
    return ret
def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
    """
    .. versionadded:: 2017.7.0

    Ensure that packages related to certain advisory ids are installed.

    .. note::
        Any argument which is either a) not explicitly defined for this state,
        or b) not a global state argument like ``saltenv``, or
        ``reload_modules``, will be passed through to the call to
        ``pkg.install`` to install the patch(es).

        To see what is supported, check :ref:`this page <virtual-pkg>` to find
        the documentation for your platform's ``pkg`` module, then look at the
        documentation for the ``install`` function.

        Any argument that is passed through to the ``install`` function, which
        is not defined for that function, will be silently ignored.

    Currently supported for the following pkg providers:
    :mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`

    CLI Example:

    .. code-block:: yaml

        issue-foo-fixed:
          pkg.patch_installed:
            - advisory_ids:
              - SUSE-SLE-SERVER-12-SP2-2017-185
              - SUSE-SLE-SERVER-12-SP2-2017-150
              - SUSE-SLE-SERVER-12-SP2-2017-120
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}

    # Advisory handling requires a pkg module that can list patches.
    if "pkg.list_patches" not in __salt__:
        ret["result"] = False
        ret[
            "comment"
        ] = "The pkg.patch_installed state is not available on this platform"
        return ret

    # An explicitly empty advisory list is a successful no-op.
    if isinstance(advisory_ids, list) and not advisory_ids:
        ret["result"] = True
        ret["comment"] = "No advisory ids provided"
        return ret

    # Determine which advisories still need to be applied.
    targets = _find_advisory_targets(name, advisory_ids, **kwargs)
    if isinstance(targets, dict) and "result" in targets:
        # The helper produced a complete state return; hand it back unchanged.
        return targets
    if not isinstance(targets, list):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret

    # Test mode: describe the pending work and leave result as None.
    if __opts__["test"]:
        ret[
            "comment"
        ] = "The following advisory patches would be downloaded: {}".format(
            ", ".join(targets)
        )
        return ret

    try:
        pkg_ret = __salt__["pkg.install"](
            name=name, advisory_ids=advisory_ids, downloadonly=downloadonly, **kwargs
        )
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # The exception carries partial state data; surface it.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(exc)
        return ret

    ret["result"] = True
    ret["changes"].update(pkg_ret)

    # No changes and no comment means nothing needed doing.
    if not ret["changes"] and not ret["comment"]:
        status = "downloaded" if downloadonly else "installed"
        ret["result"] = True
        ret[
            "comment"
        ] = "Advisory patch is not needed or related packages are already {}".format(
            status
        )
    return ret
def patch_downloaded(name, advisory_ids=None, **kwargs):
    """
    .. versionadded:: 2017.7.0

    Ensure that packages related to certain advisory ids are downloaded.

    Currently supported for the following pkg providers:
    :mod:`yumpkg <salt.modules.yumpkg>` and :mod:`zypper <salt.modules.zypper>`

    CLI Example:

    .. code-block:: yaml

        preparing-to-fix-issues:
          pkg.patch_downloaded:
            - advisory_ids:
              - SUSE-SLE-SERVER-12-SP2-2017-185
              - SUSE-SLE-SERVER-12-SP2-2017-150
              - SUSE-SLE-SERVER-12-SP2-2017-120
    """
    # Bail out early on platforms whose pkg module cannot list patches.
    if "pkg.list_patches" not in __salt__:
        return {
            "name": name,
            "result": False,
            "changes": {},
            "comment": (
                "The pkg.patch_downloaded state is not available on this platform"
            ),
        }

    # Discard any caller-supplied 'downloadonly'; this state always forces
    # downloadonly=True when delegating to patch_installed below.
    kwargs.pop("downloadonly", None)
    return patch_installed(
        name=name, advisory_ids=advisory_ids, downloadonly=True, **kwargs
    )
def latest(
    name,
    refresh=None,
    fromrepo=None,
    skip_verify=False,
    pkgs=None,
    watch_flags=True,
    **kwargs
):
    """
    Ensure that the named package is installed and the latest available
    package. If the package can be updated, this state function will update
    the package. Generally it is better for the
    :mod:`installed <salt.states.pkg.installed>` function to be
    used, as :mod:`latest <salt.states.pkg.latest>` will update the package
    whenever a new package is available.
    .. note::
        Any argument which is either a) not explicitly defined for this state,
        or b) not a global state argument like ``saltenv``, or
        ``reload_modules``, will be passed through to the call to
        ``pkg.install`` to install the package(s). For example, you can include
        a ``disablerepo`` argument on platforms that use yum/dnf to disable
        that repo:
    .. code-block:: yaml
        mypkg:
          pkg.latest:
            - disablerepo: base,updates
    To see what is supported, check :ref:`this page <virtual-pkg>` to find
    the documentation for your platform's ``pkg`` module, then look at the
    documentation for the ``install`` function.
    Any argument that is passed through to the ``install`` function, which
    is not defined for that function, will be silently ignored.
    name
        The name of the package to maintain at the latest available version.
        This parameter is ignored if "pkgs" is used.
    fromrepo
        Specify a repository from which to install
    skip_verify
        Skip the GPG verification check for the package to be installed
    refresh
        This parameter controls whether or not the package repo database is
        updated prior to checking for the latest available version of the
        requested packages.
        If ``True``, the package database will be refreshed (``apt-get update``
        or equivalent, depending on platform) before checking for the latest
        available version of the requested packages.
        If ``False``, the package database will *not* be refreshed before
        checking.
        If unset, then Salt treats package database refreshes differently
        depending on whether or not a ``pkg`` state has been executed already
        during the current Salt run. Once a refresh has been performed in a
        ``pkg`` state, for the remainder of that Salt run no other refreshes
        will be performed for ``pkg`` states which do not explicitly set
        ``refresh`` to ``True``. This prevents needless additional refreshes
        from slowing down the Salt run.
    :param str cache_valid_time:
        .. versionadded:: 2016.11.0
        This parameter sets the value in seconds after which the cache is
        marked as invalid, and a cache update is necessary. This overwrites
        the ``refresh`` parameter's default behavior.
        Example:
        .. code-block:: yaml
            httpd:
              pkg.latest:
                - refresh: True
                - cache_valid_time: 300
        In this case, a refresh will not take place for 5 minutes since the last
        ``apt-get update`` was executed on the system.
        .. note::
            This parameter is available only on Debian based distributions and
            has no effect on the rest.
    :param bool resolve_capabilities:
        Turn on resolving capabilities. This allow one to name "provides" or alias names for packages.
        .. versionadded:: 2018.3.0
    Multiple Package Installation Options:
    (Not yet supported for: FreeBSD, OpenBSD, MacOS, and Solaris pkgutil)
    pkgs
        A list of packages to maintain at the latest available version.
    .. code-block:: yaml
        mypkgs:
          pkg.latest:
            - pkgs:
              - foo
              - bar
              - baz
    install_recommends
        Whether to install the packages marked as recommended. Default is
        ``True``. Currently only works with APT-based systems.
        .. versionadded:: 2015.5.0
    .. code-block:: yaml
        httpd:
          pkg.latest:
            - install_recommends: False
    only_upgrade
        Only upgrade the packages, if they are already installed. Default is
        ``False``. Currently only works with APT-based systems.
        .. versionadded:: 2015.5.0
    .. code-block:: yaml
        httpd:
          pkg.latest:
            - only_upgrade: True
    .. note::
        If this parameter is set to True and the package is not already
        installed, the state will fail.
    report_reboot_exit_codes
        If the installer exits with a recognized exit code indicating that
        a reboot is required, the module function
        *win_system.set_reboot_required_witnessed*
        will be called, preserving the knowledge of this event
        for the remainder of the current boot session. For the time being,
        ``3010`` is the only recognized exit code, but this
        is subject to future refinement. The value of this param
        defaults to ``True``. This parameter has no effect on
        non-Windows systems.
        .. versionadded:: 2016.11.0
    .. code-block:: yaml
        ms vcpp installed:
          pkg.latest:
            - name: ms-vcpp
            - report_reboot_exit_codes: False
    """
    # Resolve the effective refresh flag, taking the run-wide "already
    # refreshed" bookkeeping in __opts__ into account.
    refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
    # Installing from explicit package files is not meaningful for "latest".
    if kwargs.get("sources"):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": 'The "sources" parameter is not supported.',
        }
    elif pkgs:
        desired_pkgs = list(_repack_pkgs(pkgs).keys())  # pylint: disable=not-callable
        if not desired_pkgs:
            # Badly-formatted SLS
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": 'Invalidly formatted "pkgs" parameter. See minion log.',
            }
    else:
        # An explicitly empty pkgs list is a successful no-op.
        if isinstance(pkgs, list) and len(pkgs) == 0:
            return {
                "name": name,
                "changes": {},
                "result": True,
                "comment": "No packages to install provided",
            }
        else:
            desired_pkgs = [name]
    kwargs["saltenv"] = __env__
    # check if capabilities should be checked and modify the requested packages
    # accordingly.
    desired_pkgs, refresh = _resolve_capabilities(
        desired_pkgs, refresh=refresh, **kwargs
    )
    # Query the newest available version of each desired package.
    try:
        avail = __salt__["pkg.latest_version"](
            *desired_pkgs, fromrepo=fromrepo, refresh=refresh, **kwargs
        )
    except CommandExecutionError as exc:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": (
                "An error was encountered while checking the "
                "newest available version of package(s): {}".format(exc)
            ),
        }
    # Query the currently-installed version of each desired package.
    try:
        cur = __salt__["pkg.version"](*desired_pkgs, **kwargs)
    except CommandExecutionError as exc:
        return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
    # Repack the cur/avail data if only a single package is being checked
    if isinstance(cur, str):
        cur = {desired_pkgs[0]: cur}
    if isinstance(avail, str):
        avail = {desired_pkgs[0]: avail}
    # targets maps package name -> version to install/upgrade to.
    targets = {}
    problems = []
    for pkg in desired_pkgs:
        if not avail.get(pkg):
            # Package either a) is up-to-date, or b) does not exist
            if not cur.get(pkg):
                # Package does not exist
                msg = "No information found for '{}'.".format(pkg)
                log.error(msg)
                problems.append(msg)
            elif (
                watch_flags
                and __grains__.get("os") == "Gentoo"
                and __salt__["portage_config.is_changed_uses"](pkg)
            ):
                # Package is up-to-date, but Gentoo USE flags are changing so
                # we need to add it to the targets
                targets[pkg] = cur[pkg]
        else:
            # Package either a) is not installed, or b) is installed and has an
            # upgrade available
            targets[pkg] = avail[pkg]
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if targets:
        # Find up-to-date packages
        if not pkgs:
            # There couldn't have been any up-to-date packages if this state
            # only targeted a single package and is being allowed to proceed to
            # the install step.
            up_to_date = []
        else:
            up_to_date = [x for x in pkgs if x not in targets]
        # Test mode: report the pending installs/upgrades, result is None.
        if __opts__["test"]:
            comments = []
            comments.append(
                "The following packages would be installed/upgraded: "
                + ", ".join(sorted(targets))
            )
            if up_to_date:
                up_to_date_count = len(up_to_date)
                # Only enumerate when the list is short enough to be readable.
                if up_to_date_count <= 10:
                    comments.append(
                        "The following packages are already up-to-date: "
                        + ", ".join(
                            ["{} ({})".format(x, cur[x]) for x in sorted(up_to_date)]
                        )
                    )
                else:
                    comments.append(
                        "{} packages are already up-to-date".format(up_to_date_count)
                    )
            return {
                "name": name,
                "changes": {},
                "result": None,
                "comment": "\n".join(comments),
            }
        if salt.utils.platform.is_windows():
            # pkg.install execution module on windows ensures the software
            # package is installed when no version is specified, it does not
            # upgrade the software to the latest. This is per the design.
            # Build updated list of pkgs *with verion number*, exclude
            # non-targeted ones
            targeted_pkgs = [{x: targets[x]} for x in targets]
        else:
            # Build updated list of pkgs to exclude non-targeted ones
            targeted_pkgs = list(targets)
        # No need to refresh, if a refresh was necessary it would have been
        # performed above when pkg.latest_version was run.
        try:
            changes = __salt__["pkg.install"](
                name=None,
                refresh=False,
                fromrepo=fromrepo,
                skip_verify=skip_verify,
                pkgs=targeted_pkgs,
                **kwargs
            )
        except CommandExecutionError as exc:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": (
                    "An error was encountered while installing package(s): {}".format(
                        exc
                    )
                ),
            }
        if changes:
            # Find failed and successful updates.
            # NOTE: by operator precedence this condition reads as:
            # no change recorded for x, OR (the recorded new version differs
            # from the target AND the target is not the literal "latest").
            failed = [
                x
                for x in targets
                if not changes.get(x)
                or changes[x].get("new") != targets[x]
                and targets[x] != "latest"
            ]
            successful = [x for x in targets if x not in failed]
            comments = []
            if failed:
                msg = "The following packages failed to update: {}".format(
                    ", ".join(sorted(failed))
                )
                comments.append(msg)
            if successful:
                msg = (
                    "The following packages were successfully "
                    "installed/upgraded: "
                    "{}".format(", ".join(sorted(successful)))
                )
                comments.append(msg)
            if up_to_date:
                if len(up_to_date) <= 10:
                    msg = "The following packages were already up-to-date: {}".format(
                        ", ".join(sorted(up_to_date))
                    )
                else:
                    msg = "{} packages were already up-to-date ".format(len(up_to_date))
                comments.append(msg)
            return {
                "name": name,
                "changes": changes,
                "result": False if failed else True,
                "comment": " ".join(comments),
            }
        else:
            # pkg.install reported no changes at all: every target failed.
            if len(targets) > 10:
                comment = (
                    "{} targeted packages failed to update. "
                    "See debug log for details.".format(len(targets))
                )
            elif len(targets) > 1:
                comment = (
                    "The following targeted packages failed to update. "
                    "See debug log for details: ({}).".format(
                        ", ".join(sorted(targets))
                    )
                )
            else:
                comment = "Package {} failed to update.".format(
                    next(iter(list(targets.keys())))
                )
            if up_to_date:
                if len(up_to_date) <= 10:
                    comment += (
                        " The following packages were already up-to-date: {}".format(
                            ", ".join(sorted(up_to_date))
                        )
                    )
                else:
                    comment += "{} packages were already up-to-date".format(
                        len(up_to_date)
                    )
            return {
                "name": name,
                "changes": changes,
                "result": False,
                "comment": comment,
            }
    else:
        # No targets at all: everything requested is already up-to-date.
        if len(desired_pkgs) > 10:
            comment = "All {} packages are up-to-date.".format(len(desired_pkgs))
        elif len(desired_pkgs) > 1:
            comment = "All packages are up-to-date ({}).".format(
                ", ".join(sorted(desired_pkgs))
            )
        else:
            comment = "Package {} is already up-to-date".format(desired_pkgs[0])
        return {"name": name, "changes": {}, "result": True, "comment": comment}
def _uninstall(
    action="remove",
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    ignore_epoch=None,
    **kwargs
):
    """
    Common function for package removal.

    Shared implementation behind the ``removed`` and ``purged`` states.
    ``action`` selects the pkg module function to invoke ("remove" or
    "purge"); all other parameters mirror the public states. Returns a
    standard state dict (name/changes/result/comment).
    """
    # Guard against being called with an unexpected action string.
    if action not in ("remove", "purge"):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "Invalid action '{}'. This is probably a bug.".format(action),
        }
    # Parse name/pkgs into a {package: version} mapping.
    try:
        pkg_params = __salt__["pkg_resource.parse_targets"](
            name, pkgs, normalize=normalize
        )[0]
    except MinionError as exc:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while parsing targets: {}".format(exc),
        }
    # Work out which of the requested packages are actually installed.
    targets = _find_remove_targets(
        name, version, pkgs, normalize, ignore_epoch=ignore_epoch, **kwargs
    )
    # A dict with "result" is a complete state return; pass it straight back.
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, list):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while checking targets: {}".format(
                targets
            ),
        }
    if action == "purge":
        # For purge, partially-removed packages (config files left behind)
        # are also targets.
        old_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        targets.extend([x for x in pkg_params if x in old_removed])
    targets.sort()
    if not targets:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "None of the targeted packages are installed{}".format(
                " or partially installed" if action == "purge" else ""
            ),
        }
    # Test mode: report the pending removals without touching the system.
    if __opts__["test"]:
        _changes = {}
        _changes.update({x: {"new": "{}d".format(action), "old": ""} for x in targets})
        return {
            "name": name,
            "changes": _changes,
            "result": None,
            "comment": "The following packages will be {}d: {}.".format(
                action, ", ".join(targets)
            ),
        }
    # Dispatch to pkg.remove or pkg.purge based on the requested action.
    changes = __salt__["pkg.{}".format(action)](
        name, pkgs=pkgs, version=version, **kwargs
    )
    # Re-list installed packages to verify the removals actually happened.
    new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    failed = []
    for param in pkg_params:
        if __grains__["os_family"] in ["Suse", "RedHat"]:
            # Check if the package version set to be removed is actually removed:
            if param in new and not pkg_params[param]:
                failed.append(param)
            elif param in new and pkg_params[param] in new[param]:
                failed.append(param + "-" + pkg_params[param])
        elif param in new:
            failed.append(param)
    if action == "purge":
        # A package still listed as "removed" (config files remain) means
        # the purge did not fully succeed for it.
        new_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        failed.extend([x for x in pkg_params if x in new_removed])
    failed.sort()
    if failed:
        return {
            "name": name,
            "changes": changes,
            "result": False,
            "comment": "The following packages failed to {}: {}.".format(
                action, ", ".join(failed)
            ),
        }
    comments = []
    # Requested packages that were never installed are reported separately.
    not_installed = sorted([x for x in pkg_params if x not in targets])
    if not_installed:
        comments.append(
            "The following packages were not installed: {}".format(
                ", ".join(not_installed)
            )
        )
        comments.append(
            "The following packages were {}d: {}.".format(action, ", ".join(targets))
        )
    else:
        comments.append("All targeted packages were {}d.".format(action))
    return {
        "name": name,
        "changes": changes,
        "result": True,
        "comment": " ".join(comments),
    }
def removed(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """
    Verify that a package is not installed, calling ``pkg.remove`` if necessary
    to remove the package.

    name
        The name of the package to be removed.

    version
        The version of the package that should be removed. Don't do anything if
        the package is installed with an unmatching version.

        .. important::
            As of version 2015.8.7, for distros which use yum/dnf, packages
            which have a version with a nonzero epoch (that is, versions which
            start with a number followed by a colon like in the example above)
            must have the epoch included when specifying the version number.
            For example:

            .. code-block:: yaml

                vim-enhanced:
                  pkg.removed:
                    - version: 2:7.4.160-1.el7

            In version 2015.8.9, an **ignore_epoch** argument has been added to
            :py:mod:`pkg.installed <salt.states.pkg.installed>`,
            :py:mod:`pkg.removed <salt.states.pkg.removed>`, and
            :py:mod:`pkg.purged <salt.states.pkg.purged>` states, which
            causes the epoch to be disregarded when the state checks to see if
            the desired version was installed. If **ignore_epoch** was not set
            to ``True``, and instead of ``2:7.4.160-1.el7`` a version of
            ``7.4.160-1.el7`` were used, this state would report success since
            the actual installed version includes the epoch, and the specified
            version would not match.

    normalize : True
        Normalize the package name by removing the architecture, if the
        architecture of the package is different from the architecture of the
        operating system. The ability to disable this behavior is useful for
        poorly-created packages which include the architecture as an actual
        part of the name, such as kernel modules which match a specific kernel
        version.

        .. versionadded:: 2015.8.0

    ignore_epoch : None
        If this option is not explicitly set, and there is no epoch in the
        desired package version, the epoch will be implicitly ignored. Set this
        argument to ``True`` to explicitly ignore the epoch, and ``False`` to
        strictly enforce it.

        .. versionadded:: 2015.8.9

        .. versionchanged:: 3001
            In prior releases, the default behavior was to strictly enforce
            epochs unless this argument was set to ``True``.

    Multiple Package Options:

    pkgs
        A list of packages to remove. Must be passed as a python list. The
        ``name`` parameter will be ignored if this option is passed. It accepts
        version numbers as well.

        .. versionadded:: 0.16.0
    """
    kwargs["saltenv"] = __env__
    try:
        # All of the real work happens in the shared _uninstall helper.
        return _uninstall(
            action="remove",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        if exc.info:
            # The exception carries partial state data; surface it.
            changes = exc.info.get("changes", {})
            comment = exc.strerror_without_changes
        else:
            changes = {}
            comment = "An error was encountered while removing package(s): {}".format(
                exc
            )
        return {
            "name": name,
            "result": False,
            "changes": changes,
            "comment": comment,
        }
def purged(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """
    Verify that a package is not installed, calling ``pkg.purge`` if necessary
    to purge the package. All configuration files are also removed.

    name
        The name of the package to be purged.

    version
        The version of the package that should be removed. Don't do anything if
        the package is installed with an unmatching version.

        .. important::
            As of version 2015.8.7, for distros which use yum/dnf, packages
            which have a version with a nonzero epoch (that is, versions which
            start with a number followed by a colon like in the example above)
            must have the epoch included when specifying the version number.
            For example:

            .. code-block:: yaml

                vim-enhanced:
                  pkg.purged:
                    - version: 2:7.4.160-1.el7

            In version 2015.8.9, an **ignore_epoch** argument has been added to
            :py:mod:`pkg.installed <salt.states.pkg.installed>`,
            :py:mod:`pkg.removed <salt.states.pkg.removed>`, and
            :py:mod:`pkg.purged <salt.states.pkg.purged>` states, which
            causes the epoch to be disregarded when the state checks to see if
            the desired version was installed. If **ignore_epoch** was not set
            to ``True``, and instead of ``2:7.4.160-1.el7`` a version of
            ``7.4.160-1.el7`` were used, this state would report success since
            the actual installed version includes the epoch, and the specified
            version would not match.

    normalize : True
        Normalize the package name by removing the architecture, if the
        architecture of the package is different from the architecture of the
        operating system. The ability to disable this behavior is useful for
        poorly-created packages which include the architecture as an actual
        part of the name, such as kernel modules which match a specific kernel
        version.

        .. versionadded:: 2015.8.0

    ignore_epoch : None
        If this option is not explicitly set, and there is no epoch in the
        desired package version, the epoch will be implicitly ignored. Set this
        argument to ``True`` to explicitly ignore the epoch, and ``False`` to
        strictly enforce it.

        .. versionadded:: 2015.8.9

        .. versionchanged:: 3001
            In prior releases, the default behavior was to strictly enforce
            epochs unless this argument was set to ``True``.

    Multiple Package Options:

    pkgs
        A list of packages to purge. Must be passed as a python list. The
        ``name`` parameter will be ignored if this option is passed. It accepts
        version numbers as well.

        .. versionadded:: 0.16.0
    """
    kwargs["saltenv"] = __env__
    try:
        # All of the real work happens in the shared _uninstall helper.
        return _uninstall(
            action="purge",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        if exc.info:
            # The exception carries partial state data; surface it.
            changes = exc.info.get("changes", {})
            comment = exc.strerror_without_changes
        else:
            changes = {}
            comment = "An error was encountered while purging package(s): {}".format(
                exc
            )
        return {
            "name": name,
            "result": False,
            "changes": changes,
            "comment": comment,
        }
def uptodate(name, refresh=False, pkgs=None, **kwargs):
    """
    .. versionadded:: 2014.7.0
    .. versionchanged:: 2018.3.0
        Added support for the ``pkgin`` provider.
    Verify that the system is completely up to date.
    name
        The name has no functional value and is only used as a tracking
        reference
    refresh
        refresh the package database before checking for new upgrades
    pkgs
        list of packages to upgrade
    :param str cache_valid_time:
        This parameter sets the value in seconds after which cache marked as invalid,
        and cache update is necessary. This overwrite ``refresh`` parameter
        default behavior.
        In this case cache_valid_time is set, refresh will not take place for
        amount in seconds since last ``apt-get update`` executed on the system.
        .. note::
            This parameter available only on Debian based distributions, and
            have no effect on the rest.
    :param bool resolve_capabilities:
        Turn on resolving capabilities. This allow one to name "provides" or alias names for packages.
        .. versionadded:: 2018.3.0
    kwargs
        Any keyword arguments to pass through to ``pkg.upgrade``.
        .. versionadded:: 2015.5.0
    """
    # Default to failure; the happy paths below overwrite result/comment.
    ret = {"name": name, "changes": {}, "result": False, "comment": "Failed to update"}
    if "pkg.list_upgrades" not in __salt__:
        ret["comment"] = "State pkg.uptodate is not available"
        return ret
    # emerge --update doesn't appear to support repo notation
    if "fromrepo" in kwargs and __grains__["os"] == "Gentoo":
        ret["comment"] = "'fromrepo' argument not supported on this platform"
        return ret
    if isinstance(refresh, bool):
        # Resolve capability/"provides" names before listing upgrades.
        pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
        try:
            packages = __salt__["pkg.list_upgrades"](refresh=refresh, **kwargs)
            # Build the expected old->new change map for every upgradable pkg.
            expected = {
                pkgname: {
                    "new": pkgver,
                    "old": __salt__["pkg.version"](pkgname, **kwargs),
                }
                for pkgname, pkgver in packages.items()
            }
            if isinstance(pkgs, list):
                # Restrict the upgrade set to the explicitly requested pkgs.
                packages = [pkg for pkg in packages if pkg in pkgs]
                expected = {
                    pkgname: pkgver
                    for pkgname, pkgver in expected.items()
                    if pkgname in pkgs
                }
        except Exception as exc:  # pylint: disable=broad-except
            ret["comment"] = str(exc)
            return ret
    else:
        ret["comment"] = "refresh must be either True or False"
        return ret
    if not packages:
        ret["comment"] = "System is already up-to-date"
        ret["result"] = True
        return ret
    elif __opts__["test"]:
        # Test mode: report the expected changes; result None means "would run".
        ret["comment"] = "System update will be performed"
        ret["changes"] = expected
        ret["result"] = None
        return ret
    try:
        ret["changes"] = __salt__["pkg.upgrade"](refresh=refresh, pkgs=pkgs, **kwargs)
    except CommandExecutionError as exc:
        if exc.info:
            # Get information for state return from the exception.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while updating packages: {}".format(exc)
        return ret
    # If a package list was provided, ensure those packages were updated
    missing = []
    if isinstance(pkgs, list):
        missing = [pkg for pkg in expected.keys() if pkg not in ret["changes"]]
    if missing:
        ret["comment"] = "The following package(s) failed to update: {}".format(
            ", ".join(missing)
        )
        ret["result"] = False
    else:
        ret["comment"] = "Upgrade ran successfully"
        ret["result"] = True
    return ret
def group_installed(name, skip=None, include=None, **kwargs):
    """
    Ensure that an entire package group is installed. This state is currently
    only supported for the :mod:`yum <salt.modules.yumpkg>` and
    :mod:`pacman <salt.modules.pacman>` package managers.

    .. versionadded:: 2015.8.0
    .. versionchanged:: 2016.11.0
        Added support in :mod:`pacman <salt.modules.pacman>`

    name
        The name of the package group to install.

    skip
        Packages that would normally be installed by the package group
        ("default" packages), which should not be installed. Must be a list.

    include
        Packages which are included in a group, which would not normally be
        installed by a ``yum groupinstall`` ("optional" packages). Note that
        this will not enforce group membership; if you include packages which
        are not members of the specified groups, they will still be installed.
        Must be a list.

        .. versionchanged:: 2016.3.0
            This option can no longer be passed as a comma-separated list, it
            must now be passed as a list.

    .. note::
        Because this is essentially a wrapper around :py:func:`pkg.install
        <salt.modules.yumpkg.install>`, any argument which can be passed to
        pkg.install may also be included here, and it will be passed on to the
        call to :py:func:`pkg.install <salt.modules.yumpkg.install>`.
    """
    ret = {"name": name, "changes": {}, "result": False, "comment": ""}
    # pkg.group_diff only exists on the supported package managers (yum/pacman)
    if "pkg.group_diff" not in __salt__:
        ret["comment"] = "pkg.group_install not available for this platform"
        return ret
    # Normalize 'skip' to a list of strings
    if skip is None:
        skip = []
    else:
        if not isinstance(skip, list):
            ret["comment"] = "skip must be formatted as a list"
            return ret
        for idx, item in enumerate(skip):
            if not isinstance(item, str):
                skip[idx] = str(item)
    # Normalize 'include' to a list of strings
    if include is None:
        include = []
    else:
        if not isinstance(include, list):
            ret["comment"] = "include must be formatted as a list"
            return ret
        for idx, item in enumerate(include):
            if not isinstance(item, str):
                include[idx] = str(item)
    try:
        # diff maps mandatory/default/optional -> installed/not installed lists
        diff = __salt__["pkg.group_diff"](name)
    except CommandExecutionError as err:
        ret[
            "comment"
        ] = "An error was encountered while installing/updating group '{}': {}.".format(
            name, err
        )
        return ret
    # Mandatory members of a group can never be skipped
    mandatory = diff["mandatory"]["installed"] + diff["mandatory"]["not installed"]
    invalid_skip = [x for x in mandatory if x in skip]
    if invalid_skip:
        ret[
            "comment"
        ] = "The following mandatory packages cannot be skipped: {}".format(
            ", ".join(invalid_skip)
        )
        return ret
    # Targets: missing mandatory + missing non-skipped defaults + includes
    targets = diff["mandatory"]["not installed"]
    targets.extend([x for x in diff["default"]["not installed"] if x not in skip])
    targets.extend(include)
    if not targets:
        ret["result"] = True
        ret["comment"] = "Group '{}' is already installed".format(name)
        return ret
    # Used only to word the comment ("updated" vs "installed")
    partially_installed = (
        diff["mandatory"]["installed"]
        or diff["default"]["installed"]
        or diff["optional"]["installed"]
    )
    if __opts__["test"]:
        ret["result"] = None
        if partially_installed:
            ret[
                "comment"
            ] = "Group '{}' is partially installed and will be updated".format(name)
        else:
            ret["comment"] = "Group '{}' will be installed".format(name)
        return ret
    try:
        ret["changes"] = __salt__["pkg.install"](pkgs=targets, **kwargs)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # Get information for state return from the exception.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret["comment"] = (
                "An error was encountered while "
                "installing/updating group '{}': {}".format(name, exc)
            )
        return ret
    # Verify that every target actually ended up installed
    failed = [x for x in targets if x not in __salt__["pkg.list_pkgs"](**kwargs)]
    if failed:
        ret["comment"] = "Failed to install the following packages: {}".format(
            ", ".join(failed)
        )
        return ret
    ret["result"] = True
    ret["comment"] = "Group '{}' was {}".format(
        name, "updated" if partially_installed else "installed"
    )
    return ret
def mod_init(low):
    """
    Set a flag to tell the install functions to refresh the package database.
    This ensures that the package database is refreshed only once during
    a state run, significantly improving the speed of package management
    during a state run.

    It also runs the "ex_mod_init" from the package manager module that is
    currently loaded, when one exists. The "ex_mod_init" is expected to work
    as a normal "mod_init" function.

    .. seealso::
        :py:func:`salt.modules.ebuild.ex_mod_init`
    """
    # Give the loaded package module a chance to do its own init work first.
    result = True
    if "pkg.ex_mod_init" in __salt__:
        result = __salt__["pkg.ex_mod_init"](low)

    # Only install-type functions need the refresh tag written.
    if low["fun"] in ("installed", "latest"):
        salt.utils.pkg.write_rtag(__opts__)
        return result
    return False
def mod_aggregate(low, chunks, running):
    """
    Look up all package names in the available low chunks and merge them
    into a single ``pkgs`` (or ``sources``) reference in the present low
    data, so one pkg.install call can handle many states at once.

    Chunks that have already run, belong to another state module, use a
    different function or repo, or carry ``hold`` are left alone. Aggregated
    chunks are marked with ``__agg__`` so they are not processed twice.
    """
    pkgs = []
    # Tracks whether we are aggregating 'pkgs' or 'sources'; never both.
    pkg_type = None
    agg_enabled = [
        "installed",
        "latest",
        "removed",
        "purged",
    ]
    if low.get("fun") not in agg_enabled:
        return low
    for chunk in chunks:
        tag = __utils__["state.gen_tag"](chunk)
        if tag in running:
            # Already ran the pkg state, skip aggregation
            continue
        if chunk.get("state") == "pkg":
            if "__agg__" in chunk:
                continue
            # Check for the same function
            if chunk.get("fun") != low.get("fun"):
                continue
            # Check for the same repo
            if chunk.get("fromrepo") != low.get("fromrepo"):
                continue
            # Check first if 'sources' was passed so we don't aggregate pkgs
            # and sources together.
            if "sources" in chunk:
                if pkg_type is None:
                    pkg_type = "sources"
                if pkg_type == "sources":
                    pkgs.extend(chunk["sources"])
                    chunk["__agg__"] = True
            else:
                # If hold exists in the chunk, do not add to aggregation
                # otherwise all packages will be held or unheld.
                # setting a package to be held/unheld is not as
                # time consuming as installing/uninstalling.
                if "hold" not in chunk:
                    if pkg_type is None:
                        pkg_type = "pkgs"
                    if pkg_type == "pkgs":
                        # Pull out the pkg names!
                        if "pkgs" in chunk:
                            pkgs.extend(chunk["pkgs"])
                            chunk["__agg__"] = True
                        elif "name" in chunk:
                            # Preserve a per-chunk version as {name: version}
                            version = chunk.pop("version", None)
                            if version is not None:
                                pkgs.append({chunk["name"]: version})
                            else:
                                pkgs.append(chunk["name"])
                            chunk["__agg__"] = True
    # Merge what we collected into the present low data
    if pkg_type is not None and pkgs:
        if pkg_type in low:
            low[pkg_type].extend(pkgs)
        else:
            low[pkg_type] = pkgs
    return low
def mod_watch(name, **kwargs):
    """
    Install/reinstall a package based on a watch requisite

    .. note::
        This state exists to support special handling of the ``watch``
        :ref:`requisite <requisites>`. It should not be called directly.

        Parameters for this function should be set by the state being
        triggered.
    """
    sfun = kwargs.pop("sfun", None)
    # Dispatch table mapping the triggering state function to its handler.
    dispatch = {
        "purged": purged,
        "latest": latest,
        "removed": removed,
        "installed": installed,
    }
    handler = dispatch.get(sfun)
    if handler is not None:
        return handler(name, **kwargs)
    return {
        "name": name,
        "changes": {},
        "comment": "pkg.{} does not work with the watch requisite".format(sfun),
        "result": False,
    }
def mod_beacon(name, **kwargs):
    """
    Create a beacon to monitor a package or packages
    based on a beacon state argument.

    .. note::
        This state exists to support special handling of the ``beacon``
        state argument for supported state functions. It should not be
        called directly.
    """
    sfun = kwargs.pop("sfun", None)

    # Only installed/removed support the beacon state argument.
    if sfun not in ("installed", "removed"):
        return {
            "name": name,
            "changes": {},
            "comment": "pkg.{} does not work with the mod_beacon state function".format(
                sfun
            ),
            "result": False,
        }

    if not kwargs.get("beacon"):
        return {
            "name": name,
            "changes": {},
            "comment": "Not adding beacon.",
            "result": True,
        }

    beacon_module = "pkg"
    return __states__["beacon.present"](
        name="beacon_{}_{}".format(beacon_module, name),
        pkgs=kwargs.get("pkgs", [name]),
        interval=60,
        beacon_module=beacon_module,
    )
| 35.672059 | 116 | 0.563891 |
import fnmatch
import logging
import os
import re
import salt.utils.pkg
import salt.utils.platform
import salt.utils.versions
from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError
from salt.modules.pkg_resource import _repack_pkgs
from salt.output import nested
from salt.utils.functools import namespaced_function as _namespaced_function
from salt.utils.odict import OrderedDict as _OrderedDict
_repack_pkgs = _namespaced_function(_repack_pkgs, globals())
if salt.utils.platform.is_windows():
from urllib.parse import urlparse as _urlparse
from salt.exceptions import SaltRenderError
import collections
import datetime
import errno
import time
from functools import cmp_to_key
from salt.modules.win_pkg import _get_package_info
from salt.modules.win_pkg import get_repo_data
from salt.modules.win_pkg import _get_repo_details
from salt.modules.win_pkg import _refresh_db_conditional
from salt.modules.win_pkg import refresh_db
from salt.modules.win_pkg import genrepo
from salt.modules.win_pkg import _repo_process_pkg_sls
from salt.modules.win_pkg import _get_latest_pkg_version
from salt.modules.win_pkg import _reverse_cmp_pkg_versions
_get_package_info = _namespaced_function(_get_package_info, globals())
get_repo_data = _namespaced_function(get_repo_data, globals())
_get_repo_details = _namespaced_function(_get_repo_details, globals())
_refresh_db_conditional = _namespaced_function(_refresh_db_conditional, globals())
refresh_db = _namespaced_function(refresh_db, globals())
genrepo = _namespaced_function(genrepo, globals())
_repo_process_pkg_sls = _namespaced_function(_repo_process_pkg_sls, globals())
_get_latest_pkg_version = _namespaced_function(_get_latest_pkg_version, globals())
_reverse_cmp_pkg_versions = _namespaced_function(
_reverse_cmp_pkg_versions, globals()
)
import salt.utils.msgpack as msgpack
from salt.utils.versions import LooseVersion
log = logging.getLogger(__name__)
def __virtual__():
    """
    Only make this state module available if the pkg execution module is
    loaded for this minion.
    """
    if "pkg.install" not in __salt__:
        return (False, "pkg module could not be loaded")
    return True
def _get_comparison_spec(pkgver):
    """
    Split a version condition like ">=1.2.3" into an (operator, version)
    tuple. A bare version or an "=" operator is normalized to "==".
    """
    operator, version = salt.utils.pkg.split_comparison(pkgver.strip())
    if operator == "" or operator == "=":
        operator = "=="
    return operator, version
def _check_ignore_epoch(oper, desired_version, ignore_epoch=None):
if ignore_epoch is not None:
return ignore_epoch
return "<" not in oper and ">" not in oper and ":" not in desired_version
def _parse_version_string(version_conditions_string):
    """
    Parse a comma-separated string of version conditions (e.g.
    ">=1.0,<2.0") into a list of (operator, version) tuples.

    An empty or whitespace-only string yields an empty list.
    """
    stripped = version_conditions_string.strip()
    if not stripped:
        return []
    return [_get_comparison_spec(condition) for condition in stripped.split(",")]
def _fulfills_version_string(
    installed_versions,
    version_conditions_string,
    ignore_epoch=None,
    allow_updates=False,
):
    """
    Return True if any one of *installed_versions* satisfies every
    condition in the comma-separated *version_conditions_string*.

    installed_versions
        Iterable of installed version strings for a single package.

    version_conditions_string
        Comma-separated conditions, e.g. ">=1.0,<2.0". An empty string
        produces no conditions, so any installed version satisfies it.

    ignore_epoch
        Passed through to the underlying comparison; None defers the
        decision to _check_ignore_epoch per operator.

    allow_updates
        When True and there is exactly one "==" condition, the operator is
        relaxed to ">=" so that newer installed versions also satisfy it.
    """
    version_conditions = _parse_version_string(version_conditions_string)
    for installed_version in installed_versions:
        # A single installed version must satisfy ALL conditions to count.
        fullfills_all = True
        for operator, version_string in version_conditions:
            if allow_updates and len(version_conditions) == 1 and operator == "==":
                # Sole exact pin + allow_updates: accept newer versions too.
                operator = ">="
            fullfills_all = fullfills_all and _fulfills_version_spec(
                [installed_version], operator, version_string, ignore_epoch=ignore_epoch
            )
        if fullfills_all:
            return True
    return False
def _fulfills_version_spec(versions, oper, desired_version, ignore_epoch=None):
    """
    Return True if any entry in *versions* satisfies the comparison
    "<version> <oper> <desired_version>".

    A "==" comparison additionally allows fnmatch-style globbing in
    *desired_version*. On FreeBSD, a {"version": ..., ...} dict (as
    returned with with_origin=True) is unpacked to its version list first.
    """
    # May be None; salt.utils.versions.compare falls back to its default.
    cmp_func = __salt__.get("pkg.version_cmp")
    if salt.utils.platform.is_freebsd():
        if isinstance(versions, dict) and "version" in versions:
            versions = versions["version"]
    for ver in versions:
        if (
            oper == "==" and fnmatch.fnmatch(ver, desired_version)
        ) or salt.utils.versions.compare(
            ver1=ver,
            oper=oper,
            ver2=desired_version,
            cmp_func=cmp_func,
            ignore_epoch=_check_ignore_epoch(oper, desired_version, ignore_epoch),
        ):
            return True
    return False
def _find_unpurge_targets(desired, **kwargs):
    """
    Return the desired package names whose dpkg selection state is
    currently "purge", i.e. those that appear in the purge-desired
    package listing and must be switched back to "install".
    """
    purge_desired = __salt__["pkg.list_pkgs"](purge_desired=True, **kwargs)
    targets = []
    for pkgname in desired:
        if pkgname in purge_desired:
            targets.append(pkgname)
    return targets
def _find_download_targets(
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    skip_suggestions=False,
    ignore_epoch=None,
    **kwargs
):
    """
    Inspect the downloaded-package cache and work out which packages (and
    versions) still need to be downloaded.

    Returns either a dict of {pkgname: version} download targets, or a
    complete state return dict when nothing needs doing or an error
    occurred.
    """
    cur_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    if pkgs:
        # Multi-package form: repack into {name: version}
        to_download = _repack_pkgs(pkgs, normalize=normalize)
        if not to_download:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        # Single-package form driven by 'name'/'version'
        if normalize:
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            to_download = {_normalize_name(name): version}
        else:
            to_download = {name: version}
        cver = cur_pkgs.get(name, {})
        if name in to_download:
            # Short-circuit when the requested version (or any version,
            # when none was requested) is already in the download cache.
            if cver and version in cver:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": (
                        "Version {} of package '{}' is already downloaded".format(
                            version, name
                        )
                    ),
                }
            elif cver and version is None:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already downloaded".format(name),
                }
    version_spec = False
    if not skip_suggestions:
        # Ask the package db whether the requested names exist; a failing
        # check_db is treated as best-effort and ignored.
        try:
            problems = _preflight_check(to_download, **kwargs)
        except CommandExecutionError:
            pass
        else:
            comments = []
            if problems.get("no_suggest"):
                comments.append(
                    "The following package(s) were not found, and no "
                    "possible matches were found in the package db: "
                    "{}".format(", ".join(sorted(problems["no_suggest"])))
                )
            if problems.get("suggest"):
                for pkgname, suggestions in problems["suggest"].items():
                    comments.append(
                        "Package '{}' not found (possible matches: {})".format(
                            pkgname, ", ".join(suggestions)
                        )
                    )
            if comments:
                if len(comments) > 1:
                    comments.append("")
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": ". ".join(comments).rstrip(),
                }
    targets = {}
    problems = []
    for pkgname, pkgver in to_download.items():
        cver = cur_pkgs.get(pkgname, {})
        # Package not yet downloaded at all
        if not cver:
            targets[pkgname] = pkgver
            continue
        elif cver and not pkgver:
            # Something is cached and no specific version was requested
            continue
        version_spec = True
        try:
            if not _fulfills_version_string(
                cver.keys(), pkgver, ignore_epoch=ignore_epoch
            ):
                targets[pkgname] = pkgver
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        msg = "All specified packages{} are already downloaded".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_advisory_targets(name=None, advisory_ids=None, **kwargs):
    """
    Determine which advisory patches still need to be installed.

    Returns a list of advisory/patch IDs to install, or a complete state
    return dict when everything requested is already installed.
    """
    installed_patches = __salt__["pkg.list_installed_patches"](**kwargs)

    if advisory_ids:
        wanted = advisory_ids
    else:
        wanted = [name]
        # Single-name form: short-circuit when the patch is already there.
        if installed_patches.get(name, {}):
            return {
                "name": name,
                "changes": {},
                "result": True,
                "comment": "Advisory patch {} is already installed".format(name),
            }

    # Anything absent from the installed-patches listing is a target.
    targets = [patch for patch in wanted if not installed_patches.get(patch, {})]

    if not targets:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "All specified advisory patches are already installed",
        }
    return targets
def _find_remove_targets(
    name=None, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs
):
    """
    Inspect the installed packages and work out which ones need to be
    removed/purged.

    Returns a list of installed package names matching the request, or a
    complete state return dict when nothing needs doing or an error
    occurred.
    """
    # FreeBSD's list_pkgs needs with_origin so origins can be matched below
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True
    cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    if pkgs:
        to_remove = _repack_pkgs(pkgs, normalize=normalize)
        if not to_remove:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted pkgs parameter. See minion log.",
            }
    else:
        _normalize_name = __salt__.get("pkg.normalize_name", lambda pkgname: pkgname)
        to_remove = {_normalize_name(name): version}
    version_spec = False
    targets = []
    problems = []
    for pkgname, pkgver in to_remove.items():
        # A "/" in the name is treated as a FreeBSD origin (category/port)
        origin = bool(re.search("/", pkgname))
        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == pkgname]
        else:
            cver = cur_pkgs.get(pkgname, [])
        # Not installed: nothing to remove
        if not cver:
            continue
        elif __salt__["pkg_resource.version_clean"](pkgver) is None:
            # No specific version requested: remove regardless of version
            targets.append(pkgname)
            continue
        version_spec = True
        try:
            if _fulfills_version_string(cver, pkgver, ignore_epoch=ignore_epoch):
                targets.append(pkgname)
            else:
                log.debug(
                    "Current version (%s) did not match desired version "
                    "specification (%s), will not remove",
                    cver,
                    pkgver,
                )
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not targets:
        msg = "All specified packages{} are already absent".format(
            " (matching specified versions)" if version_spec else ""
        )
        return {"name": name, "changes": {}, "result": True, "comment": msg}
    return targets
def _find_install_targets(
    name=None,
    version=None,
    pkgs=None,
    sources=None,
    skip_suggestions=False,
    pkg_verify=False,
    normalize=True,
    ignore_epoch=None,
    reinstall=False,
    refresh=False,
    **kwargs
):
    """
    Inspect the minion and work out which packages need to be installed,
    reinstalled, or have their "purge" selection reverted.

    On success returns the 7-tuple
    (desired, targets, to_unpurge, to_reinstall, altered_files, warnings,
    was_refreshed); on a short-circuit (nothing to do, bad input, or an
    error) returns a complete state return dict instead. Callers
    distinguish the two by attempting tuple unpacking.
    """
    was_refreshed = False
    if all((pkgs, sources)):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": 'Only one of "pkgs" and "sources" is permitted.',
        }
    altered_files = {}
    # pkg_verify may be a list of {'ignore_types': [...]} / {'verify_options':
    # [...]} mappings; extract each option when present.
    if isinstance(pkg_verify, list) and any(
        x.get("ignore_types") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "ignore_types" in x
    ):
        ignore_types = next(
            x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
        )
    else:
        ignore_types = []
    if isinstance(pkg_verify, list) and any(
        x.get("verify_options") is not None
        for x in pkg_verify
        if isinstance(x, _OrderedDict) and "verify_options" in x
    ):
        verify_options = next(
            x.get("verify_options") for x in pkg_verify if "verify_options" in x
        )
    else:
        verify_options = []
    if __grains__["os"] == "FreeBSD":
        kwargs["with_origin"] = True
    if salt.utils.platform.is_windows():
        # On Windows, list_pkgs handles the refresh itself
        kwargs["refresh"] = refresh
    resolve_capabilities = (
        kwargs.get("resolve_capabilities", False) and "pkg.list_provides" in __salt__
    )
    try:
        cur_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
        cur_prov = (
            resolve_capabilities and __salt__["pkg.list_provides"](**kwargs) or dict()
        )
    except CommandExecutionError as exc:
        return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
    if salt.utils.platform.is_windows() and kwargs.pop("refresh", False):
        # A refresh already happened above; don't do it again later
        was_refreshed = True
        refresh = False
    if any((pkgs, sources)):
        if pkgs:
            desired = _repack_pkgs(pkgs, normalize=normalize)
        elif sources:
            desired = __salt__["pkg_resource.pack_sources"](
                sources,
                normalize=normalize,
            )
        if not desired:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": "Invalidly formatted '{}' parameter. See minion log.".format(
                    "pkgs" if pkgs else "sources"
                ),
            }
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
    else:
        # Single-package form driven by 'name'/'version'
        if salt.utils.platform.is_windows():
            pkginfo = _get_package_info(name, saltenv=kwargs["saltenv"])
            if not pkginfo:
                return {
                    "name": name,
                    "changes": {},
                    "result": False,
                    "comment": "Package {} not found in the repository.".format(name),
                }
            if version is None:
                version = _get_latest_pkg_version(pkginfo)
        if normalize:
            _normalize_name = __salt__.get(
                "pkg.normalize_name", lambda pkgname: pkgname
            )
            desired = {_normalize_name(name): version}
        else:
            desired = {name: version}
        to_unpurge = _find_unpurge_targets(desired, **kwargs)
        # A "/" in the name is treated as a FreeBSD origin (category/port)
        origin = bool(re.search("/", name))
        if __grains__["os"] == "FreeBSD" and origin:
            cver = [k for k, v in cur_pkgs.items() if v["origin"] == name]
        else:
            cver = cur_pkgs.get(name, [])
        if name not in to_unpurge:
            # Short-circuit when already installed and no reinstall/verify
            # was requested.
            if version and version in cver and not reinstall and not pkg_verify:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Version {} of package '{}' is already installed".format(
                        version, name
                    ),
                }
            elif cver and version is None and not reinstall and not pkg_verify:
                return {
                    "name": name,
                    "changes": {},
                    "result": True,
                    "comment": "Package {} is already installed".format(name),
                }
    version_spec = False
    if not sources:
        if not skip_suggestions:
            # Preflight: warn about names missing from the package db
            not_installed = {
                name: version
                for name, version in desired.items()
                if not (
                    name in cur_pkgs
                    and (
                        version is None
                        or _fulfills_version_string(
                            cur_pkgs[name], version, ignore_epoch=ignore_epoch
                        )
                    )
                )
            }
            if not_installed:
                try:
                    problems = _preflight_check(not_installed, **kwargs)
                except CommandExecutionError:
                    pass
                else:
                    comments = []
                    if problems.get("no_suggest"):
                        comments.append(
                            "The following package(s) were not found, and no "
                            "possible matches were found in the package db: "
                            "{}".format(", ".join(sorted(problems["no_suggest"])))
                        )
                    if problems.get("suggest"):
                        for pkgname, suggestions in problems["suggest"].items():
                            comments.append(
                                "Package '{}' not found (possible matches: {})".format(
                                    pkgname, ", ".join(suggestions)
                                )
                            )
                    if comments:
                        if len(comments) > 1:
                            comments.append("")
                        return {
                            "name": name,
                            "changes": {},
                            "result": False,
                            "comment": ". ".join(comments).rstrip(),
                        }
    # Resolve any 'latest' version requests to concrete versions
    wants_latest = [] if sources else [x for x, y in desired.items() if y == "latest"]
    if wants_latest:
        resolved_latest = __salt__["pkg.latest_version"](
            *wants_latest, refresh=refresh, **kwargs
        )
        if len(wants_latest) == 1:
            resolved_latest = {wants_latest[0]: resolved_latest}
        if refresh:
            was_refreshed = True
            refresh = False
        # If the package is already up-to-date, pkg.latest_version's
        # resolved latest version will be None.
        for key in resolved_latest:
            if not resolved_latest[key]:
                if key in cur_pkgs:
                    resolved_latest[key] = cur_pkgs[key][-1]
                else:
                    resolved_latest[key] = None
        # Update the desired versions with the ones we resolved
        desired.update(resolved_latest)
    # Find out which packages will be targeted in the call to pkg.install
    targets = {}
    to_reinstall = {}
    problems = []
    warnings = []
    failed_verify = False
    for package_name, version_string in desired.items():
        cver = cur_pkgs.get(package_name, [])
        if resolve_capabilities and not cver and package_name in cur_prov:
            # Desired name is a capability; look up its providing package
            cver = cur_pkgs.get(cur_prov.get(package_name)[0], [])
        # Package not yet installed, so add to targets
        if not cver:
            targets[package_name] = version_string
            continue
        if sources:
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            elif "lowpkg.bin_pkg_info" not in __salt__:
                continue
            # Metadata parser is available, cache the file and derive the
            # package's name and version
            err = "Unable to cache {0}: {1}"
            try:
                cached_path = __salt__["cp.cache_file"](
                    version_string, saltenv=kwargs["saltenv"]
                )
            except CommandExecutionError as exc:
                problems.append(err.format(version_string, exc))
                continue
            if not cached_path:
                problems.append(err.format(version_string, "file not found"))
                continue
            elif not os.path.exists(cached_path):
                problems.append("{} does not exist on minion".format(version_string))
                continue
            source_info = __salt__["lowpkg.bin_pkg_info"](cached_path)
            if source_info is None:
                warnings.append(
                    "Failed to parse metadata for {}".format(version_string)
                )
                continue
            else:
                verstr = source_info["version"]
        else:
            verstr = version_string
            if reinstall:
                to_reinstall[package_name] = version_string
                continue
            if not __salt__["pkg_resource.check_extra_requirements"](
                package_name, version_string
            ):
                targets[package_name] = version_string
                continue
            elif __salt__["pkg_resource.version_clean"](version_string) is None:
                # No version specified: installed is enough, but run
                # pkg.verify (when requested) to catch altered files.
                if (not reinstall) and pkg_verify:
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                continue
        version_fulfilled = False
        allow_updates = bool(not sources and kwargs.get("allow_updates"))
        try:
            version_fulfilled = _fulfills_version_string(
                cver, verstr, ignore_epoch=ignore_epoch, allow_updates=allow_updates
            )
        except CommandExecutionError as exc:
            problems.append(exc.strerror)
            continue
        version_spec = True
        if not version_fulfilled:
            if reinstall:
                to_reinstall[package_name] = version_string
            else:
                version_conditions = _parse_version_string(version_string)
                if pkg_verify and any(
                    oper == "==" for oper, version in version_conditions
                ):
                    try:
                        verify_result = __salt__["pkg.verify"](
                            package_name,
                            ignore_types=ignore_types,
                            verify_options=verify_options,
                            **kwargs
                        )
                    except (CommandExecutionError, SaltInvocationError) as exc:
                        failed_verify = exc.strerror
                        continue
                    if verify_result:
                        to_reinstall[package_name] = version_string
                        altered_files[package_name] = verify_result
                else:
                    log.debug(
                        "Current version (%s) did not match desired version "
                        "specification (%s), adding to installation targets",
                        cver,
                        version_string,
                    )
                    targets[package_name] = version_string
    if failed_verify:
        problems.append(failed_verify)
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if not any((targets, to_unpurge, to_reinstall)):
        # All specified packages are installed and at the desired version
        msg = "All specified packages are already installed{0}"
        msg = msg.format(
            " and are at the desired version" if version_spec and not sources else ""
        )
        ret = {"name": name, "changes": {}, "result": True, "comment": msg}
        if warnings:
            ret.setdefault("warnings", []).extend(warnings)
        return ret
    return (
        desired,
        targets,
        to_unpurge,
        to_reinstall,
        altered_files,
        warnings,
        was_refreshed,
    )
def _verify_install(desired, new_pkgs, ignore_epoch=None, new_caps=None):
    """
    Compare the post-install package list against the desired packages.

    desired
        Mapping of {pkgname: version spec} that was requested.

    new_pkgs
        Output of pkg.list_pkgs taken after the install.

    new_caps
        Optional mapping of capability name -> providing package(s), used
        when a desired name is a capability rather than a package.

    Returns a tuple (ok, failed) of package-name lists.
    """
    _ok = []
    failed = []
    if not new_caps:
        new_caps = dict()
    for pkgname, pkgver in desired.items():
        # Per-platform name munging to look up the installed version(s)
        has_origin = "/" in pkgname
        if __grains__["os"] == "FreeBSD" and has_origin:
            cver = [k for k, v in new_pkgs.items() if v["origin"] == pkgname]
        elif __grains__["os"] == "MacOS" and has_origin:
            cver = new_pkgs.get(pkgname, new_pkgs.get(pkgname.split("/")[-1]))
        elif __grains__["os"] == "OpenBSD":
            cver = new_pkgs.get(pkgname.split("%")[0])
        elif __grains__["os_family"] == "Debian":
            cver = new_pkgs.get(pkgname.split("=")[0])
        else:
            cver = new_pkgs.get(pkgname)
        if not cver and pkgname in new_caps:
            # Desired name is a capability; check its providing package
            cver = new_pkgs.get(new_caps.get(pkgname)[0])
        if not cver:
            failed.append(pkgname)
            continue
        elif pkgver == "latest":
            _ok.append(pkgname)
            continue
        elif not __salt__["pkg_resource.version_clean"](pkgver):
            # No version requirement: being installed at all is enough
            _ok.append(pkgname)
            continue
        elif pkgver.endswith("*") and cver[0].startswith(pkgver[:-1]):
            # Glob version spec matched by simple prefix
            _ok.append(pkgname)
            continue
        if _fulfills_version_string(cver, pkgver, ignore_epoch=ignore_epoch):
            _ok.append(pkgname)
        else:
            failed.append(pkgname)
    return _ok, failed
def _get_desired_pkg(name, desired):
if not desired[name] or desired[name].startswith(("<", ">", "=")):
oper = ""
else:
oper = "="
return "{}{}{}".format(name, oper, "" if not desired[name] else desired[name])
def _preflight_check(desired, fromrepo, **kwargs):
    """
    Query the package database for the desired package names.

    Returns a dict with two keys: "suggest" (mapping of missing package
    name -> list of possible matches) and "no_suggest" (missing names with
    no matches at all). An empty dict is returned when pkg.check_db is not
    available on this platform.
    """
    if "pkg.check_db" not in __salt__:
        return {}
    result = {"suggest": {}, "no_suggest": []}
    pkginfo = __salt__["pkg.check_db"](
        *list(desired.keys()), fromrepo=fromrepo, **kwargs
    )
    for pkgname, info in pkginfo.items():
        if info["found"] is not False:
            continue
        if info["suggestions"]:
            result["suggest"][pkgname] = info["suggestions"]
        else:
            result["no_suggest"].append(pkgname)
    return result
def _nested_output(obj):
    """
    Render *obj* with the nested outputter, stripping trailing whitespace.
    """
    # The outputter reads its options from a module-level attribute.
    nested.__opts__ = __opts__
    return nested.output(obj).rstrip()
def _resolve_capabilities(pkgs, refresh=False, **kwargs):
    """
    Resolve capability names in *pkgs* to real package names when the
    platform's pkg module supports it.

    Returns a (packages, refresh) tuple; refresh comes back False after a
    successful resolution because pkg.resolve_capabilities has already
    performed any requested refresh.
    """
    if pkgs and "pkg.resolve_capabilities" in __salt__:
        resolved = __salt__["pkg.resolve_capabilities"](pkgs, refresh=refresh, **kwargs)
        return resolved, False
    return pkgs, refresh
def installed(
name,
version=None,
refresh=None,
fromrepo=None,
skip_verify=False,
skip_suggestions=False,
pkgs=None,
sources=None,
allow_updates=False,
pkg_verify=False,
normalize=True,
ignore_epoch=None,
reinstall=False,
update_holds=False,
**kwargs
):
if isinstance(pkgs, list) and len(pkgs) == 0:
return {
"name": name,
"changes": {},
"result": True,
"comment": "No packages to install provided",
}
if name and not any((pkgs, sources)):
if version:
pkgs = [{name: version}]
version = None
else:
pkgs = [name]
kwargs["saltenv"] = __env__
refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
if pkgs:
pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
if not isinstance(pkg_verify, list):
pkg_verify = pkg_verify is True
if (pkg_verify or isinstance(pkg_verify, list)) and "pkg.verify" not in __salt__:
return {
"name": name,
"changes": {},
"result": False,
"comment": "pkg.verify not implemented",
}
if not isinstance(version, str) and version is not None:
version = str(version)
kwargs["allow_updates"] = allow_updates
result = _find_install_targets(
name,
version,
pkgs,
sources,
fromrepo=fromrepo,
skip_suggestions=skip_suggestions,
pkg_verify=pkg_verify,
normalize=normalize,
ignore_epoch=ignore_epoch,
reinstall=reinstall,
refresh=refresh,
**kwargs
)
try:
(
desired,
targets,
to_unpurge,
to_reinstall,
altered_files,
warnings,
was_refreshed,
) = result
if was_refreshed:
refresh = False
except ValueError:
if "pkg.hold" in __salt__ and "hold" in kwargs:
try:
action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
hold_ret = __salt__[action](name=name, pkgs=pkgs, sources=sources)
except (CommandExecutionError, SaltInvocationError) as exc:
return {
"name": name,
"changes": {},
"result": False,
"comment": str(exc),
}
if "result" in hold_ret and not hold_ret["result"]:
return {
"name": name,
"changes": {},
"result": False,
"comment": (
"An error was encountered while "
"holding/unholding package(s): {}".format(hold_ret["comment"])
),
}
else:
modified_hold = [
hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
]
not_modified_hold = [
hold_ret[x]
for x in hold_ret
if not hold_ret[x]["changes"] and hold_ret[x]["result"]
]
failed_hold = [
hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
]
for i in modified_hold:
result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
result["changes"][i["name"]] = i["changes"]
for i in not_modified_hold:
result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
for i in failed_hold:
result["comment"] += ".\n{}".format(i["comment"])
result["result"] = i["result"]
return result
if to_unpurge and "lowpkg.unpurge" not in __salt__:
ret = {
"name": name,
"changes": {},
"result": False,
"comment": "lowpkg.unpurge not implemented",
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
if pkgs:
pkgs = [dict([(x, y)]) for x, y in targets.items()]
pkgs.extend([dict([(x, y)]) for x, y in to_reinstall.items()])
elif sources:
oldsources = sources
sources = [x for x in oldsources if next(iter(list(x.keys()))) in targets]
sources.extend(
[x for x in oldsources if next(iter(list(x.keys()))) in to_reinstall]
)
comment = []
changes = {"installed": {}}
if __opts__["test"]:
if targets:
if sources:
_targets = targets
else:
_targets = [_get_desired_pkg(x, targets) for x in targets]
summary = ", ".join(targets)
changes["installed"].update(
{x: {"new": "installed", "old": ""} for x in targets}
)
comment.append(
"The following packages would be installed/updated: {}".format(summary)
)
if to_unpurge:
comment.append(
"The following packages would have their selection status "
"changed from 'purge' to 'install': {}".format(", ".join(to_unpurge))
)
changes["installed"].update(
{x: {"new": "installed", "old": ""} for x in to_unpurge}
)
if to_reinstall:
if reinstall:
reinstall_targets = []
for reinstall_pkg in to_reinstall:
if sources:
reinstall_targets.append(reinstall_pkg)
else:
reinstall_targets.append(
_get_desired_pkg(reinstall_pkg, to_reinstall)
)
changes["installed"].update(
{x: {"new": "installed", "old": ""} for x in reinstall_targets}
)
msg = "The following packages would be reinstalled: "
msg += ", ".join(reinstall_targets)
comment.append(msg)
else:
for reinstall_pkg in to_reinstall:
if sources:
pkgstr = reinstall_pkg
else:
pkgstr = _get_desired_pkg(reinstall_pkg, to_reinstall)
comment.append(
"Package '{}' would be reinstalled because the "
"following files have been altered:".format(pkgstr)
)
changes["installed"].update({reinstall_pkg: {}})
comment.append(_nested_output(altered_files[reinstall_pkg]))
ret = {
"name": name,
"changes": changes,
"result": None,
"comment": "\n".join(comment),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
modified_hold = None
not_modified_hold = None
failed_hold = None
if targets or to_reinstall:
try:
pkg_ret = __salt__["pkg.install"](
name=None,
refresh=refresh,
version=version,
fromrepo=fromrepo,
skip_verify=skip_verify,
pkgs=pkgs,
sources=sources,
reinstall=bool(to_reinstall),
normalize=normalize,
update_holds=update_holds,
ignore_epoch=ignore_epoch,
**kwargs
)
except CommandExecutionError as exc:
ret = {"name": name, "result": False}
if exc.info:
ret["changes"] = exc.info.get("changes", {})
ret["comment"] = exc.strerror_without_changes
else:
ret["changes"] = {}
ret[
"comment"
] = "An error was encountered while installing package(s): {}".format(
exc
)
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
if refresh:
refresh = False
if isinstance(pkg_ret, dict):
changes["installed"].update(pkg_ret)
elif isinstance(pkg_ret, str):
comment.append(pkg_ret)
pkg_ret = {}
if "pkg.hold" in __salt__ and "hold" in kwargs:
try:
action = "pkg.hold" if kwargs["hold"] else "pkg.unhold"
hold_ret = __salt__[action](name=name, pkgs=desired)
except (CommandExecutionError, SaltInvocationError) as exc:
comment.append(str(exc))
ret = {
"name": name,
"changes": changes,
"result": False,
"comment": "\n".join(comment),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
else:
if "result" in hold_ret and not hold_ret["result"]:
ret = {
"name": name,
"changes": {},
"result": False,
"comment": (
"An error was encountered while "
"holding/unholding package(s): {}".format(hold_ret["comment"])
),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
else:
modified_hold = [
hold_ret[x] for x in hold_ret if hold_ret[x]["changes"]
]
not_modified_hold = [
hold_ret[x]
for x in hold_ret
if not hold_ret[x]["changes"] and hold_ret[x]["result"]
]
failed_hold = [
hold_ret[x] for x in hold_ret if not hold_ret[x]["result"]
]
if to_unpurge:
changes["purge_desired"] = __salt__["lowpkg.unpurge"](*to_unpurge)
if sources:
modified = [x for x in changes["installed"] if x in targets]
not_modified = [
x for x in desired if x not in targets and x not in to_reinstall
]
failed = [x for x in targets if x not in modified]
else:
if __grains__["os"] == "FreeBSD":
kwargs["with_origin"] = True
new_pkgs = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
if (
kwargs.get("resolve_capabilities", False)
and "pkg.list_provides" in __salt__
):
new_caps = __salt__["pkg.list_provides"](**kwargs)
else:
new_caps = {}
_ok, failed = _verify_install(
desired, new_pkgs, ignore_epoch=ignore_epoch, new_caps=new_caps
)
modified = [x for x in _ok if x in targets]
not_modified = [x for x in _ok if x not in targets and x not in to_reinstall]
failed = [x for x in failed if x in targets]
if not changes.get("purge_desired"):
changes = changes["installed"]
if modified:
if sources:
summary = ", ".join(modified)
else:
summary = ", ".join([_get_desired_pkg(x, desired) for x in modified])
if len(summary) < 20:
comment.append(
"The following packages were installed/updated: {}".format(summary)
)
else:
comment.append(
"{} targeted package{} {} installed/updated.".format(
len(modified),
"s" if len(modified) > 1 else "",
"were" if len(modified) > 1 else "was",
)
)
if modified_hold:
for i in modified_hold:
change_name = i["name"]
if change_name in changes:
comment.append(i["comment"])
if len(changes[change_name]["new"]) > 0:
changes[change_name]["new"] += "\n"
changes[change_name]["new"] += "{}".format(i["changes"]["new"])
if len(changes[change_name]["old"]) > 0:
changes[change_name]["old"] += "\n"
changes[change_name]["old"] += "{}".format(i["changes"]["old"])
else:
comment.append(i["comment"])
changes[change_name] = {}
changes[change_name]["new"] = "{}".format(i["changes"]["new"])
if not_modified:
if sources:
summary = ", ".join(not_modified)
else:
summary = ", ".join([_get_desired_pkg(x, desired) for x in not_modified])
if len(not_modified) <= 20:
comment.append(
"The following packages were already installed: {}".format(summary)
)
else:
comment.append(
"{} targeted package{} {} already installed".format(
len(not_modified),
"s" if len(not_modified) > 1 else "",
"were" if len(not_modified) > 1 else "was",
)
)
if not_modified_hold:
for i in not_modified_hold:
comment.append(i["comment"])
result = True
if failed:
if sources:
summary = ", ".join(failed)
else:
summary = ", ".join([_get_desired_pkg(x, desired) for x in failed])
comment.insert(
0, "The following packages failed to install/update: {}".format(summary)
)
result = False
if failed_hold:
for i in failed_hold:
comment.append(i["comment"])
result = False
if isinstance(pkg_verify, list) and any(
x.get("ignore_types") is not None
for x in pkg_verify
if isinstance(x, _OrderedDict) and "ignore_types" in x
):
ignore_types = next(
x.get("ignore_types") for x in pkg_verify if "ignore_types" in x
)
else:
ignore_types = []
if isinstance(pkg_verify, list) and any(
x.get("verify_options") is not None
for x in pkg_verify
if isinstance(x, _OrderedDict) and "verify_options" in x
):
verify_options = next(
x.get("verify_options") for x in pkg_verify if "verify_options" in x
)
else:
verify_options = []
modified = []
failed = []
for reinstall_pkg in to_reinstall:
if reinstall:
if reinstall_pkg in pkg_ret:
modified.append(reinstall_pkg)
else:
failed.append(reinstall_pkg)
elif pkg_verify:
verify_result = __salt__["pkg.verify"](
reinstall_pkg,
ignore_types=ignore_types,
verify_options=verify_options,
**kwargs
)
if verify_result:
failed.append(reinstall_pkg)
altered_files[reinstall_pkg] = verify_result
else:
modified.append(reinstall_pkg)
if modified:
for modified_pkg in modified:
if sources:
pkgstr = modified_pkg
else:
pkgstr = _get_desired_pkg(modified_pkg, desired)
msg = "Package {} was reinstalled.".format(pkgstr)
if modified_pkg in altered_files:
msg += " The following files were remediated:"
comment.append(msg)
comment.append(_nested_output(altered_files[modified_pkg]))
else:
comment.append(msg)
if failed:
for failed_pkg in failed:
if sources:
pkgstr = failed_pkg
else:
pkgstr = _get_desired_pkg(failed_pkg, desired)
msg = "Reinstall was not successful for package {}.".format(pkgstr)
if failed_pkg in altered_files:
msg += " The following files could not be remediated:"
comment.append(msg)
comment.append(_nested_output(altered_files[failed_pkg]))
else:
comment.append(msg)
result = False
ret = {
"name": name,
"changes": changes,
"result": result,
"comment": "\n".join(comment),
}
if warnings:
ret.setdefault("warnings", []).extend(warnings)
return ret
def downloaded(
    name, version=None, pkgs=None, fromrepo=None, ignore_epoch=None, **kwargs
):
    """Ensure that the named package(s) are downloaded but not installed.

    Delegates to ``pkg.install`` with ``downloadonly=True`` and then checks
    ``pkg.list_downloaded`` to verify that every target actually arrived.

    name
        Package name (ignored when ``pkgs`` is given).
    version
        Specific version to download; only honored when a single ``name``
        is used.
    pkgs
        List of packages (strings or single-entry ``{name: version}`` dicts).
    fromrepo
        Restrict the download to a specific repository.
    ignore_epoch
        Passed through to version comparison when verifying the download.

    Returns a standard state dict: ``name``, ``changes``, ``result``,
    ``comment``.
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}
    # pkg.list_downloaded is the capability probe: without it we cannot
    # verify downloads on this platform's provider.
    if "pkg.list_downloaded" not in __salt__:
        ret["result"] = False
        ret["comment"] = "The pkg.downloaded state is not available on this platform"
        return ret
    # An explicitly empty pkgs list is a no-op success, not an error.
    if isinstance(pkgs, list) and len(pkgs) == 0:
        ret["result"] = True
        ret["comment"] = "No packages to download provided"
        return ret
    # Normalize the single-name form into the pkgs-list form so the rest of
    # the function only deals with one shape.
    if name and not pkgs:
        if version:
            pkgs = [{name: version}]
            version = None
        else:
            pkgs = [name]
    # Drop any caller-supplied 'downloadonly' flag,
    # as we're explicitly passing 'downloadonly=True' to execution module.
    if "downloadonly" in kwargs:
        del kwargs["downloadonly"]
    pkgs, _refresh = _resolve_capabilities(pkgs, **kwargs)
    targets = _find_download_targets(
        name, version, pkgs, fromrepo=fromrepo, ignore_epoch=ignore_epoch, **kwargs
    )
    # The helper either returns a complete state dict (pass it straight
    # through), a dict of targets, or an error payload of any other type.
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, dict):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret
    # Test mode: report what would happen, result stays None.
    if __opts__["test"]:
        summary = ", ".join(targets)
        ret["comment"] = "The following packages would be downloaded: {}".format(
            summary
        )
        return ret
    try:
        pkg_ret = __salt__["pkg.install"](
            name=name,
            pkgs=pkgs,
            version=version,
            downloadonly=True,
            fromrepo=fromrepo,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
        ret["result"] = True
        ret["changes"].update(pkg_ret)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # The exception carries partial change data; surface it.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(exc)
        return ret
    # Verify that each target now appears among the downloaded packages.
    new_pkgs = __salt__["pkg.list_downloaded"](**kwargs)
    _ok, failed = _verify_install(targets, new_pkgs, ignore_epoch=ignore_epoch)
    if failed:
        summary = ", ".join([_get_desired_pkg(x, targets) for x in failed])
        ret["result"] = False
        ret["comment"] = "The following packages failed to download: {}".format(summary)
    if not ret["changes"] and not ret["comment"]:
        ret["result"] = True
        ret["comment"] = "Packages downloaded: {}".format(", ".join(targets))
    return ret
def patch_installed(name, advisory_ids=None, downloadonly=None, **kwargs):
    """Ensure that packages related to the given advisory ids are installed.

    name
        Advisory/patch name (used when ``advisory_ids`` is not given).
    advisory_ids
        List of advisory ids whose related packages should be installed.
    downloadonly
        When truthy, only download the packages (passed straight through to
        ``pkg.install``).

    Returns a standard state dict: ``name``, ``changes``, ``result``,
    ``comment``.
    """
    ret = {"name": name, "changes": {}, "result": None, "comment": ""}
    # pkg.list_patches availability is the capability probe for this state.
    if "pkg.list_patches" not in __salt__:
        ret["result"] = False
        ret[
            "comment"
        ] = "The pkg.patch_installed state is not available on this platform"
        return ret
    # An explicitly empty list of advisories is a no-op success.
    if isinstance(advisory_ids, list) and len(advisory_ids) == 0:
        ret["result"] = True
        ret["comment"] = "No advisory ids provided"
        return ret
    targets = _find_advisory_targets(name, advisory_ids, **kwargs)
    # The helper either returns a finished state dict, a list of targets,
    # or an error payload of any other type.
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, list):
        ret["result"] = False
        ret["comment"] = "An error was encountered while checking targets: {}".format(
            targets
        )
        return ret
    # Test mode: report what would happen, result stays None.
    if __opts__["test"]:
        summary = ", ".join(targets)
        ret[
            "comment"
        ] = "The following advisory patches would be downloaded: {}".format(summary)
        return ret
    try:
        pkg_ret = __salt__["pkg.install"](
            name=name, advisory_ids=advisory_ids, downloadonly=downloadonly, **kwargs
        )
        ret["result"] = True
        ret["changes"].update(pkg_ret)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # The exception carries partial change data; surface it.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while downloading package(s): {}".format(exc)
        return ret
    # No changes and no comment means nothing needed doing.
    if not ret["changes"] and not ret["comment"]:
        status = "downloaded" if downloadonly else "installed"
        ret["result"] = True
        ret[
            "comment"
        ] = "Advisory patch is not needed or related packages are already {}".format(
            status
        )
    return ret
def patch_downloaded(name, advisory_ids=None, **kwargs):
    """Ensure packages related to the given advisory ids are downloaded.

    Thin wrapper around :func:`patch_installed` that forces
    ``downloadonly=True`` so the related packages are only fetched, never
    installed.
    """
    if "pkg.list_patches" not in __salt__:
        return {
            "name": name,
            "result": False,
            "changes": {},
            "comment": (
                "The pkg.patch_downloaded state is not available on this platform"
            ),
        }
    # Drop any caller-supplied 'downloadonly' flag,
    # as we're explicitly passing 'downloadonly=True' to execution module.
    kwargs.pop("downloadonly", None)
    return patch_installed(
        name=name, advisory_ids=advisory_ids, downloadonly=True, **kwargs
    )
def latest(
    name,
    refresh=None,
    fromrepo=None,
    skip_verify=False,
    pkgs=None,
    watch_flags=True,
    **kwargs
):
    """Ensure that the named package(s) are at the latest available version.

    name
        Package name (ignored when ``pkgs`` is given).
    refresh
        Whether to refresh the package database before checking versions;
        normalized via ``salt.utils.pkg.check_refresh``.
    fromrepo
        Restrict version lookup/installation to a specific repository.
    skip_verify
        Passed through to ``pkg.install`` (skip signature verification).
    pkgs
        List of package names; ``sources`` is explicitly not supported.
    watch_flags
        On Gentoo, reinstall the current version when USE flags changed
        (checked via ``portage_config.is_changed_uses``).

    Returns a standard state dict: ``name``, ``changes``, ``result``,
    ``comment``.
    """
    refresh = salt.utils.pkg.check_refresh(__opts__, refresh)
    if kwargs.get("sources"):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": 'The "sources" parameter is not supported.',
        }
    elif pkgs:
        desired_pkgs = list(_repack_pkgs(pkgs).keys())
        if not desired_pkgs:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": 'Invalidly formatted "pkgs" parameter. See minion log.',
            }
    else:
        # An explicitly empty pkgs list is a no-op success; otherwise fall
        # back to treating 'name' as the single desired package.
        if isinstance(pkgs, list) and len(pkgs) == 0:
            return {
                "name": name,
                "changes": {},
                "result": True,
                "comment": "No packages to install provided",
            }
        else:
            desired_pkgs = [name]
    kwargs["saltenv"] = __env__
    desired_pkgs, refresh = _resolve_capabilities(
        desired_pkgs, refresh=refresh, **kwargs
    )
    # Look up the newest available version for every desired package.
    try:
        avail = __salt__["pkg.latest_version"](
            *desired_pkgs, fromrepo=fromrepo, refresh=refresh, **kwargs
        )
    except CommandExecutionError as exc:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": (
                "An error was encountered while checking the "
                "newest available version of package(s): {}".format(exc)
            ),
        }
    # Look up the currently installed version(s).
    try:
        cur = __salt__["pkg.version"](*desired_pkgs, **kwargs)
    except CommandExecutionError as exc:
        return {"name": name, "changes": {}, "result": False, "comment": exc.strerror}
    # With a single package the execution module returns a bare string;
    # normalize both maps to {pkgname: version}.
    if isinstance(cur, str):
        cur = {desired_pkgs[0]: cur}
    if isinstance(avail, str):
        avail = {desired_pkgs[0]: avail}
    targets = {}
    problems = []
    for pkg in desired_pkgs:
        if not avail.get(pkg):
            # No newer version available. If it is not installed either,
            # the package name is unknown -- that is an error.
            if not cur.get(pkg):
                msg = "No information found for '{}'.".format(pkg)
                log.error(msg)
                problems.append(msg)
        elif (
            watch_flags
            and __grains__.get("os") == "Gentoo"
            and __salt__["portage_config.is_changed_uses"](pkg)
        ):
            # Gentoo: USE flags changed, so reinstall the current version.
            targets[pkg] = cur[pkg]
        else:
            targets[pkg] = avail[pkg]
    if problems:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": " ".join(problems),
        }
    if targets:
        if not pkgs:
            # only targeted a single package and is being allowed to proceed to
            # the install step.
            up_to_date = []
        else:
            up_to_date = [x for x in pkgs if x not in targets]
        if __opts__["test"]:
            comments = []
            comments.append(
                "The following packages would be installed/upgraded: "
                + ", ".join(sorted(targets))
            )
            if up_to_date:
                up_to_date_count = len(up_to_date)
                if up_to_date_count <= 10:
                    comments.append(
                        "The following packages are already up-to-date: "
                        + ", ".join(
                            ["{} ({})".format(x, cur[x]) for x in sorted(up_to_date)]
                        )
                    )
                else:
                    comments.append(
                        "{} packages are already up-to-date".format(up_to_date_count)
                    )
            return {
                "name": name,
                "changes": {},
                "result": None,
                "comment": "\n".join(comments),
            }
        if salt.utils.platform.is_windows():
            # pkg.install execution module on windows ensures the software
            # package is installed when no version is specified, it does not
            # upgrade the software to the latest. This is per the design.
            # Build updated list of pkgs *with verion number*, exclude
            # non-targeted ones
            targeted_pkgs = [{x: targets[x]} for x in targets]
        else:
            # Build updated list of pkgs to exclude non-targeted ones
            targeted_pkgs = list(targets)
        # No need to refresh, if a refresh was necessary it would have been
        # performed above when pkg.latest_version was run.
        try:
            changes = __salt__["pkg.install"](
                name=None,
                refresh=False,
                fromrepo=fromrepo,
                skip_verify=skip_verify,
                pkgs=targeted_pkgs,
                **kwargs
            )
        except CommandExecutionError as exc:
            return {
                "name": name,
                "changes": {},
                "result": False,
                "comment": (
                    "An error was encountered while installing package(s): {}".format(
                        exc
                    )
                ),
            }
        if changes:
            # Find failed and successful updates
            failed = [
                x
                for x in targets
                if not changes.get(x)
                or changes[x].get("new") != targets[x]
                and targets[x] != "latest"
            ]
            successful = [x for x in targets if x not in failed]
            comments = []
            if failed:
                msg = "The following packages failed to update: {}".format(
                    ", ".join(sorted(failed))
                )
                comments.append(msg)
            if successful:
                msg = (
                    "The following packages were successfully "
                    "installed/upgraded: "
                    "{}".format(", ".join(sorted(successful)))
                )
                comments.append(msg)
            if up_to_date:
                if len(up_to_date) <= 10:
                    msg = "The following packages were already up-to-date: {}".format(
                        ", ".join(sorted(up_to_date))
                    )
                else:
                    msg = "{} packages were already up-to-date ".format(len(up_to_date))
                comments.append(msg)
            return {
                "name": name,
                "changes": changes,
                "result": False if failed else True,
                "comment": " ".join(comments),
            }
        else:
            # pkg.install reported no changes at all: every target failed.
            if len(targets) > 10:
                comment = (
                    "{} targeted packages failed to update. "
                    "See debug log for details.".format(len(targets))
                )
            elif len(targets) > 1:
                comment = (
                    "The following targeted packages failed to update. "
                    "See debug log for details: ({}).".format(
                        ", ".join(sorted(targets))
                    )
                )
            else:
                comment = "Package {} failed to update.".format(
                    next(iter(list(targets.keys())))
                )
            if up_to_date:
                if len(up_to_date) <= 10:
                    comment += (
                        " The following packages were already up-to-date: {}".format(
                            ", ".join(sorted(up_to_date))
                        )
                    )
                else:
                    comment += "{} packages were already up-to-date".format(
                        len(up_to_date)
                    )
            return {
                "name": name,
                "changes": changes,
                "result": False,
                "comment": comment,
            }
    else:
        # Nothing to do: everything already at the newest version.
        if len(desired_pkgs) > 10:
            comment = "All {} packages are up-to-date.".format(len(desired_pkgs))
        elif len(desired_pkgs) > 1:
            comment = "All packages are up-to-date ({}).".format(
                ", ".join(sorted(desired_pkgs))
            )
        else:
            comment = "Package {} is already up-to-date".format(desired_pkgs[0])
        return {"name": name, "changes": {}, "result": True, "comment": comment}
def _uninstall(
    action="remove",
    name=None,
    version=None,
    pkgs=None,
    normalize=True,
    ignore_epoch=None,
    **kwargs
):
    """Common backend for the ``removed`` and ``purged`` states.

    action
        Either ``"remove"`` or ``"purge"``; selects which ``pkg.<action>``
        execution function is invoked. Any other value is rejected.
    name / version / pkgs / normalize / ignore_epoch
        Same meanings as in the public states; passed to target resolution
        and to the execution module.

    Returns a standard state dict: ``name``, ``changes``, ``result``,
    ``comment``.
    """
    if action not in ("remove", "purge"):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "Invalid action '{}'. This is probably a bug.".format(action),
        }
    # Parse the requested targets into a {pkgname: version} mapping.
    try:
        pkg_params = __salt__["pkg_resource.parse_targets"](
            name, pkgs, normalize=normalize
        )[0]
    except MinionError as exc:
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while parsing targets: {}".format(exc),
        }
    targets = _find_remove_targets(
        name, version, pkgs, normalize, ignore_epoch=ignore_epoch, **kwargs
    )
    # The helper either returns a finished state dict, a list of targets,
    # or an error payload of any other type.
    if isinstance(targets, dict) and "result" in targets:
        return targets
    elif not isinstance(targets, list):
        return {
            "name": name,
            "changes": {},
            "result": False,
            "comment": "An error was encountered while checking targets: {}".format(
                targets
            ),
        }
    if action == "purge":
        # Purge also has to cover packages that were removed but still have
        # residual config ("removed=True" listing).
        old_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        targets.extend([x for x in pkg_params if x in old_removed])
    targets.sort()
    if not targets:
        return {
            "name": name,
            "changes": {},
            "result": True,
            "comment": "None of the targeted packages are installed{}".format(
                " or partially installed" if action == "purge" else ""
            ),
        }
    # Test mode: report the pending removals, result stays None.
    if __opts__["test"]:
        _changes = {}
        _changes.update({x: {"new": "{}d".format(action), "old": ""} for x in targets})
        return {
            "name": name,
            "changes": _changes,
            "result": None,
            "comment": "The following packages will be {}d: {}.".format(
                action, ", ".join(targets)
            ),
        }
    changes = __salt__["pkg.{}".format(action)](
        name, pkgs=pkgs, version=version, **kwargs
    )
    new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
    failed = []
    for param in pkg_params:
        if __grains__["os_family"] in ["Suse", "RedHat"]:
            # Check if the package version set to be removed is actually removed:
            if param in new and not pkg_params[param]:
                failed.append(param)
            elif param in new and pkg_params[param] in new[param]:
                failed.append(param + "-" + pkg_params[param])
        elif param in new:
            failed.append(param)
    if action == "purge":
        # Packages still listed as "removed" after a purge did not purge.
        new_removed = __salt__["pkg.list_pkgs"](
            versions_as_list=True, removed=True, **kwargs
        )
        failed.extend([x for x in pkg_params if x in new_removed])
    failed.sort()
    if failed:
        return {
            "name": name,
            "changes": changes,
            "result": False,
            "comment": "The following packages failed to {}: {}.".format(
                action, ", ".join(failed)
            ),
        }
    comments = []
    not_installed = sorted([x for x in pkg_params if x not in targets])
    if not_installed:
        comments.append(
            "The following packages were not installed: {}".format(
                ", ".join(not_installed)
            )
        )
        comments.append(
            "The following packages were {}d: {}.".format(action, ", ".join(targets))
        )
    else:
        comments.append("All targeted packages were {}d.".format(action))
    return {
        "name": name,
        "changes": changes,
        "result": True,
        "comment": " ".join(comments),
    }
def removed(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """Verify that the named package(s) are not installed.

    Delegates to :func:`_uninstall` with ``action="remove"``; any
    ``CommandExecutionError`` raised by the execution layer is converted
    into a failing state return.
    """
    kwargs["saltenv"] = __env__
    try:
        return _uninstall(
            action="remove",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        failure = {"name": name, "result": False}
        if exc.info:
            # Get information for state return from the exception.
            failure["changes"] = exc.info.get("changes", {})
            failure["comment"] = exc.strerror_without_changes
        else:
            failure["changes"] = {}
            failure["comment"] = (
                "An error was encountered while removing package(s): {}".format(exc)
            )
        return failure
def purged(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, **kwargs):
    """Verify that the named package(s) are not installed, purging any
    residual configuration files as well.

    Delegates to :func:`_uninstall` with ``action="purge"``; any
    ``CommandExecutionError`` raised by the execution layer is converted
    into a failing state return.
    """
    kwargs["saltenv"] = __env__
    try:
        return _uninstall(
            action="purge",
            name=name,
            version=version,
            pkgs=pkgs,
            normalize=normalize,
            ignore_epoch=ignore_epoch,
            **kwargs
        )
    except CommandExecutionError as exc:
        failure = {"name": name, "result": False}
        if exc.info:
            # Get information for state return from the exception.
            failure["changes"] = exc.info.get("changes", {})
            failure["comment"] = exc.strerror_without_changes
        else:
            failure["changes"] = {}
            failure["comment"] = (
                "An error was encountered while purging package(s): {}".format(exc)
            )
        return failure
def uptodate(name, refresh=False, pkgs=None, **kwargs):
    """Verify that the system is fully up to date (``pkg.upgrade``).

    name
        State id only; not used to select packages.
    refresh
        Must be a bool; whether to refresh the package database first.
    pkgs
        Optional list restricting the upgrade check to these packages.

    Returns a standard state dict: ``name``, ``changes``, ``result``,
    ``comment``.
    """
    ret = {"name": name, "changes": {}, "result": False, "comment": "Failed to update"}
    if "pkg.list_upgrades" not in __salt__:
        ret["comment"] = "State pkg.uptodate is not available"
        return ret
    # emerge --update doesn't appear to support repo notation
    if "fromrepo" in kwargs and __grains__["os"] == "Gentoo":
        ret["comment"] = "'fromrepo' argument not supported on this platform"
        return ret
    if isinstance(refresh, bool):
        pkgs, refresh = _resolve_capabilities(pkgs, refresh=refresh, **kwargs)
        try:
            packages = __salt__["pkg.list_upgrades"](refresh=refresh, **kwargs)
            # Build the expected change set: new version from the upgrade
            # listing, old version from pkg.version.
            expected = {
                pkgname: {
                    "new": pkgver,
                    "old": __salt__["pkg.version"](pkgname, **kwargs),
                }
                for pkgname, pkgver in packages.items()
            }
            # When a pkgs list was given, constrain both collections to it.
            if isinstance(pkgs, list):
                packages = [pkg for pkg in packages if pkg in pkgs]
                expected = {
                    pkgname: pkgver
                    for pkgname, pkgver in expected.items()
                    if pkgname in pkgs
                }
        except Exception as exc:
            ret["comment"] = str(exc)
            return ret
    else:
        ret["comment"] = "refresh must be either True or False"
        return ret
    if not packages:
        ret["comment"] = "System is already up-to-date"
        ret["result"] = True
        return ret
    elif __opts__["test"]:
        # Test mode: report the pending upgrades, result stays None.
        ret["comment"] = "System update will be performed"
        ret["changes"] = expected
        ret["result"] = None
        return ret
    try:
        ret["changes"] = __salt__["pkg.upgrade"](refresh=refresh, pkgs=pkgs, **kwargs)
    except CommandExecutionError as exc:
        if exc.info:
            # The exception carries partial change data; surface it.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret[
                "comment"
            ] = "An error was encountered while updating packages: {}".format(exc)
        return ret
    # When a pkgs list was given, report any expected upgrade that did not
    # show up in the upgrade's change set.
    missing = []
    if isinstance(pkgs, list):
        missing = [pkg for pkg in expected.keys() if pkg not in ret["changes"]]
    if missing:
        ret["comment"] = "The following package(s) failed to update: {}".format(
            ", ".join(missing)
        )
        ret["result"] = False
    else:
        ret["comment"] = "Upgrade ran successfully"
        ret["result"] = True
    return ret
def group_installed(name, skip=None, include=None, **kwargs):
    """Ensure that an entire package group is installed.

    name
        The name of the package group.
    skip
        List of default-group packages to leave out. Mandatory packages
        cannot be skipped.
    include
        List of extra (e.g. optional) packages to install along with the
        group.

    Returns a standard state dict: ``name``, ``changes``, ``result``,
    ``comment``.
    """
    ret = {"name": name, "changes": {}, "result": False, "comment": ""}
    # pkg.group_diff availability is the capability probe for this state.
    if "pkg.group_diff" not in __salt__:
        ret["comment"] = "pkg.group_install not available for this platform"
        return ret
    # Normalize 'skip' to a list of strings.
    if skip is None:
        skip = []
    else:
        if not isinstance(skip, list):
            ret["comment"] = "skip must be formatted as a list"
            return ret
        for idx, item in enumerate(skip):
            if not isinstance(item, str):
                skip[idx] = str(item)
    # Normalize 'include' to a list of strings.
    if include is None:
        include = []
    else:
        if not isinstance(include, list):
            ret["comment"] = "include must be formatted as a list"
            return ret
        for idx, item in enumerate(include):
            if not isinstance(item, str):
                include[idx] = str(item)
    try:
        diff = __salt__["pkg.group_diff"](name)
    except CommandExecutionError as err:
        ret[
            "comment"
        ] = "An error was encountered while installing/updating group '{}': {}.".format(
            name, err
        )
        return ret
    # Mandatory packages (installed or not) can never be skipped.
    mandatory = diff["mandatory"]["installed"] + diff["mandatory"]["not installed"]
    invalid_skip = [x for x in mandatory if x in skip]
    if invalid_skip:
        ret[
            "comment"
        ] = "The following mandatory packages cannot be skipped: {}".format(
            ", ".join(invalid_skip)
        )
        return ret
    # Targets: missing mandatory packages, missing default packages not
    # skipped, plus any explicitly included packages.
    targets = diff["mandatory"]["not installed"]
    targets.extend([x for x in diff["default"]["not installed"] if x not in skip])
    targets.extend(include)
    if not targets:
        ret["result"] = True
        ret["comment"] = "Group '{}' is already installed".format(name)
        return ret
    # Any already-installed member means the group is "partially installed"
    # and the action is reported as an update rather than an install.
    partially_installed = (
        diff["mandatory"]["installed"]
        or diff["default"]["installed"]
        or diff["optional"]["installed"]
    )
    if __opts__["test"]:
        ret["result"] = None
        if partially_installed:
            ret[
                "comment"
            ] = "Group '{}' is partially installed and will be updated".format(name)
        else:
            ret["comment"] = "Group '{}' will be installed".format(name)
        return ret
    try:
        ret["changes"] = __salt__["pkg.install"](pkgs=targets, **kwargs)
    except CommandExecutionError as exc:
        ret = {"name": name, "result": False}
        if exc.info:
            # The exception carries partial change data; surface it.
            ret["changes"] = exc.info.get("changes", {})
            ret["comment"] = exc.strerror_without_changes
        else:
            ret["changes"] = {}
            ret["comment"] = (
                "An error was encountered while "
                "installing/updating group '{}': {}".format(name, exc)
            )
        return ret
    # Verify that every target now shows up in the installed package list.
    failed = [x for x in targets if x not in __salt__["pkg.list_pkgs"](**kwargs)]
    if failed:
        ret["comment"] = "Failed to install the following packages: {}".format(
            ", ".join(failed)
        )
        return ret
    ret["result"] = True
    ret["comment"] = "Group '{}' was {}".format(
        name, "updated" if partially_installed else "installed"
    )
    return ret
def mod_init(low):
    """State-module init hook.

    Gives the provider a chance to run its own init (``pkg.ex_mod_init``)
    and, for ``installed``/``latest`` states, writes the refresh tag file
    so the package database is refreshed. Returns the init result for
    those two functions and ``False`` for everything else.
    """
    if "pkg.ex_mod_init" in __salt__:
        result = __salt__["pkg.ex_mod_init"](low)
    else:
        result = True
    if low["fun"] in ("installed", "latest"):
        salt.utils.pkg.write_rtag(__opts__)
        return result
    return False
def mod_aggregate(low, chunks, running):
    """Aggregate multiple pkg states into a single pkg.install call.

    Scans the other state chunks and, for chunks with the same function and
    repo that have not yet run, pulls their package targets into this
    ``low`` chunk (marking the absorbed chunks with ``__agg__``) so that a
    single execution handles them all.
    """
    pkgs = []
    # Tracks whether we are aggregating 'pkgs' or 'sources'; the two kinds
    # are never mixed -- the first chunk seen decides.
    pkg_type = None
    agg_enabled = [
        "installed",
        "latest",
        "removed",
        "purged",
    ]
    if low.get("fun") not in agg_enabled:
        return low
    for chunk in chunks:
        tag = __utils__["state.gen_tag"](chunk)
        if tag in running:
            # Already executed; nothing left to aggregate.
            continue
        if chunk.get("state") == "pkg":
            if "__agg__" in chunk:
                continue
            # Only aggregate chunks using the same function and repo.
            if chunk.get("fun") != low.get("fun"):
                continue
            if chunk.get("fromrepo") != low.get("fromrepo"):
                continue
            # Do not aggregate 'pkgs' and sources together.
            if "sources" in chunk:
                if pkg_type is None:
                    pkg_type = "sources"
                if pkg_type == "sources":
                    pkgs.extend(chunk["sources"])
                    chunk["__agg__"] = True
            else:
                # If hold exists in the chunk, do not add to aggregation
                # otherwise all packages will be held or unheld.
                # setting a package to be held/unheld is not as
                # time consuming as installing/uninstalling.
                if "hold" not in chunk:
                    if pkg_type is None:
                        pkg_type = "pkgs"
                    if pkg_type == "pkgs":
                        # Pull out the pkg names!
                        if "pkgs" in chunk:
                            pkgs.extend(chunk["pkgs"])
                            chunk["__agg__"] = True
                        elif "name" in chunk:
                            version = chunk.pop("version", None)
                            if version is not None:
                                pkgs.append({chunk["name"]: version})
                            else:
                                pkgs.append(chunk["name"])
                            chunk["__agg__"] = True
    # Merge everything that was collected into this chunk's target list.
    if pkg_type is not None and pkgs:
        if pkg_type in low:
            low[pkg_type].extend(pkgs)
        else:
            low[pkg_type] = pkgs
    return low
def mod_watch(name, **kwargs):
    """Install/reinstall a package based on a watch requisite.

    Dispatches to the state function named by ``sfun`` (injected by the
    state compiler); any other function produces a failing state return.
    """
    sfun = kwargs.pop("sfun", None)
    dispatch = {
        "purged": purged,
        "latest": latest,
        "removed": removed,
        "installed": installed,
    }
    handler = dispatch.get(sfun)
    if handler is not None:
        return handler(name, **kwargs)
    return {
        "name": name,
        "changes": {},
        "comment": "pkg.{} does not work with the watch requisite".format(sfun),
        "result": False,
    }
def mod_beacon(name, **kwargs):
    """Create a pkg beacon to monitor a package based on the ``beacon``
    state argument.

    Only meaningful for the ``installed`` and ``removed`` state functions
    (``sfun``); other functions produce a failing state return. When the
    ``beacon`` kwarg is falsy this is a successful no-op.

    Note: the original implementation initialized an unused ``ret`` dict
    that was overwritten or ignored on every path; it has been removed and
    the branches flattened into guard clauses. Behavior is unchanged.
    """
    sfun = kwargs.pop("sfun", None)
    supported_funcs = ["installed", "removed"]
    if sfun not in supported_funcs:
        return {
            "name": name,
            "changes": {},
            "comment": "pkg.{} does not work with the mod_beacon state function".format(
                sfun
            ),
            "result": False,
        }
    if not kwargs.get("beacon"):
        # Beacon not requested; nothing to do.
        return {
            "name": name,
            "changes": {},
            "comment": "Not adding beacon.",
            "result": True,
        }
    beacon_module = "pkg"
    beacon_name = "beacon_{}_{}".format(beacon_module, name)
    beacon_kwargs = {
        "name": beacon_name,
        # Default to watching just this state's package.
        "pkgs": kwargs.get("pkgs", [name]),
        "interval": 60,
        "beacon_module": beacon_module,
    }
    return __states__["beacon.present"](**beacon_kwargs)
| true | true |
f71f6287f35f2c7ff53b83f6c0121a0e0b75c1ea | 13,549 | py | Python | chainer/training/extensions/variable_statistics_plot.py | seiyab/chainer | 39fffb9597a6e9646307fba27ad3233c65d38632 | [
"MIT"
] | null | null | null | chainer/training/extensions/variable_statistics_plot.py | seiyab/chainer | 39fffb9597a6e9646307fba27ad3233c65d38632 | [
"MIT"
] | null | null | null | chainer/training/extensions/variable_statistics_plot.py | seiyab/chainer | 39fffb9597a6e9646307fba27ad3233c65d38632 | [
"MIT"
] | null | null | null | from __future__ import division
import os
import warnings
import numpy
import six
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer.training import extension
from chainer.training import trigger as trigger_module
from chainer.utils import argument
_available = None
def _try_import_matplotlib():
global matplotlib, _available
global _plot_color, _plot_color_trans, _plot_common_kwargs
try:
import matplotlib
_available = True
except ImportError:
_available = False
if _available:
if hasattr(matplotlib.colors, 'to_rgba'):
_to_rgba = matplotlib.colors.to_rgba
else:
# For matplotlib 1.x
_to_rgba = matplotlib.colors.ColorConverter().to_rgba
_plot_color = _to_rgba('#1f77b4') # C0 color
_plot_color_trans = _plot_color[:3] + (0.2,) # apply alpha
_plot_common_kwargs = {
'alpha': 0.2, 'linewidth': 0, 'color': _plot_color_trans}
def _check_available():
if _available is None:
_try_import_matplotlib()
if not _available:
warnings.warn('matplotlib is not installed on your environment, '
'so nothing will be plotted at this time. '
'Please install matplotlib to plot figures.\n\n'
' $ pip install matplotlib\n')
def _unpack_variables(x, memo=None):
if memo is None:
memo = ()
if isinstance(x, chainer.Variable):
memo += (x,)
elif isinstance(x, chainer.Link):
memo += tuple(x.params(include_uninit=True))
elif isinstance(x, (list, tuple)):
for xi in x:
memo += _unpack_variables(xi)
return memo
class Reservoir(object):
"""Reservoir sample with a fixed sized buffer."""
def __init__(self, size, data_shape, dtype=numpy.float32):
self.size = size
self.data = numpy.zeros((size,) + data_shape, dtype=dtype)
self.idxs = numpy.zeros((size,), dtype=numpy.int32)
self.counter = 0
def add(self, x, idx=None):
if self.counter < self.size:
self.data[self.counter] = x
self.idxs[self.counter] = idx or self.counter
elif self.counter >= self.size and \
numpy.random.random() < self.size / float(self.counter + 1):
i = numpy.random.randint(self.size)
self.data[i] = x
self.idxs[i] = idx or self.counter
self.counter += 1
def get_data(self):
idxs = self.idxs[:min(self.counter, self.size)]
sorted_args = numpy.argsort(idxs)
return idxs[sorted_args], self.data[sorted_args]
class Statistician(object):
"""Helper to compute basic NumPy-like statistics."""
def __init__(self, collect_mean, collect_std, percentile_sigmas):
self.collect_mean = collect_mean
self.collect_std = collect_std
self.percentile_sigmas = percentile_sigmas
def __call__(self, x, axis=0, dtype=None, xp=None):
if axis is None:
axis = tuple(range(x.ndim))
elif not isinstance(axis, (tuple, list)):
axis = axis,
return self.collect(x, axis)
def collect(self, x, axis):
out = dict()
if self.collect_mean:
out['mean'] = x.mean(axis=axis)
if self.collect_std:
out['std'] = x.std(axis=axis)
if self.percentile_sigmas:
xp = cuda.get_array_module(x)
p = xp.percentile(x, self.percentile_sigmas, axis=axis)
out['percentile'] = p
return out
class VariableStatisticsPlot(extension.Extension):
"""__init__(\
targets, max_sample_size=1000, report_data=True,\
report_grad=True, plot_mean=True, plot_std=True,\
percentile_sigmas=(0, 0.13, 2.28, 15.87, 50, 84.13, 97.72, 99.87,\
100), trigger=(1, 'epoch'), filename='statistics.png',\
figsize=None, marker=None, grid=True)
Trainer extension to plot statistics for :class:`Variable`\\s.
This extension collects statistics for a single :class:`Variable`, a list
of :class:`Variable`\\s or similarly a single or a list of
:class:`Link`\\s containing one or more :class:`Variable`\\s. In case
multiple :class:`Variable`\\s are found, the means are computed. The
collected statistics are plotted and saved as an image in the directory
specified by the :class:`Trainer`.
Statistics include mean, standard deviation and percentiles.
This extension uses reservoir sampling to preserve memory, using a fixed
size running sample. This means that collected items in the sample are
discarded uniformly at random when the number of items becomes larger
than the maximum sample size, but each item is expected to occur in the
sample with equal probability.
Args:
targets (:class:`Variable`, :class:`Link` or list of either):
Parameters for which statistics are collected.
max_sample_size (int):
Maximum number of running samples.
report_data (bool):
If ``True``, data (e.g. weights) statistics are plotted. If
``False``, they are neither computed nor plotted.
report_grad (bool):
If ``True``, gradient statistics are plotted. If ``False``, they
are neither computed nor plotted.
plot_mean (bool):
If ``True``, means are plotted. If ``False``, they are
neither computed nor plotted.
plot_std (bool):
If ``True``, standard deviations are plotted. If ``False``, they
are neither computed nor plotted.
percentile_sigmas (float or tuple of floats):
Percentiles to plot in the range :math:`[0, 100]`.
trigger:
Trigger that decides when to save the plots as an image. This is
distinct from the trigger of this extension itself. If it is a
tuple in the form ``<int>, 'epoch'`` or ``<int>, 'iteration'``, it
is passed to :class:`IntervalTrigger`.
filename (str):
Name of the output image file under the output directory.
For historical reasons ``file_name`` is also accepted as an alias
of this argument.
figsize (tuple of int):
Matlotlib ``figsize`` argument that specifies the size of the
output image.
marker (str):
Matplotlib ``marker`` argument that specified the marker style of
the plots.
grid (bool):
Matplotlib ``grid`` argument that specifies whether grids are
rendered in in the plots or not.
"""
def __init__(self, targets, max_sample_size=1000,
             report_data=True, report_grad=True,
             plot_mean=True, plot_std=True,
             percentile_sigmas=(
                 0, 0.13, 2.28, 15.87, 50, 84.13, 97.72, 99.87, 100),
             trigger=(1, 'epoch'), filename=None,
             figsize=None, marker=None, grid=True, **kwargs):
    # Accept the historical ``file_name`` kwarg as an alias of ``filename``.
    file_name, = argument.parse_kwargs(
        kwargs, ('file_name', 'statistics.png')
    )
    if filename is None:
        filename = file_name
    del file_name  # avoid accidental use
    # Flatten targets (Variable/Link or nested lists thereof) into a tuple
    # of Variables whose statistics will be collected.
    self._vars = _unpack_variables(targets)
    if not self._vars:
        raise ValueError(
            'Need at least one variables for which to collect statistics.'
            '\nActual: 0 <= 0')
    if not any((plot_mean, plot_std, bool(percentile_sigmas))):
        raise ValueError('Nothing to plot')
    # Attributes of each Variable to report on: 'data' and/or 'grad'.
    self._keys = []
    if report_data:
        self._keys.append('data')
    if report_grad:
        self._keys.append('grad')
    self._report_data = report_data
    self._report_grad = report_grad
    self._statistician = Statistician(
        collect_mean=plot_mean, collect_std=plot_std,
        percentile_sigmas=percentile_sigmas)
    self._plot_mean = plot_mean
    self._plot_std = plot_std
    self._plot_percentile = bool(percentile_sigmas)
    self._trigger = trigger_module.get_trigger(trigger)
    self._filename = filename
    self._figsize = figsize
    self._marker = marker
    self._grid = grid
    if not self._plot_percentile:
        n_percentile = 0
    else:
        if not isinstance(percentile_sigmas, (list, tuple)):
            n_percentile = 1  # scalar, single percentile
        else:
            n_percentile = len(percentile_sigmas)
    # One row per reported key, one column per collected statistic
    # (mean, std and each requested percentile).
    self._data_shape = (
        len(self._keys), int(plot_mean) + int(plot_std) + n_percentile)
    self._samples = Reservoir(max_sample_size, data_shape=self._data_shape)
@staticmethod
def available():
    """Return True if matplotlib is importable (warns once when it is not)."""
    _check_available()
    return _available
def __call__(self, trainer):
    """Sample statistics for the current iteration; save plots on trigger."""
    if self.available():
        # Dynamically import pyplot to call matplotlib.use()
        # after importing chainer.training.extensions
        import matplotlib.pyplot as plt
    else:
        # matplotlib is missing; _check_available() already warned, so
        # quietly skip all work.
        return
    xp = backend.get_array_module(self._vars[0].data)
    stats = xp.zeros(self._data_shape, dtype=xp.float32)
    for i, k in enumerate(self._keys):
        xs = []
        for var in self._vars:
            x = getattr(var, k, None)
            if x is not None:
                xs.append(x.ravel())
        if xs:
            # Statistics are computed jointly over all target variables,
            # hence the concatenation of the flattened arrays.
            stat_dict = self._statistician(
                xp.concatenate(xs, axis=0), axis=0, xp=xp)
            stat_list = []
            if self._plot_mean:
                stat_list.append(xp.atleast_1d(stat_dict['mean']))
            if self._plot_std:
                stat_list.append(xp.atleast_1d(stat_dict['std']))
            if self._plot_percentile:
                stat_list.append(xp.atleast_1d(stat_dict['percentile']))
            stats[i] = xp.concatenate(stat_list, axis=0)
    if xp == cuda.cupy:
        # The reservoir stores NumPy arrays, so pull GPU results to host.
        stats = cuda.to_cpu(stats)
    self._samples.add(stats, idx=trainer.updater.iteration)
    if self._trigger(trainer):
        file_path = os.path.join(trainer.out, self._filename)
        self.save_plot_using_module(file_path, plt)
def save_plot_using_module(self, file_path, plt):
    """Render the collected samples with ``plt`` and save to ``file_path``.

    The figure has one column per reported key ('data'/'grad'); the first
    row shows mean/std (if enabled), the second the percentile bands.
    """
    nrows = int(self._plot_mean or self._plot_std) \
        + int(self._plot_percentile)
    ncols = len(self._keys)
    fig, axes = plt.subplots(
        nrows, ncols, figsize=self._figsize, sharex=True)
    if not isinstance(axes, numpy.ndarray):  # single subplot
        axes = numpy.asarray([axes])
    # Normalize to a 2-D (row, col) grid so indexing below is uniform.
    if nrows == 1:
        axes = axes[None, :]
    elif ncols == 1:
        axes = axes[:, None]
    assert axes.ndim == 2
    idxs, data = self._samples.get_data()
    # Offset to access percentile data from `data`
    offset = int(self._plot_mean) + int(self._plot_std)
    n_percentile = data.shape[-1] - offset
    n_percentile_mid_floor = n_percentile // 2
    n_percentile_odd = n_percentile % 2 == 1
    for col in six.moves.range(ncols):
        row = 0
        ax = axes[row, col]
        ax.set_title(self._keys[col])  # `data` or `grad`
        if self._plot_mean or self._plot_std:
            if self._plot_mean and self._plot_std:
                ax.errorbar(
                    idxs, data[:, col, 0], data[:, col, 1],
                    color=_plot_color, ecolor=_plot_color_trans,
                    label='mean, std', marker=self._marker)
            else:
                # Only one of mean/std is enabled; it occupies column 0.
                if self._plot_mean:
                    label = 'mean'
                elif self._plot_std:
                    label = 'std'
                ax.plot(
                    idxs, data[:, col, 0], color=_plot_color, label=label,
                    marker=self._marker)
            row += 1
        if self._plot_percentile:
            ax = axes[row, col]
            # Percentiles are plotted as nested filled bands, pairing the
            # i-th smallest with the i-th largest percentile.
            for i in six.moves.range(n_percentile_mid_floor + 1):
                if n_percentile_odd and i == n_percentile_mid_floor:
                    # Enters at most once per sub-plot, in case there is
                    # only a single percentile to plot or when this
                    # percentile is the mid percentile and the number of
                    # percentiles are odd
                    ax.plot(
                        idxs, data[:, col, offset + i], color=_plot_color,
                        label='percentile', marker=self._marker)
                else:
                    if i == n_percentile_mid_floor:
                        # Last percentiles and the number of all
                        # percentiles are even
                        label = 'percentile'
                    else:
                        label = '_nolegend_'
                    ax.fill_between(
                        idxs,
                        data[:, col, offset + i],
                        data[:, col, -i - 1],
                        label=label,
                        **_plot_common_kwargs)
        ax.set_xlabel('iteration')
    for ax in axes.ravel():
        ax.legend()
        if self._grid:
            ax.grid()
            ax.set_axisbelow(True)
    fig.savefig(file_path)
    plt.close()
| 36.817935 | 79 | 0.569489 | from __future__ import division
import os
import warnings
import numpy
import six
import chainer
from chainer import backend
from chainer.backends import cuda
from chainer.training import extension
from chainer.training import trigger as trigger_module
from chainer.utils import argument
_available = None
def _try_import_matplotlib():
    """Attempt to import matplotlib once, caching results in module globals.

    Sets ``_available`` and, on success, the shared plot color constants.
    """
    global matplotlib, _available
    global _plot_color, _plot_color_trans, _plot_common_kwargs
    try:
        import matplotlib
        _available = True
    except ImportError:
        _available = False
    if _available:
        # ``to_rgba`` is a module-level function only in newer matplotlib;
        # fall back to a ColorConverter instance otherwise.
        if hasattr(matplotlib.colors, 'to_rgba'):
            _to_rgba = matplotlib.colors.to_rgba
        else:
            _to_rgba = matplotlib.colors.ColorConverter().to_rgba
        _plot_color = _to_rgba('#1f77b4')  # default matplotlib blue
        _plot_color_trans = _plot_color[:3] + (0.2,)  # transparent variant
        _plot_common_kwargs = {
            'alpha': 0.2, 'linewidth': 0, 'color': _plot_color_trans}
def _check_available():
    # Probe for matplotlib lazily; ``_available`` is None until the first
    # call, then caches the import result.
    if _available is None:
        _try_import_matplotlib()
    if not _available:
        warnings.warn('matplotlib is not installed on your environment, '
                      'so nothing will be plotted at this time. '
                      'Please install matplotlib to plot figures.\n\n'
                      '  $ pip install matplotlib\n')
def _unpack_variables(x, memo=None):
    """Recursively flatten ``x`` into a tuple of chainer Variables.

    ``x`` may be a Variable, a Link (whose parameters, including
    uninitialized ones, are collected) or an arbitrarily nested
    list/tuple of either; anything else contributes nothing.
    """
    collected = () if memo is None else memo
    if isinstance(x, chainer.Variable):
        return collected + (x,)
    if isinstance(x, chainer.Link):
        return collected + tuple(x.params(include_uninit=True))
    if isinstance(x, (list, tuple)):
        for item in x:
            collected += _unpack_variables(item)
    return collected
class Reservoir(object):

    """Fixed-size uniform random sample (reservoir sampling) of ndarrays.

    Once more than ``size`` items have been added, each new item replaces a
    uniformly chosen stored one with probability ``size / (n + 1)``, so every
    item ever added remains in the sample with equal probability.
    """

    def __init__(self, size, data_shape, dtype=numpy.float32):
        self.size = size
        self.data = numpy.zeros((size,) + data_shape, dtype=dtype)
        self.idxs = numpy.zeros((size,), dtype=numpy.int32)
        self.counter = 0  # total number of items ever added

    def add(self, x, idx=None):
        """Add ``x`` to the sample, labelled ``idx``.

        ``idx`` defaults to the running counter.  Bug fix: the previous code
        used ``idx or self.counter``, which silently discarded an explicit
        ``idx`` of 0; ``is not None`` now distinguishes "absent" from 0.
        """
        if idx is None:
            idx = self.counter
        if self.counter < self.size:
            # Still filling the reservoir: append in order.
            self.data[self.counter] = x
            self.idxs[self.counter] = idx
        elif numpy.random.random() < self.size / float(self.counter + 1):
            # Reservoir full: replace a uniformly random slot.
            i = numpy.random.randint(self.size)
            self.data[i] = x
            self.idxs[i] = idx
        self.counter += 1

    def get_data(self):
        """Return ``(idxs, data)`` of the collected items, sorted by idx."""
        idxs = self.idxs[:min(self.counter, self.size)]
        sorted_args = numpy.argsort(idxs)
        return idxs[sorted_args], self.data[sorted_args]
class Statistician(object):

    """Computes configurable mean/std/percentile statistics of an array."""

    def __init__(self, collect_mean, collect_std, percentile_sigmas):
        self.collect_mean = collect_mean
        self.collect_std = collect_std
        self.percentile_sigmas = percentile_sigmas

    def __call__(self, x, axis=0, dtype=None, xp=None):
        # NOTE(review): ``dtype`` and ``xp`` are accepted but never used
        # here; presumably kept for interface compatibility -- confirm.
        if axis is None:
            axis = tuple(range(x.ndim))
        elif not isinstance(axis, (tuple, list)):
            axis = axis,
        return self.collect(x, axis)

    def collect(self, x, axis):
        """Return a dict of the enabled statistics of ``x`` along ``axis``."""
        out = dict()
        if self.collect_mean:
            out['mean'] = x.mean(axis=axis)
        if self.collect_std:
            out['std'] = x.std(axis=axis)
        if self.percentile_sigmas:
            # Dispatch to numpy or cupy depending on where ``x`` lives.
            xp = cuda.get_array_module(x)
            p = xp.percentile(x, self.percentile_sigmas, axis=axis)
            out['percentile'] = p
        return out
class VariableStatisticsPlot(extension.Extension):
def __init__(self, targets, max_sample_size=1000,
report_data=True, report_grad=True,
plot_mean=True, plot_std=True,
percentile_sigmas=(
0, 0.13, 2.28, 15.87, 50, 84.13, 97.72, 99.87, 100),
trigger=(1, 'epoch'), filename=None,
figsize=None, marker=None, grid=True, **kwargs):
file_name, = argument.parse_kwargs(
kwargs, ('file_name', 'statistics.png')
)
if filename is None:
filename = file_name
del file_name
self._vars = _unpack_variables(targets)
if not self._vars:
raise ValueError(
'Need at least one variables for which to collect statistics.'
'\nActual: 0 <= 0')
if not any((plot_mean, plot_std, bool(percentile_sigmas))):
raise ValueError('Nothing to plot')
self._keys = []
if report_data:
self._keys.append('data')
if report_grad:
self._keys.append('grad')
self._report_data = report_data
self._report_grad = report_grad
self._statistician = Statistician(
collect_mean=plot_mean, collect_std=plot_std,
percentile_sigmas=percentile_sigmas)
self._plot_mean = plot_mean
self._plot_std = plot_std
self._plot_percentile = bool(percentile_sigmas)
self._trigger = trigger_module.get_trigger(trigger)
self._filename = filename
self._figsize = figsize
self._marker = marker
self._grid = grid
if not self._plot_percentile:
n_percentile = 0
else:
if not isinstance(percentile_sigmas, (list, tuple)):
n_percentile = 1
else:
n_percentile = len(percentile_sigmas)
self._data_shape = (
len(self._keys), int(plot_mean) + int(plot_std) + n_percentile)
self._samples = Reservoir(max_sample_size, data_shape=self._data_shape)
@staticmethod
def available():
_check_available()
return _available
def __call__(self, trainer):
if self.available():
import matplotlib.pyplot as plt
else:
return
xp = backend.get_array_module(self._vars[0].data)
stats = xp.zeros(self._data_shape, dtype=xp.float32)
for i, k in enumerate(self._keys):
xs = []
for var in self._vars:
x = getattr(var, k, None)
if x is not None:
xs.append(x.ravel())
if xs:
stat_dict = self._statistician(
xp.concatenate(xs, axis=0), axis=0, xp=xp)
stat_list = []
if self._plot_mean:
stat_list.append(xp.atleast_1d(stat_dict['mean']))
if self._plot_std:
stat_list.append(xp.atleast_1d(stat_dict['std']))
if self._plot_percentile:
stat_list.append(xp.atleast_1d(stat_dict['percentile']))
stats[i] = xp.concatenate(stat_list, axis=0)
if xp == cuda.cupy:
stats = cuda.to_cpu(stats)
self._samples.add(stats, idx=trainer.updater.iteration)
if self._trigger(trainer):
file_path = os.path.join(trainer.out, self._filename)
self.save_plot_using_module(file_path, plt)
def save_plot_using_module(self, file_path, plt):
nrows = int(self._plot_mean or self._plot_std) \
+ int(self._plot_percentile)
ncols = len(self._keys)
fig, axes = plt.subplots(
nrows, ncols, figsize=self._figsize, sharex=True)
if not isinstance(axes, numpy.ndarray):
axes = numpy.asarray([axes])
if nrows == 1:
axes = axes[None, :]
elif ncols == 1:
axes = axes[:, None]
assert axes.ndim == 2
idxs, data = self._samples.get_data()
offset = int(self._plot_mean) + int(self._plot_std)
n_percentile = data.shape[-1] - offset
n_percentile_mid_floor = n_percentile // 2
n_percentile_odd = n_percentile % 2 == 1
for col in six.moves.range(ncols):
row = 0
ax = axes[row, col]
ax.set_title(self._keys[col])
if self._plot_mean or self._plot_std:
if self._plot_mean and self._plot_std:
ax.errorbar(
idxs, data[:, col, 0], data[:, col, 1],
color=_plot_color, ecolor=_plot_color_trans,
label='mean, std', marker=self._marker)
else:
if self._plot_mean:
label = 'mean'
elif self._plot_std:
label = 'std'
ax.plot(
idxs, data[:, col, 0], color=_plot_color, label=label,
marker=self._marker)
row += 1
if self._plot_percentile:
ax = axes[row, col]
for i in six.moves.range(n_percentile_mid_floor + 1):
if n_percentile_odd and i == n_percentile_mid_floor:
ax.plot(
idxs, data[:, col, offset + i], color=_plot_color,
label='percentile', marker=self._marker)
else:
if i == n_percentile_mid_floor:
label = 'percentile'
else:
label = '_nolegend_'
ax.fill_between(
idxs,
data[:, col, offset + i],
data[:, col, -i - 1],
label=label,
**_plot_common_kwargs)
ax.set_xlabel('iteration')
for ax in axes.ravel():
ax.legend()
if self._grid:
ax.grid()
ax.set_axisbelow(True)
fig.savefig(file_path)
plt.close()
| true | true |
f71f62b04af3fdacd6538bdd099ff2935e8e0a14 | 2,893 | py | Python | tests/test_param_grid.py | MarcoJHB/ploomber | 4849ef6915572f7934392443b4faf138172b9596 | [
"Apache-2.0"
] | 2,141 | 2020-02-14T02:34:34.000Z | 2022-03-31T22:43:20.000Z | tests/test_param_grid.py | MarcoJHB/ploomber | 4849ef6915572f7934392443b4faf138172b9596 | [
"Apache-2.0"
] | 660 | 2020-02-06T16:15:57.000Z | 2022-03-31T22:55:01.000Z | tests/test_param_grid.py | MarcoJHB/ploomber | 4849ef6915572f7934392443b4faf138172b9596 | [
"Apache-2.0"
] | 122 | 2020-02-14T18:53:05.000Z | 2022-03-27T22:33:24.000Z | import datetime
from dateutil.relativedelta import relativedelta
import pytest
from ploomber.util import ParamGrid, Interval
def compare(a, b):
    """Return True iff ``a`` and ``b`` contain the same elements, ignoring
    order (multiset equality).

    Elements may be unhashable (the tests compare lists of dicts), so
    ``collections.Counter``/``set`` cannot be used; instead each element of
    ``a`` is matched against and removed from a copy of ``b``.  The previous
    one-way containment check wrongly treated ``[1, 1]`` and ``[1, 2]`` as
    equal.
    """
    remaining = list(b)
    for element in a:
        try:
            remaining.remove(element)  # uses ==, so works for dicts
        except ValueError:
            return False
    return not remaining
def test_interval():
    """Interval.expand yields consecutive (start, end) pairs; repr is stable."""
    interval = Interval(datetime.date(year=2010, month=1, day=1),
                        datetime.date(year=2012, month=1, day=1),
                        relativedelta(years=1))
    expanded = interval.expand()
    repr_ = ('Interval from 2010-01-01 to 2012-01-01 with '
             'delta relativedelta(years=+1)')
    expected = [(datetime.date(2010, 1, 1), datetime.date(2011, 1, 1)),
                (datetime.date(2011, 1, 1), datetime.date(2012, 1, 1))]
    assert expanded == expected
    assert repr(interval) == repr_
def test_param_grid():
    """zip pairs values positionally; product yields the full cross product."""
    pg = ParamGrid({'a': [1, 2, 3], 'b': [2, 4, 6]})
    assert compare(list(pg.zip()), [{
        'a': 1,
        'b': 2
    }, {
        'a': 2,
        'b': 4
    }, {
        'a': 3,
        'b': 6
    }])
    assert compare(list(pg.product()), [{
        'a': 1,
        'b': 2
    }, {
        'a': 1,
        'b': 4
    }, {
        'a': 1,
        'b': 6
    }, {
        'a': 2,
        'b': 2
    }, {
        'a': 2,
        'b': 4
    }, {
        'a': 2,
        'b': 6
    }, {
        'a': 3,
        'b': 2
    }, {
        'a': 3,
        'b': 4
    }, {
        'a': 3,
        'b': 6
    }])
def test_param_grid_w_interval():
    """An Interval value expands into (start, end) tuples zipped with the list."""
    pg = ParamGrid({'a': Interval(0, 10, 2), 'b': [2, 4, 6, 8, 10]})
    assert compare(list(pg.zip()), [{
        'a': (0, 2),
        'b': 2
    }, {
        'a': (2, 4),
        'b': 4
    }, {
        'a': (4, 6),
        'b': 6
    }, {
        'a': (6, 8),
        'b': 8
    }, {
        'a': (8, 10),
        'b': 10
    }])
def test_param_grid_list():
    """A list of grids yields each grid's product, concatenated in order."""
    first = {'a': [1, 2], 'b': [1, 2]}
    second = {'c': [3, 4], 'd': [3, 4]}
    pg = ParamGrid([first, second])
    assert list(pg.product()) == [{
        'a': 1,
        'b': 1
    }, {
        'a': 1,
        'b': 2
    }, {
        'a': 2,
        'b': 1
    }, {
        'a': 2,
        'b': 2
    }, {
        'c': 3,
        'd': 3
    }, {
        'c': 3,
        'd': 4
    }, {
        'c': 4,
        'd': 3
    }, {
        'c': 4,
        'd': 4
    }]
def test_param_grid_with_str_list():
    """Strings in value lists are treated as atoms, not iterated per-char."""
    pg = ParamGrid({
        'a': ['one', 'another'],
        'b': ['more', 'final'],
    })
    assert len(list(pg.product())) == 4
@pytest.mark.parametrize('val', [
    'one',
    1,
    1.1,
])
def test_param_grid_product_with_single_value(val):
    """A scalar (non-list) grid value behaves like a one-element list."""
    pg = ParamGrid({'a': val, 'b': ['more', 'final']})
    assert len(list(pg.product())) == 2
@pytest.mark.parametrize('val', [
    'one',
    1,
    1.1,
])
def test_param_grid_zip_with_single_value(val):
    """zip with a scalar value and a one-element list yields one combination."""
    pg = ParamGrid({'a': val, 'b': ['more']})
    assert len(list(pg.zip())) == 1
| 19.15894 | 71 | 0.407881 | import datetime
from dateutil.relativedelta import relativedelta
import pytest
from ploomber.util import ParamGrid, Interval
def compare(a, b):
for element in a:
if element not in b:
return False
return len(a) == len(b)
def test_interval():
interval = Interval(datetime.date(year=2010, month=1, day=1),
datetime.date(year=2012, month=1, day=1),
relativedelta(years=1))
expanded = interval.expand()
repr_ = ('Interval from 2010-01-01 to 2012-01-01 with '
'delta relativedelta(years=+1)')
expected = [(datetime.date(2010, 1, 1), datetime.date(2011, 1, 1)),
(datetime.date(2011, 1, 1), datetime.date(2012, 1, 1))]
assert expanded == expected
assert repr(interval) == repr_
def test_param_grid():
pg = ParamGrid({'a': [1, 2, 3], 'b': [2, 4, 6]})
assert compare(list(pg.zip()), [{
'a': 1,
'b': 2
}, {
'a': 2,
'b': 4
}, {
'a': 3,
'b': 6
}])
assert compare(list(pg.product()), [{
'a': 1,
'b': 2
}, {
'a': 1,
'b': 4
}, {
'a': 1,
'b': 6
}, {
'a': 2,
'b': 2
}, {
'a': 2,
'b': 4
}, {
'a': 2,
'b': 6
}, {
'a': 3,
'b': 2
}, {
'a': 3,
'b': 4
}, {
'a': 3,
'b': 6
}])
def test_param_grid_w_interval():
pg = ParamGrid({'a': Interval(0, 10, 2), 'b': [2, 4, 6, 8, 10]})
assert compare(list(pg.zip()), [{
'a': (0, 2),
'b': 2
}, {
'a': (2, 4),
'b': 4
}, {
'a': (4, 6),
'b': 6
}, {
'a': (6, 8),
'b': 8
}, {
'a': (8, 10),
'b': 10
}])
def test_param_grid_list():
first = {'a': [1, 2], 'b': [1, 2]}
second = {'c': [3, 4], 'd': [3, 4]}
pg = ParamGrid([first, second])
assert list(pg.product()) == [{
'a': 1,
'b': 1
}, {
'a': 1,
'b': 2
}, {
'a': 2,
'b': 1
}, {
'a': 2,
'b': 2
}, {
'c': 3,
'd': 3
}, {
'c': 3,
'd': 4
}, {
'c': 4,
'd': 3
}, {
'c': 4,
'd': 4
}]
def test_param_grid_with_str_list():
pg = ParamGrid({
'a': ['one', 'another'],
'b': ['more', 'final'],
})
assert len(list(pg.product())) == 4
@pytest.mark.parametrize('val', [
'one',
1,
1.1,
])
def test_param_grid_product_with_single_value(val):
pg = ParamGrid({'a': val, 'b': ['more', 'final']})
assert len(list(pg.product())) == 2
@pytest.mark.parametrize('val', [
'one',
1,
1.1,
])
def test_param_grid_zip_with_single_value(val):
pg = ParamGrid({'a': val, 'b': ['more']})
assert len(list(pg.zip())) == 1
| true | true |
f71f63419874a18aec03723ca69a1e11494c93fe | 27 | py | Python | btd6_memory_info/generated/NinjaKiwi/LiNK/Lobbies/LatencyMeasurements/StatsExtensions/stats_extensions.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/NinjaKiwi/LiNK/Lobbies/LatencyMeasurements/StatsExtensions/stats_extensions.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | btd6_memory_info/generated/NinjaKiwi/LiNK/Lobbies/LatencyMeasurements/StatsExtensions/stats_extensions.py | 56kyle/bloons_auto | 419d55b51d1cddc49099593970adf1c67985b389 | [
"MIT"
] | null | null | null | class StatsExtensions: pass | 27 | 27 | 0.888889 | class StatsExtensions: pass | true | true |
f71f638a703961e5577fb4b19745f2c50b5b4f2c | 478 | py | Python | Django/SOC2/MyChat/chat/routing.py | JanStoltman/100DaysOfCode | 1d18b76ed1e3e942e8392006a5d4bfb41484d047 | [
"MIT"
] | null | null | null | Django/SOC2/MyChat/chat/routing.py | JanStoltman/100DaysOfCode | 1d18b76ed1e3e942e8392006a5d4bfb41484d047 | [
"MIT"
] | null | null | null | Django/SOC2/MyChat/chat/routing.py | JanStoltman/100DaysOfCode | 1d18b76ed1e3e942e8392006a5d4bfb41484d047 | [
"MIT"
] | null | null | null | from channels import route
from .consumers import ws_connect, ws_receive, ws_disconnect, chat_join, chat_leave, chat_send
# Routes for the raw WebSocket lifecycle events on channels' websocket channel.
websocket_routing = [
    route("websocket.connect", ws_connect),
    route("websocket.receive", ws_receive),
    route("websocket.disconnect", ws_disconnect),
]
# Application-level chat commands, demultiplexed from "chat.receive" by the
# ``command`` regex sent in the message.
custom_routing = [
    route("chat.receive", chat_join, command="^join$"),
    route("chat.receive", chat_leave, command="^leave$"),
    route("chat.receive", chat_send, command="^send$"),
]
from .consumers import ws_connect, ws_receive, ws_disconnect, chat_join, chat_leave, chat_send
websocket_routing = [
route("websocket.connect", ws_connect),
route("websocket.receive", ws_receive),
route("websocket.disconnect", ws_disconnect),
]
custom_routing = [
route("chat.receive", chat_join, command="^join$"),
route("chat.receive", chat_leave, command="^leave$"),
route("chat.receive", chat_send, command="^send$"),
] | true | true |
f71f65dc650c5a613143b036baee3ed96b5449c9 | 5,089 | py | Python | jinahub/encoders/audio/VGGISHAudioEncoder/vggish_audio_encoder.py | Gikiman/executors | 98658b4136859164390cfccbde8cf0f7cf843593 | [
"Apache-2.0"
] | null | null | null | jinahub/encoders/audio/VGGISHAudioEncoder/vggish_audio_encoder.py | Gikiman/executors | 98658b4136859164390cfccbde8cf0f7cf843593 | [
"Apache-2.0"
] | null | null | null | jinahub/encoders/audio/VGGISHAudioEncoder/vggish_audio_encoder.py | Gikiman/executors | 98658b4136859164390cfccbde8cf0f7cf843593 | [
"Apache-2.0"
] | null | null | null | __copyright__ = "Copyright (c) 2021 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import os
from pathlib import Path
from typing import Any, Optional, List, Iterable
from jina import Executor, requests, DocumentArray
from jina.logging.logger import JinaLogger
import requests as _requests
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from .vggish.vggish_postprocess import *
from .vggish.vggish_slim import *
cur_dir = os.path.dirname(os.path.abspath(__file__))
class VggishAudioEncoder(Executor):
    """
    Encode audio data with Vggish embeddings

    :param model_path: path of the directory holding the VGGish and PCA
        checkpoints; missing checkpoints are downloaded on first use
    :param default_traversal_paths: traversal paths used when a request does
        not provide ``traversal_paths`` in its parameters
    """

    # Published checkpoint locations (Google AudioSet release).
    VGG_MODEL_URL = 'https://storage.googleapis.com/audioset/vggish_model.ckpt'
    PCA_MODEL_URL = 'https://storage.googleapis.com/audioset/vggish_pca_params.npz'

    def __init__(self,
                 model_path: str = Path(cur_dir) / 'models',
                 default_traversal_paths: Optional[Iterable[str]] = None,
                 *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.default_traversal_paths = default_traversal_paths or ['r']
        self.logger = JinaLogger(self.__class__.__name__)
        self.model_path = Path(model_path)
        self.vgg_model_path = self.model_path / 'vggish_model.ckpt'
        # NOTE(review): the PCA parameters come from a ``.npz`` URL but are
        # stored under a ``.ckpt`` name; preserved for compatibility with
        # existing model directories.
        self.pca_model_path = self.model_path / 'vggish_pca_params.ckpt'
        self.model_path.mkdir(exist_ok=True)  # ensure the model dir exists
        # The two checkpoints shared identical download logic; factored out.
        self._download_model('VGGish', self.VGG_MODEL_URL,
                             self.vgg_model_path, 'vggish model')
        self._download_model('PCA', self.PCA_MODEL_URL,
                             self.pca_model_path, 'pca model')
        self.sess = tf.compat.v1.Session()
        define_vggish_slim()
        load_vggish_slim_checkpoint(self.sess, str(self.vgg_model_path))
        self.feature_tensor = self.sess.graph.get_tensor_by_name(
            INPUT_TENSOR_NAME)
        self.embedding_tensor = self.sess.graph.get_tensor_by_name(
            OUTPUT_TENSOR_NAME)
        self.post_processor = Postprocessor(str(self.pca_model_path))

    def _download_model(self, name: str, url: str, dest: Path,
                        description: str):
        """Download ``url`` to ``dest`` unless ``dest`` already exists.

        :param name: human-readable model name used in the info log.
        :param url: source URL of the checkpoint.
        :param dest: target file path.
        :param description: short label used in the error logs (kept
            identical to the wording of the previous per-model code).
        """
        if dest.exists():
            return
        self.logger.info(
            f'{name} model cannot be found from the given model path, '
            f'downloading a new one...')
        try:
            r = _requests.get(url)
            r.raise_for_status()
        except _requests.exceptions.HTTPError:
            self.logger.error(
                f'received HTTP error response, cannot download {description}')
            raise
        except _requests.exceptions.RequestException:
            self.logger.error(
                f'Connection error, cannot download {description}')
            raise
        with open(dest, 'wb') as f:
            f.write(r.content)

    @requests
    def encode(self, docs: Optional[DocumentArray], parameters: dict,
               **kwargs):
        """
        Compute embeddings and store them in the `docs` array.

        :param docs: documents sent to the encoder; each document's ``blob``
            holds the audio features.
        :param parameters: dictionary that may override ``traversal_paths``,
            e.g. ``parameters={'traversal_paths': ['r']}``.
        :param kwargs: Additional key value arguments.
        :return:
        """
        if docs:
            cleaned_document_array = self._get_input_data(docs, parameters)
            self._create_embeddings(cleaned_document_array)

    def _get_input_data(self, docs: DocumentArray, parameters: dict):
        """Return the documents to process: traversed and blob-filtered."""
        traversal_paths = parameters.get(
            'traversal_paths', self.default_traversal_paths)
        # traverse through all documents which have to be processed
        flat_docs = docs.traverse_flat(traversal_paths)
        # documents without a blob carry no audio and are skipped
        return DocumentArray(
            [doc for doc in flat_docs if doc.blob is not None])

    def _create_embeddings(self, filtered_docs: Iterable):
        """Update the documents with the embeddings generated by VGGISH."""
        for d in filtered_docs:
            # Vggish broadcasts across different length audios, not batches,
            # so each document is run through the session individually.
            [embedding] = self.sess.run(
                [self.embedding_tensor],
                feed_dict={self.feature_tensor: d.blob})
            result = self.post_processor.postprocess(embedding)
            # Map the uint8 postprocessed embedding into [-1, 1] and average
            # over time frames to obtain one vector per document.
            d.embedding = np.mean((np.float32(result) - 128.) / 128., axis=0)

    def close(self):
        """Release the TensorFlow session."""
        self.sess.close()
| 41.373984 | 112 | 0.659658 | __copyright__ = "Copyright (c) 2021 Jina AI Limited. All rights reserved."
__license__ = "Apache-2.0"
import os
from pathlib import Path
from typing import Any, Optional, List, Iterable
from jina import Executor, requests, DocumentArray
from jina.logging.logger import JinaLogger
import requests as _requests
import tensorflow as tf
tf.compat.v1.disable_eager_execution()
from .vggish.vggish_postprocess import *
from .vggish.vggish_slim import *
cur_dir = os.path.dirname(os.path.abspath(__file__))
class VggishAudioEncoder(Executor):
def __init__(self,
model_path: str = Path(cur_dir) / 'models',
default_traversal_paths: Optional[Iterable[str]] = None,
*args, **kwargs):
super().__init__(*args, **kwargs)
self.default_traversal_paths = default_traversal_paths or ['r']
self.logger = JinaLogger(self.__class__.__name__)
self.model_path = Path(model_path)
self.vgg_model_path = self.model_path / 'vggish_model.ckpt'
self.pca_model_path = self.model_path / 'vggish_pca_params.ckpt'
self.model_path.mkdir(exist_ok=True)
if not self.vgg_model_path.exists():
self.logger.info('VGGish model cannot be found from the given model path, downloading a new one...')
try:
r = _requests.get('https://storage.googleapis.com/audioset/vggish_model.ckpt')
r.raise_for_status()
except _requests.exceptions.HTTPError:
self.logger.error('received HTTP error response, cannot download vggish model')
raise
except _requests.exceptions.RequestException:
self.logger.error('Connection error, cannot download vggish model')
raise
with open(self.vgg_model_path, 'wb') as f:
f.write(r.content)
if not self.pca_model_path.exists():
self.logger.info('PCA model cannot be found from the given model path, downloading a new one...')
try:
r = _requests.get('https://storage.googleapis.com/audioset/vggish_pca_params.npz')
r.raise_for_status()
except _requests.exceptions.HTTPError:
self.logger.error('received HTTP error response, cannot download pca model')
raise
except _requests.exceptions.RequestException:
self.logger.error('Connection error, cannot download pca model')
raise
with open(self.pca_model_path, 'wb') as f:
f.write(r.content)
self.sess = tf.compat.v1.Session()
define_vggish_slim()
load_vggish_slim_checkpoint(self.sess, str(self.vgg_model_path))
self.feature_tensor = self.sess.graph.get_tensor_by_name(
INPUT_TENSOR_NAME)
self.embedding_tensor = self.sess.graph.get_tensor_by_name(
OUTPUT_TENSOR_NAME)
self.post_processor = Postprocessor(str(self.pca_model_path))
@requests
def encode(self, docs: Optional[DocumentArray], parameters: dict, **kwargs):
if docs:
cleaned_document_array = self._get_input_data(docs, parameters)
self._create_embeddings(cleaned_document_array)
def _get_input_data(self, docs: DocumentArray, parameters: dict):
traversal_paths = parameters.get('traversal_paths', self.default_traversal_paths)
flat_docs = docs.traverse_flat(traversal_paths)
filtered_docs = DocumentArray([doc for doc in flat_docs if doc.blob is not None])
return filtered_docs
def _create_embeddings(self, filtered_docs: Iterable):
for d in filtered_docs:
[embedding] = self.sess.run([self.embedding_tensor], feed_dict={self.feature_tensor: d.blob})
result = self.post_processor.postprocess(embedding)
d.embedding = np.mean((np.float32(result) - 128.) / 128., axis=0)
def close(self):
self.sess.close()
| true | true |
f71f66195317baeeed07698a274b4377fafe07c5 | 1,436 | py | Python | alipay/aop/api/domain/AlipayOpenPublicSinglearticleDataBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/AlipayOpenPublicSinglearticleDataBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/domain/AlipayOpenPublicSinglearticleDataBatchqueryModel.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenPublicSinglearticleDataBatchqueryModel(object):
    """Request model for batch-querying single-article data.

    Holds the ``begin_date``/``end_date`` range of the query and converts
    between plain dicts and model instances for the Alipay gateway.
    """

    def __init__(self):
        self._begin_date = None
        self._end_date = None

    @property
    def begin_date(self):
        """Start of the queried date range."""
        return self._begin_date

    @begin_date.setter
    def begin_date(self, value):
        self._begin_date = value

    @property
    def end_date(self):
        """End of the queried date range."""
        return self._end_date

    @end_date.setter
    def end_date(self, value):
        self._end_date = value

    def to_alipay_dict(self):
        """Serialize the non-empty fields into a plain dict."""
        params = dict()
        for key, value in (('begin_date', self.begin_date),
                           ('end_date', self.end_date)):
            if value:
                # Nested model objects serialize themselves.
                params[key] = (value.to_alipay_dict()
                               if hasattr(value, 'to_alipay_dict') else value)
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from dict ``d``; return None for falsy input."""
        if not d:
            return None
        model = AlipayOpenPublicSinglearticleDataBatchqueryModel()
        if 'begin_date' in d:
            model.begin_date = d['begin_date']
        if 'end_date' in d:
            model.end_date = d['end_date']
        return model
| 25.642857 | 71 | 0.601671 |
import simplejson as json
from alipay.aop.api.constant.ParamConstants import *
class AlipayOpenPublicSinglearticleDataBatchqueryModel(object):
def __init__(self):
self._begin_date = None
self._end_date = None
@property
def begin_date(self):
return self._begin_date
@begin_date.setter
def begin_date(self, value):
self._begin_date = value
@property
def end_date(self):
return self._end_date
@end_date.setter
def end_date(self, value):
self._end_date = value
def to_alipay_dict(self):
params = dict()
if self.begin_date:
if hasattr(self.begin_date, 'to_alipay_dict'):
params['begin_date'] = self.begin_date.to_alipay_dict()
else:
params['begin_date'] = self.begin_date
if self.end_date:
if hasattr(self.end_date, 'to_alipay_dict'):
params['end_date'] = self.end_date.to_alipay_dict()
else:
params['end_date'] = self.end_date
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AlipayOpenPublicSinglearticleDataBatchqueryModel()
if 'begin_date' in d:
o.begin_date = d['begin_date']
if 'end_date' in d:
o.end_date = d['end_date']
return o
| true | true |
f71f668d6fbe52a3a43d82cbb88b941356fc85b3 | 2,366 | py | Python | actstream/runtests/manage.py | inspiration4hunter/django-actstream | 7d655b3bf239c85a6ac804ff72e748214b81bb8e | [
"BSD-3-Clause"
] | 1 | 2019-06-27T13:04:59.000Z | 2019-06-27T13:04:59.000Z | actstream/runtests/manage.py | techdragon/django-activity-stream | d5b18470c8682cec3e3db4cfaf8920c3dd33f6bb | [
"BSD-3-Clause"
] | null | null | null | actstream/runtests/manage.py | techdragon/django-activity-stream | d5b18470c8682cec3e3db4cfaf8920c3dd33f6bb | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# http://ericholscher.com/blog/2009/jun/29/enable-setuppy-test-your-django-apps/
# http://www.travisswicegood.com/2010/01/17/django-virtualenv-pip-and-fabric/
# http://code.djangoproject.com/svn/django/trunk/tests/runtests.py
# https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/runtests/runtests.py
import os
import sys
import warnings
warnings.filterwarnings("ignore")
# fix sys path so we don't need to setup PYTHONPATH
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
os.environ['DJANGO_SETTINGS_MODULE'] = 'actstream.runtests.settings'
# Normalize DATABASE_ENGINE (which may be a short name like "mysql") to a
# full Django backend path; anything unrecognized falls back to sqlite3.
engine = os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3')
if engine.startswith('mysql'):
    engine = 'django.db.backends.mysql'
elif engine.startswith('postgre'):
    engine = 'django.db.backends.postgresql_psycopg2'
else:
    engine = 'django.db.backends.sqlite3'
try:
    import django
except SyntaxError:
    # Importing django on a too-old interpreter raises SyntaxError; treat
    # this as "nothing to test" (exit 0) rather than a failure.
    sys.stderr.write('Unable to import django (older python version)\n')
    exit(0)
PYPY = hasattr(sys, 'pypy_version_info')
version = sys.version_info[:2]
PY3 = version[0] == 3
# Skip (exit 0) interpreter/Django combinations known to be unsupported so
# CI matrices can include them without failing the build.
if PYPY and engine.endswith('psycopg2') and bytes != str:
    sys.stderr.write('PyPy3 does not have a psycopg implementation\n')
    exit(0)
if PY3 and django.VERSION[:2] >= (1, 9) and version <= (3, 3):
    sys.stderr.write('Django>=1.9 does not support Python<=3.3\n')
    exit(0)
if PY3 and django.VERSION[:2] <= (1, 8) and version >= (3, 5):
    sys.stderr.write('Django<=1.8 does not support Python>=3.5\n')
    exit(0)
if PY3 and django.VERSION[:2] == (1, 8) and version <= (3, 3):
    sys.stderr.write('Django 1.8 does not support Python<=3.3\n')
    exit(0)
if django.VERSION[:2] <= (1, 4) and PY3:
    sys.stderr.write('Django<=1.4 does not support Python3\n')
    exit(0)
if version == (2, 6) and django.VERSION[:2] >= (1, 7):
    sys.stderr.write('Django>=1.7 does not support Python2.6\n')
    exit(0)
os.environ['DATABASE_ENGINE'] = engine
# Optional driver shims: register CFFI psycopg2 (for PyPy) and PyMySQL as
# drop-in replacements when the C drivers are unavailable.
try:
    from psycopg2cffi import compat
    compat.register()
except ImportError:
    pass
try:
    import pymysql
    pymysql.install_as_MySQLdb()
except ImportError:
    pass
try:
    # django.setup() exists only on Django>=1.7; older versions need no setup.
    django.setup()
except AttributeError:
    pass
if __name__ == '__main__':
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| 30.333333 | 102 | 0.703719 |
import os
import sys
import warnings
warnings.filterwarnings("ignore")
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")))
os.environ['DJANGO_SETTINGS_MODULE'] = 'actstream.runtests.settings'
engine = os.environ.get('DATABASE_ENGINE', 'django.db.backends.sqlite3')
if engine.startswith('mysql'):
engine = 'django.db.backends.mysql'
elif engine.startswith('postgre'):
engine = 'django.db.backends.postgresql_psycopg2'
else:
engine = 'django.db.backends.sqlite3'
try:
import django
except SyntaxError:
sys.stderr.write('Unable to import django (older python version)\n')
exit(0)
PYPY = hasattr(sys, 'pypy_version_info')
version = sys.version_info[:2]
PY3 = version[0] == 3
if PYPY and engine.endswith('psycopg2') and bytes != str:
sys.stderr.write('PyPy3 does not have a psycopg implementation\n')
exit(0)
if PY3 and django.VERSION[:2] >= (1, 9) and version <= (3, 3):
sys.stderr.write('Django>=1.9 does not support Python<=3.3\n')
exit(0)
if PY3 and django.VERSION[:2] <= (1, 8) and version >= (3, 5):
sys.stderr.write('Django<=1.8 does not support Python>=3.5\n')
exit(0)
if PY3 and django.VERSION[:2] == (1, 8) and version <= (3, 3):
sys.stderr.write('Django 1.8 does not support Python<=3.3\n')
exit(0)
if django.VERSION[:2] <= (1, 4) and PY3:
sys.stderr.write('Django<=1.4 does not support Python3\n')
exit(0)
if version == (2, 6) and django.VERSION[:2] >= (1, 7):
sys.stderr.write('Django>=1.7 does not support Python2.6\n')
exit(0)
os.environ['DATABASE_ENGINE'] = engine
try:
from psycopg2cffi import compat
compat.register()
except ImportError:
pass
try:
import pymysql
pymysql.install_as_MySQLdb()
except ImportError:
pass
try:
django.setup()
except AttributeError:
pass
if __name__ == '__main__':
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| true | true |
f71f669e87f2e8ea5e0d8ed4ce44947d705ba9d6 | 572 | py | Python | Python/Factorization Of Numbers/PairFactorization.py | DeWill404/Data-Structure-and-Algorithm | c61d245c920edff747e87dc7c2ea139561766a3a | [
"MIT"
] | null | null | null | Python/Factorization Of Numbers/PairFactorization.py | DeWill404/Data-Structure-and-Algorithm | c61d245c920edff747e87dc7c2ea139561766a3a | [
"MIT"
] | null | null | null | Python/Factorization Of Numbers/PairFactorization.py | DeWill404/Data-Structure-and-Algorithm | c61d245c920edff747e87dc7c2ea139561766a3a | [
"MIT"
] | null | null | null | # function to generate list of factors
def get_factorList(n):
    """Return every positive divisor of *n* (order not guaranteed)."""
    # Seed with the trivial divisors; the set collapses 1 and n when n == 1.
    factors = list({1, n})
    divisor = 2
    # Only candidates up to sqrt(n) need checking; each hit also contributes
    # the complementary divisor n // divisor.
    while divisor * divisor <= n:
        if n % divisor == 0:
            quotient = n // divisor
            factors.append(divisor)
            if quotient != divisor:
                factors.append(quotient)
        divisor += 1
    return factors
if __name__ == "__main__":
    # List of input no's
    list_of_numbers = [23, 46, 65, 34234, 423, 43212]
    # Get factor list of given no.
    for num in list_of_numbers:
        # Print each number's complete divisor list.
        print(get_factorList(num))
# Duplicate copy of PairFactorization (dataset's comment-stripped column);
# comments restored for readability.
def get_factorList(n):
    # Seed with the trivial divisors; the set collapses 1 and n when n == 1.
    l = list(set([1,n]))
    # Check candidates up to sqrt(n); each divisor i also contributes n//i.
    for i in range(2, int(n**0.5)+1):
        if n%i == 0:
            # Perfect-square root: append only once.
            if i == n//i:
                l.append(i)
            else:
                l.extend([i,n//i])
    return l


if __name__ == "__main__":
    # Sample inputs.
    list_of_numbers = [23, 46, 65, 34234, 423, 43212]
    # Get factor list of given no.
    for num in list_of_numbers:
        print(get_factorList(num))
f71f684248dd8ea778131509570d6005305ece61 | 3,684 | py | Python | uwb_channel.py | iguarna/uwb-ieee | 782813b8a6fc9effeb076c47cd5d497b6e62b330 | [
"MIT"
] | null | null | null | uwb_channel.py | iguarna/uwb-ieee | 782813b8a6fc9effeb076c47cd5d497b6e62b330 | [
"MIT"
] | null | null | null | uwb_channel.py | iguarna/uwb-ieee | 782813b8a6fc9effeb076c47cd5d497b6e62b330 | [
"MIT"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
def gen_channel(parameters, fc=5E9, fs=2E9, dynamic_range=30):
    """Generate one random UWB channel impulse response.

    Clusters arrive over time with exponential inter-arrival times; inside a
    cluster, rays arrive as a mixture of two exponential processes selected
    by ``beta``. Ray amplitudes are Nakagami distributed with a log-normal
    m-factor, and a frequency-dependent gain f**(-2*kf) is applied in the
    frequency domain before normalizing to unit energy.

    :param parameters: model parameters (rates per ns, decays in ns)
    :param fc: center frequency [Hz]
    :param fs: sampling frequency [Hz]
    :param dynamic_range: decay range in dB used to size the response
    :return: unit-energy complex impulse response (1-D numpy array)
    :raises ValueError: if inter_cluster_decay exceeds cluster_decay
    """
    # Calculate samples/nanosec ratio
    nanosec_to_samples = int(1E-9 * fs)

    #####################################
    # Unpack parameters and convert units
    cluster_rate = parameters['cluster_rate'] / nanosec_to_samples
    inter_cluster_rate_1 = parameters['inter_cluster_rate_1'] / nanosec_to_samples
    inter_cluster_rate_2 = parameters['inter_cluster_rate_2'] / nanosec_to_samples
    beta = parameters['beta']
    cluster_decay = parameters['cluster_decay'] * nanosec_to_samples
    inter_cluster_decay = parameters['inter_cluster_decay'] * nanosec_to_samples
    mean_m = parameters['mean_m']
    std_m = parameters['std_m']
    std_cluster_shadowing = parameters['std_cluster_shadowing']
    kf = parameters['kf']

    #########################
    # Obtain impulse response
    if inter_cluster_decay > cluster_decay:
        raise ValueError("Inter cluster decay cannot be larger than cluster decay.")

    # Length (in samples) after which the envelope has decayed by
    # `dynamic_range` dB.
    max_t = int(dynamic_range * cluster_decay * np.log(10) / 10)

    h = np.zeros(max_t, dtype=complex)

    t = 0
    while t < max_t:
        tau = 0
        max_tau = int((max_t - t) * inter_cluster_decay / cluster_decay)
        # Cluster power: exponential decay plus log-normal shadowing.
        cluster_power = np.exp(-t / cluster_decay) * np.random.lognormal(mean=0, sigma=std_cluster_shadowing)
        while tau < max_tau:
            # Mean power for this ray
            mean_power = cluster_power * np.exp(-tau / inter_cluster_decay)

            # Nakagami m-factor is log normally distributed
            m = np.random.lognormal(mean_m, std_m)

            # Compute amplitude as Nakagami distributed
            a = np.sqrt(np.random.gamma(shape=m, scale=mean_power / m))

            # Compute phase as uniformly distributed
            phi = np.random.uniform(0, 2 * np.pi)

            h[t + tau] = np.array([a * np.exp(-1j * phi)])[0]

            # Pick one of the two ray arrival rates with probability beta.
            if np.random.uniform(0, 1) < beta:
                inter_cluster_rate = inter_cluster_rate_1
            else:
                inter_cluster_rate = inter_cluster_rate_2

            tau += round(np.random.exponential(1 / inter_cluster_rate))

        t += round(np.random.exponential(1 / cluster_rate))

    ##########################
    # Add frequency dependency

    # Zero padding before FFT to avoid artifacts
    h = np.append(h, np.zeros(h.size, dtype=complex))
    H = np.fft.fft(h, norm='ortho')

    # Get frequency array in the same order as produced by the FFT
    freq = np.linspace(fc - fs / 2, fc + fs / 2, num=h.size)
    freq = np.append(freq[freq.size // 2:], freq[:freq.size // 2])

    # Calculate frequency dependency and apply
    Gf = np.power(freq, -2 * kf)
    H = np.multiply(Gf, H)

    # Inverse FFT
    h = np.fft.ifft(H, norm='ortho')

    # Remove padding
    h = h[:h.size // 2]

    ###############
    # Normalization
    h = normalize(h)

    return h
def normalize(s):
    """Scale *s* so that its total energy (sum of |s|^2) equals 1."""
    total_energy = np.sum(np.square(np.abs(s)))
    return s / np.sqrt(total_energy)
def energy(s):
    """Return the total energy of *s*: the sum of squared magnitudes."""
    magnitudes = np.abs(s)
    return np.sum(magnitudes * magnitudes)
if __name__ == '__main__':
    # Example parameter set ("cm1"): rates per ns, decays in ns.
    # NOTE(review): values presumably follow a standardized indoor channel
    # model -- confirm provenance before reuse.
    parameters_cm1 = {
        'cluster_rate': 0.047,
        'inter_cluster_rate_1': 1.54,
        'inter_cluster_rate_2': 0.15,
        'beta': 0.095,
        'cluster_decay': 22.61,
        'inter_cluster_decay': 12.53,
        'mean_m': 0.67,
        'std_m': 0.28,
        'std_cluster_shadowing': 2.75,
        'kf': 1.12,
        'kd': 1.79,
        'std_path_shadowing': 2.22
    }

    # Carrier at the center of the 3.1-10.6 GHz band, sampled at 6 GHz.
    h = gen_channel(parameters=parameters_cm1,
                    fc=(10.6E9 + 3.1E9) / 2,
                    fs=6E9,
                    dynamic_range=30)
    # Plot the magnitude of the generated impulse response.
    plt.plot(np.abs(h))
    plt.show()
| 28.55814 | 109 | 0.59392 | import numpy as np
import matplotlib.pyplot as plt
def gen_channel(parameters, fc=5E9, fs=2E9, dynamic_range=30):
nanosec_to_samples = int(1E-9 * fs)
er = np.exp(-t / cluster_decay) * np.random.lognormal(mean=0, sigma=std_cluster_shadowing)
while tau < max_tau:
mean_power = cluster_power * np.exp(-tau / inter_cluster_decay)
m = np.random.lognormal(mean_m, std_m)
a = np.sqrt(np.random.gamma(shape=m, scale=mean_power / m))
phi = np.random.uniform(0, 2 * np.pi)
h[t + tau] = np.array([a * np.exp(-1j * phi)])[0]
if np.random.uniform(0, 1) < beta:
inter_cluster_rate = inter_cluster_rate_1
else:
inter_cluster_rate = inter_cluster_rate_2
tau += round(np.random.exponential(1 / inter_cluster_rate))
t += round(np.random.exponential(1 / cluster_rate))
fft(H, norm='ortho')
h = h[:h.size // 2]
rgy(s):
return np.sum(np.square(np.abs(s)))
if __name__ == '__main__':
parameters_cm1 = {
'cluster_rate': 0.047,
'inter_cluster_rate_1': 1.54,
'inter_cluster_rate_2': 0.15,
'beta': 0.095,
'cluster_decay': 22.61,
'inter_cluster_decay': 12.53,
'mean_m': 0.67,
'std_m': 0.28,
'std_cluster_shadowing': 2.75,
'kf': 1.12,
'kd': 1.79,
'std_path_shadowing': 2.22
}
h = gen_channel(parameters=parameters_cm1,
fc=(10.6E9 + 3.1E9) / 2,
fs=6E9,
dynamic_range=30)
plt.plot(np.abs(h))
plt.show()
| true | true |
f71f689447e4c38f173ed630b270c2889bd40d14 | 3,032 | py | Python | tottle/exception_factory/error_handler/error_handler.py | muffleo/tottle | 69a5bdda879ab56d43505d517d3369a687c135a2 | [
"MIT"
] | 12 | 2020-09-06T15:31:34.000Z | 2021-02-27T20:30:34.000Z | tottle/exception_factory/error_handler/error_handler.py | cyanlabs-org/tottle | 6cf02022ed7b445c9b5af475c6e854b91780d792 | [
"MIT"
] | 2 | 2021-04-13T06:43:42.000Z | 2021-07-07T20:52:39.000Z | tottle/exception_factory/error_handler/error_handler.py | cyanlabs-org/tottle | 6cf02022ed7b445c9b5af475c6e854b91780d792 | [
"MIT"
] | 4 | 2020-09-12T03:09:25.000Z | 2021-03-22T08:52:04.000Z | import traceback
import typing
from tottle.exception_factory.error_handler.abc import ABCErrorHandler, ExceptionHandler
from tottle.modules import logger
class ErrorHandler(ABCErrorHandler):
    """Routes exceptions raised by wrapped coroutines to per-type handlers,
    with an optional catch-all for exception types without one."""

    def __init__(self, redirect_arguments: bool = False):
        # exception class name -> async handler callable
        self.error_handlers: typing.Dict[str, ExceptionHandler] = {}
        self.undefined_error_handler: typing.Optional[ExceptionHandler] = None
        # When True, the failing call's args/kwargs are forwarded to handlers.
        self.redirect_arguments = redirect_arguments

    def register_error_handler(
        self,
        exception_type: typing.Type[BaseException],
        exception_handler: typing.Optional[ExceptionHandler] = None,
    ) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
        """Register a handler for *exception_type*; acts as a decorator
        factory when *exception_handler* is omitted."""
        if exception_handler:
            self.error_handlers[exception_type.__name__] = exception_handler
            return None

        def decorator(func: ExceptionHandler):
            self.error_handlers[exception_type.__name__] = func
            return func

        return decorator

    def register_undefined_error_handler(
        self,
        undefined_error_handler: typing.Optional[ExceptionHandler] = None,
    ) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
        """Register the fallback handler; acts as a decorator factory when
        called without arguments."""
        if undefined_error_handler:
            self.undefined_error_handler = undefined_error_handler
            return None

        def decorator(func: ExceptionHandler):
            self.undefined_error_handler = func
            return func

        return decorator

    async def call_handler(
        self, handler: ExceptionHandler, e: BaseException, *args, **kwargs
    ) -> typing.Awaitable[typing.Any]:
        """Invoke *handler* with the exception (plus the original call
        arguments when redirect_arguments is set).

        NOTE(review): the except below also swallows TypeErrors raised
        *inside* the handler, not only signature mismatches -- confirm this
        is intended.
        """
        try:
            if self.redirect_arguments:
                return await handler(e, *args, **kwargs)  # type: ignore
            return await handler(e)  # type: ignore
        except TypeError:
            pass

    def wraps_error_handler(
        self,
    ) -> typing.Callable[
        [typing.Any],
        typing.Callable[[typing.Any, typing.Any], typing.Awaitable[typing.Any]],
    ]:
        """Decorator: await the wrapped coroutine and route any exception it
        raises to handle()."""

        def decorator(func: typing.Union[typing.NoReturn, typing.Any]):
            async def wrapper(*args, **kwargs):
                try:
                    return await func(*args, **kwargs)
                except BaseException as e:
                    return await self.handle(e, *args, **kwargs)

            return wrapper

        return decorator

    async def handle(self, e: BaseException, *args, **kwargs) -> typing.Any:
        """Dispatch *e* to its registered handler, else to the fallback,
        else log the current traceback."""
        if e.__class__.__name__ in self.error_handlers:
            return await self.call_handler(
                self.error_handlers[e.__class__.__name__], e, *args, **kwargs
            )
        elif self.undefined_error_handler:
            return await self.call_handler(
                self.undefined_error_handler, e, *args, **kwargs
            )
        logger.error("\n" + traceback.format_exc())

    @property
    def handling_exceptions(
        self,
    ) -> typing.Union[str, typing.Tuple[str, ...]]:
        """Names of exception types that currently have a handler."""
        return tuple(k for k in self.error_handlers.keys())
| 34.067416 | 88 | 0.636544 | import traceback
import typing
from tottle.exception_factory.error_handler.abc import ABCErrorHandler, ExceptionHandler
from tottle.modules import logger
class ErrorHandler(ABCErrorHandler):
def __init__(self, redirect_arguments: bool = False):
self.error_handlers: typing.Dict[str, ExceptionHandler] = {}
self.undefined_error_handler: typing.Optional[ExceptionHandler] = None
self.redirect_arguments = redirect_arguments
def register_error_handler(
self,
exception_type: typing.Type[BaseException],
exception_handler: typing.Optional[ExceptionHandler] = None,
) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
if exception_handler:
self.error_handlers[exception_type.__name__] = exception_handler
return None
def decorator(func: ExceptionHandler):
self.error_handlers[exception_type.__name__] = func
return func
return decorator
def register_undefined_error_handler(
self,
undefined_error_handler: typing.Optional[ExceptionHandler] = None,
) -> typing.Optional[typing.Callable[[ExceptionHandler], typing.Any]]:
if undefined_error_handler:
self.undefined_error_handler = undefined_error_handler
return None
def decorator(func: ExceptionHandler):
self.undefined_error_handler = func
return func
return decorator
async def call_handler(
self, handler: ExceptionHandler, e: BaseException, *args, **kwargs
) -> typing.Awaitable[typing.Any]:
try:
if self.redirect_arguments:
return await handler(e, *args, **kwargs)
return await handler(e)
except TypeError:
pass
def wraps_error_handler(
self,
) -> typing.Callable[
[typing.Any],
typing.Callable[[typing.Any, typing.Any], typing.Awaitable[typing.Any]],
]:
def decorator(func: typing.Union[typing.NoReturn, typing.Any]):
async def wrapper(*args, **kwargs):
try:
return await func(*args, **kwargs)
except BaseException as e:
return await self.handle(e, *args, **kwargs)
return wrapper
return decorator
async def handle(self, e: BaseException, *args, **kwargs) -> typing.Any:
if e.__class__.__name__ in self.error_handlers:
return await self.call_handler(
self.error_handlers[e.__class__.__name__], e, *args, **kwargs
)
elif self.undefined_error_handler:
return await self.call_handler(
self.undefined_error_handler, e, *args, **kwargs
)
logger.error("\n" + traceback.format_exc())
@property
def handling_exceptions(
self,
) -> typing.Union[str, typing.Tuple[str, ...]]:
return tuple(k for k in self.error_handlers.keys())
| true | true |
f71f68f22277399de37d076c657cde17a277ddbd | 70,087 | py | Python | androguard/core/analysis/analysis.py | appknox/old-androguard | 8b2fbc262f10f99016f4bbaaac51a963abdb90e4 | [
"Apache-2.0"
] | null | null | null | androguard/core/analysis/analysis.py | appknox/old-androguard | 8b2fbc262f10f99016f4bbaaac51a963abdb90e4 | [
"Apache-2.0"
] | null | null | null | androguard/core/analysis/analysis.py | appknox/old-androguard | 8b2fbc262f10f99016f4bbaaac51a963abdb90e4 | [
"Apache-2.0"
] | null | null | null | # This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import collections
from androguard.core.analysis.sign import Signature, TAINTED_PACKAGE_CREATE, \
TAINTED_PACKAGE_CALL
from androguard.core.androconf import debug, is_ascii_problem,\
load_api_specific_resource_module
from androguard.core.bytecodes import dvm
# Maps a Dalvik field-access opcode name to its access mode:
# "R" for reads (iget*/sget*) and "W" for writes (iput*/sput*).
DVM_FIELDS_ACCESS = {
    "iget": "R",
    "iget-wide": "R",
    "iget-object": "R",
    "iget-boolean": "R",
    "iget-byte": "R",
    "iget-char": "R",
    "iget-short": "R",

    "iput": "W",
    "iput-wide": "W",
    "iput-object": "W",
    "iput-boolean": "W",
    "iput-byte": "W",
    "iput-char": "W",
    "iput-short": "W",

    "sget": "R",
    "sget-wide": "R",
    "sget-object": "R",
    "sget-boolean": "R",
    "sget-byte": "R",
    "sget-char": "R",
    "sget-short": "R",

    "sput": "W",
    "sput-wide": "W",
    "sput-object": "W",
    "sput-boolean": "W",
    "sput-byte": "W",
    "sput-char": "W",
    "sput-short": "W",
}
class ContextField(object):
    """Holds an access mode plus an accumulated list of detail entries."""

    def __init__(self, mode):
        self.mode = mode
        self.details = []

    def set_details(self, details):
        """Append every entry of *details* (extends, never replaces)."""
        self.details.extend(details)
class ContextMethod(object):
    """Accumulates detail entries describing a method context."""

    def __init__(self):
        self.details = []

    def set_details(self, details):
        """Append every entry of *details* (extends, never replaces)."""
        self.details += list(details)
class ExternalFM(object):
    """Lightweight record for a field/method defined outside the dex."""

    def __init__(self, class_name, name, descriptor):
        self.class_name, self.name, self.descriptor = class_name, name, descriptor

    def get_class_name(self):
        """Return the owning class name."""
        return self.class_name

    def get_name(self):
        """Return the field/method name."""
        return self.name

    def get_descriptor(self):
        """Return the type descriptor."""
        return self.descriptor
class ToString(object):
    """Builds a compact signature string by mapping pushed names to single
    category characters via regular expressions."""

    def __init__(self, tab):
        # Keep the raw table and precompile every pattern, preserving order.
        self.__tab = tab
        self.__re_tab = {
            category: [re.compile(pattern) for pattern in patterns]
            for category, patterns in tab.items()
        }
        self.__string = ""

    def push(self, name):
        """Append the category character of every pattern matching *name*;
        consecutive 'O' characters are collapsed into one."""
        for category in self.__tab:
            for matcher in self.__re_tab[category]:
                if matcher.match(name) is None:
                    continue
                # Collapse runs of the generic 'O' category.
                if self.__string and category == 'O' and self.__string[-1] == 'O':
                    continue
                self.__string += category

    def get_string(self):
        """Return the accumulated signature string."""
        return self.__string
class BreakBlock(object):
    """A linear run of instructions, tracking the offset range it spans plus
    the ops/fields/methods it references."""

    def __init__(self, _vm, idx):
        self._vm = _vm
        self._start = idx
        self._end = idx

        self._ins = []
        self._ops = []
        self._fields = {}
        self._methods = {}

    def push(self, ins):
        """Append *ins* and advance the end offset by its encoded length."""
        self._ins.append(ins)
        self._end += ins.get_length()

    def get_start(self):
        """Offset of the first instruction."""
        return self._start

    def get_end(self):
        """Offset just past the last pushed instruction."""
        return self._end

    def get_ops(self):
        return self._ops

    def get_fields(self):
        return self._fields

    def get_methods(self):
        return self._methods

    def show(self):
        for instruction in self._ins:
            instruction.show(0)
class DVMBasicBlock(object):
    """
    A simple basic block of a dalvik method.

    Tracks its [start, end) offset range inside the method's bytecode, its
    predecessor/successor blocks, and feeds field/method/string usage into
    the context's tainted-variable/package registries as instructions are
    pushed.
    """

    def __init__(self, start, vm, method, context):
        self.__vm = vm
        self.method = method
        self.context = context

        self.last_length = 0
        self.nb_instructions = 0

        # Predecessor and successor basic blocks.
        self.fathers = []
        self.childs = []

        self.start = start
        self.end = self.start

        # idx -> companion payload instruction (packed/sparse switch,
        # fill-array-data); filled lazily by push().
        self.special_ins = {}

        self.name = "%s-BB@0x%x" % (self.method.get_name(), self.start)
        self.exception_analysis = None

        self.tainted_variables = self.context.get_tainted_variables()
        self.tainted_packages = self.context.get_tainted_packages()

        self.notes = []

    def get_notes(self):
        return self.notes

    def set_notes(self, value):
        # Replaces all notes with a single entry.
        self.notes = [value]

    def add_note(self, note):
        self.notes.append(note)

    def clear_notes(self):
        self.notes = []

    def get_instructions(self):
        """
        Get all instructions from a basic block.

        :rtype: Return all instructions in the current basic block
        """
        tmp_ins = []
        idx = 0
        for i in self.method.get_instructions():
            if idx >= self.start and idx < self.end:
                tmp_ins.append(i)
            idx += i.get_length()
        return tmp_ins

    def get_nb_instructions(self):
        return self.nb_instructions

    def get_method(self):
        return self.method

    def get_name(self):
        return "%s-BB@0x%x" % (self.method.get_name(), self.start)

    def get_start(self):
        """Offset of the first instruction of this block."""
        return self.start

    def get_end(self):
        """Offset just past the last instruction of this block."""
        return self.end

    def get_last(self):
        """Return the last instruction of this block."""
        return self.get_instructions()[-1]

    def get_next(self):
        """
        Get next basic blocks

        :rtype: a list of the next basic blocks
        """
        return self.childs

    def get_prev(self):
        """
        Get previous basic blocks

        :rtype: a list of the previous basic blocks
        """
        return self.fathers

    def set_fathers(self, f):
        self.fathers.append(f)

    def get_last_length(self):
        return self.last_length

    def set_childs(self, values):
        # Empty *values* means fall-through to the block right after this
        # one. Each child entry is a tuple
        # (last_instruction_offset, target_offset, DVMBasicBlock).
        if values == []:
            next_block = self.context.get_basic_block(self.end + 1)
            if next_block is not None:
                self.childs.append(
                    (self.end - self.get_last_length(), self.end, next_block))
        else:
            for i in values:
                if i != -1:
                    next_block = self.context.get_basic_block(i)
                    if next_block is not None:
                        self.childs.append(
                            (self.end - self.get_last_length(), i, next_block))

        # Register this block as a father of every resolved child.
        for c in self.childs:
            if c[2] is not None:
                c[2].set_fathers((c[1], c[0], self))

    def push(self, i):
        # Append instruction *i*, advancing self.end, and record any taint
        # information it carries.
        # NOTE(review): the bare except silently drops any failure while
        # decoding the instruction -- kept as-is.
        try:
            self.nb_instructions += 1
            idx = self.end
            self.last_length = i.get_length()
            self.end += self.last_length

            op_value = i.get_op_value()

            # field access
            if (op_value >= 0x52 and op_value <= 0x6d):
                desc = self.__vm.get_cm_field(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(TAINTED_FIELD, desc, DVM_FIELDS_ACCESS[
                        i.get_name()][0], idx, self.method)
            # invoke
            elif (op_value >= 0x6e and op_value <= 0x72) or (op_value >= 0x74 and op_value <= 0x78):
                idx_meth = i.get_ref_kind()
                method_info = self.__vm.get_cm_method(idx_meth)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        method_info[0], TAINTED_PACKAGE_CALL, idx, self.method, idx_meth)
            # new_instance
            elif op_value == 0x22:
                idx_type = i.get_ref_kind()
                type_info = self.__vm.get_cm_type(idx_type)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        type_info, TAINTED_PACKAGE_CREATE, idx, self.method, None)
            # const-string
            elif (op_value >= 0x1a and op_value <= 0x1b):
                string_name = self.__vm.get_cm_string(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(
                        TAINTED_STRING, string_name, "R", idx, self.method)
            # fill-array-data / packed-switch / sparse-switch: remember the
            # payload instruction that the branch offset points at.
            elif op_value == 0x26 or (op_value >= 0x2b and op_value <= 0x2c):
                code = self.method.get_code().get_bc()
                self.special_ins[idx] = code.get_ins_off(
                    idx + i.get_ref_off() * 2)
        except:
            pass

    def get_special_ins(self, idx):
        """
        Return the associated instruction to a specific instruction (for example a packed/sparse switch)

        :param idx: the index of the instruction

        :rtype: None or an Instruction
        """
        try:
            return self.special_ins[idx]
        except:
            return None

    def get_exception_analysis(self):
        return self.exception_analysis

    def set_exception_analysis(self, exception_analysis):
        self.exception_analysis = exception_analysis
# Tags identifying what kind of tainted entity a record describes.
TAINTED_LOCAL_VARIABLE = 0
TAINTED_FIELD = 1
TAINTED_STRING = 2
class PathVar(object):
    """One access path to a tainted variable: how it was accessed, at which
    instruction offset, and in which method (via class-manager indexes)."""

    def __init__(self, access, idx, dst_idx, info_obj):
        self.access_flag = access
        self.idx = idx
        self.dst_idx = dst_idx
        self.info_obj = info_obj

    def get_var_info(self):
        """Delegate to the tainted-variable object backing this path."""
        return self.info_obj.get_info()

    def get_access_flag(self):
        return self.access_flag

    def get_src(self, cm):
        """Resolve self.idx through *cm* to (class, name, descriptor)."""
        ref = cm.get_method_ref(self.idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_dst(self, cm):
        """Resolve self.dst_idx through *cm* to (class, name, descriptor)."""
        ref = cm.get_method_ref(self.dst_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_idx(self):
        return self.idx
class TaintedVariable(object):
    """A tainted entity (field, string or local variable) together with all
    code locations (paths) that touch it, grouped by method index."""

    def __init__(self, var, _type):
        self.var = var
        self.type = _type

        # method_idx -> list of (access_flag, instruction_idx) tuples
        self.paths = {}
        self.__cache = []

    def get_type(self):
        """Return the TAINTED_* tag of this variable."""
        return self.type

    def get_info(self):
        """Return the variable description; fields are reordered to
        (class_name, descriptor, name)."""
        if self.type == TAINTED_FIELD:
            return [self.var[0], self.var[2], self.var[1]]
        return self.var

    def push(self, access, idx, ref):
        """Record one access with flag *access* at instruction *idx* inside
        the method identified by ref.get_method_idx()."""
        m_idx = ref.get_method_idx()

        if m_idx not in self.paths:
            self.paths[m_idx] = []

        self.paths[m_idx].append((access, idx))

    def get_paths_access(self, mode):
        """Yield (method_idx, path, access, idx) for every recorded path
        whose access flag is contained in *mode*.

        Bug fix: the previous implementation indexed the per-method list
        with a tuple (``self.paths[i][j]`` where ``j`` was already a list
        element), which raised TypeError as soon as it was iterated.
        """
        for m_idx in self.paths:
            for path in self.paths[m_idx]:
                access, idx = path
                if access in mode:
                    yield m_idx, path, access, idx

    def get_paths(self):
        """Return a (cached) list of [(access, idx), method_idx] pairs."""
        if self.__cache != []:
            return self.__cache

        for i in self.paths:
            for j in self.paths[i]:
                self.__cache.append([j, i])
        return self.__cache

    def get_paths_length(self):
        """Number of distinct methods touching this variable."""
        return len(self.paths)

    def show_paths(self, vm):
        show_PathVariable(vm, self.get_paths())
class TaintedVariables(object):
    """Registry of tainted fields, strings and local variables for one dex,
    with per-method reverse indexes and AOSP permission lookups."""

    def __init__(self, _vm):
        self.__vm = _vm
        self.__vars = {
            TAINTED_LOCAL_VARIABLE: {},
            TAINTED_FIELD: {},
            TAINTED_STRING: {},
        }

        # method_idx -> set of TaintedVariable touched in that method;
        # filled lazily by push_info().
        self.__cache_field_by_method = {}
        self.__cache_string_by_method = {}

        # Permission databases matching the dex's declared API level.
        self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
            "aosp_permissions", self.__vm.get_api_version())
        self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
            "api_permission_mappings", self.__vm.get_api_version())

    # functions to get particulars elements
    def get_string(self, s):
        """Return the TaintedVariable for string *s*, or None."""
        try:
            return self.__vars[TAINTED_STRING][s]
        except KeyError:
            return None

    def get_field(self, class_name, name, descriptor):
        """Return the TaintedVariable for the given field, or None."""
        key = class_name + descriptor + name

        try:
            return self.__vars[TAINTED_FIELD][key]
        except KeyError:
            return None

    def toPathVariable(self, obj):
        """Expand a TaintedVariable's raw paths into PathVar objects."""
        z = []
        for i in obj.get_paths():
            access, idx = i[0]
            m_idx = i[1]

            z.append(PathVar(access, idx, m_idx, obj))
        return z

    # permission functions
    def get_permissions_method(self, method):
        """Return the set of AOSP permissions implied by the fields that
        *method* accesses."""
        permissions = set()

        for f, f1 in self.get_fields():
            data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
            if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
                for path in f.get_paths():
                    #access, idx = path[0]
                    m_idx = path[1]
                    if m_idx == method.get_idx():
                        permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
                            "AOSP_PERMISSIONS_BY_FIELDS"][data])

        return permissions

    def get_permissions(self, permissions_needed):
        """
        @param permissions_needed : a list of restricted permissions to get ([] returns all permissions)

        @rtype : a dictionnary of permissions' paths
        """
        permissions = {}

        pn = set(permissions_needed)
        if permissions_needed == []:
            pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())

        for f, _ in self.get_fields():
            data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
            if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
                perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
                    "AOSP_PERMISSIONS_BY_FIELDS"][data])
                for p in perm_intersection:
                    try:
                        permissions[p].extend(self.toPathVariable(f))
                    except KeyError:
                        permissions[p] = []
                        permissions[p].extend(self.toPathVariable(f))

        return permissions

    # global functions
    def get_strings(self):
        """Yield (TaintedVariable, string) pairs for all tainted strings."""
        for i in self.__vars[TAINTED_STRING]:
            yield self.__vars[TAINTED_STRING][i], i

    def get_fields(self):
        """Yield (TaintedVariable, key) pairs for all tainted fields."""
        for i in self.__vars[TAINTED_FIELD]:
            yield self.__vars[TAINTED_FIELD][i], i

    # specifics functions
    def get_strings_by_method(self, method):
        # Map each tainted string used in *method* to its (access, idx)
        # paths inside that method. The bare except yields a possibly
        # partial dict on any failure -- kept as-is.
        z = {}

        try:
            for i in self.__cache_string_by_method[method.get_method_idx()]:
                z[i] = []
                for j in i.get_paths():
                    if method.get_method_idx() == j[1]:
                        z[i].append(j[0])

            return z
        except:
            return z

    def get_fields_by_method(self, method):
        # Same shape as get_strings_by_method(), for tainted fields.
        z = {}

        try:
            for i in self.__cache_field_by_method[method.get_method_idx()]:
                z[i] = []
                for j in i.get_paths():
                    if method.get_method_idx() == j[1]:
                        z[i].append(j[0])
            return z
        except:
            return z

    def add(self, var, _type, _method=None):
        # Ensure a TaintedVariable exists for *var* under *_type*; local
        # variables are additionally grouped per *_method*.
        if _type == TAINTED_FIELD:
            key = var[0] + var[1] + var[2]
            if key not in self.__vars[TAINTED_FIELD]:
                self.__vars[TAINTED_FIELD][key] = TaintedVariable(var, _type)
        elif _type == TAINTED_STRING:
            if var not in self.__vars[TAINTED_STRING]:
                self.__vars[TAINTED_STRING][var] = TaintedVariable(var, _type)
        elif _type == TAINTED_LOCAL_VARIABLE:
            if _method not in self.__vars[TAINTED_LOCAL_VARIABLE]:
                self.__vars[TAINTED_LOCAL_VARIABLE][_method] = {}

            if var not in self.__vars[TAINTED_LOCAL_VARIABLE][_method]:
                self.__vars[TAINTED_LOCAL_VARIABLE][_method][
                    var] = TaintedVariable(var, _type)

    def push_info(self, _type, var, access, idx, ref):
        # Record an access to *var* and update the per-method caches.
        if _type == TAINTED_FIELD:
            self.add(var, _type)
            key = var[0] + var[1] + var[2]
            self.__vars[_type][key].push(access, idx, ref)

            method_idx = ref.get_method_idx()
            if method_idx not in self.__cache_field_by_method:
                self.__cache_field_by_method[method_idx] = set()

            self.__cache_field_by_method[method_idx].add(
                self.__vars[TAINTED_FIELD][key])

        elif _type == TAINTED_STRING:
            self.add(var, _type)
            self.__vars[_type][var].push(access, idx, ref)

            method_idx = ref.get_method_idx()

            if method_idx not in self.__cache_string_by_method:
                self.__cache_string_by_method[method_idx] = set()

            self.__cache_string_by_method[method_idx].add(
                self.__vars[TAINTED_STRING][var])
def show_Path(vm, path):
    """Resolve the source/destination of *path* through the class manager.

    NOTE(review): the resolved names are computed but never printed -- the
    output statements appear to have been stripped, leaving this function
    with no visible effect.
    """
    cm = vm.get_class_manager()

    if isinstance(path, PathVar):
        dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
    else:
        if path.get_access_flag() == TAINTED_PACKAGE_CALL:
            src_class_name, src_method_name, src_descriptor = path.get_src(cm)
            dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
        else:
            src_class_name, src_method_name, src_descriptor = path.get_src(cm)
def get_Path(vm, path):
    """Describe *path* as a dict with "src"/"dst" strings and the
    instruction index under "idx"."""
    cm = vm.get_class_manager()
    description = {}

    if isinstance(path, PathVar):
        dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
        description["src"] = "%s" % path.get_var_info()
        description["dst"] = "%s %s %s" % (
            dst_class_name, dst_method_name, dst_descriptor)
    else:
        src_class_name, src_method_name, src_descriptor = path.get_src(cm)
        description["src"] = "%s %s %s" % (
            src_class_name, src_method_name, src_descriptor)
        # Only call paths carry a destination.
        if path.get_access_flag() == TAINTED_PACKAGE_CALL:
            dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
            description["dst"] = "%s %s %s" % (
                dst_class_name, dst_method_name, dst_descriptor)

    description["idx"] = path.get_idx()
    return description
def show_Paths(vm, paths):
    """
    Show paths of packages

    :param vm: the object which represents the dex file
    :param paths: a list of :class:`PathP` objects
    """
    for entry in paths:
        show_Path(vm, entry)
def get_Paths(vm, paths):
    """
    Return paths of packages

    :param vm: the object which represents the dex file
    :param paths: a list of :class:`PathP` objects
    """
    return [get_Path(vm, entry) for entry in paths]
def show_PathVariable(vm, paths):
    # Output is disabled: the early return below makes the loop that follows
    # unreachable (kept for reference).
    return
    for path in paths:
        access, idx = path[0]
        m_idx = path[1]
        method = vm.get_cm_method(m_idx)
        print("%s %x %s->%s %s" %
              (access, idx, method[0], method[1], method[2][0] + method[2][1]))
class PathP(object):
    """A package-level path: an access kind (call/create), the instruction
    offset and the source/destination method indexes."""

    def __init__(self, access, idx, src_idx, dst_idx):
        self.access_flag = access
        self.idx = idx
        self.src_idx = src_idx
        self.dst_idx = dst_idx

    def get_access_flag(self):
        return self.access_flag

    def get_idx(self):
        return self.idx

    def get_src_idx(self):
        return self.src_idx

    def get_dst_idx(self):
        return self.dst_idx

    def get_src(self, cm):
        """Resolve the source method index to (class, name, descriptor)."""
        ref = cm.get_method_ref(self.src_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_dst(self, cm):
        """Resolve the destination method index to (class, name, descriptor)."""
        ref = cm.get_method_ref(self.dst_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()
class TaintedPackage(object):
    """All recorded paths (object creations and method calls) involving one
    external package/class."""

    def __init__(self, vm, name):
        self.vm = vm
        self.name = name
        # access kind -> list of PathP
        self.paths = {TAINTED_PACKAGE_CREATE: [], TAINTED_PACKAGE_CALL: []}

    def get_name(self):
        return self.name

    def gets(self):
        return self.paths

    def push(self, access, idx, src_idx, dst_idx):
        """Record a new path under *access* and return the created PathP."""
        p = PathP(access, idx, src_idx, dst_idx)
        self.paths[access].append(p)
        return p

    def get_objects_paths(self):
        return self.paths[TAINTED_PACKAGE_CREATE]

    def search_method(self, name, descriptor):
        """
        @param name : a regexp for the name of the method

        @param descriptor : a regexp for the descriptor of the method

        @rtype : a list of called paths
        """
        l = []
        m_name = re.compile(name)
        m_descriptor = re.compile(descriptor)

        for path in self.paths[TAINTED_PACKAGE_CALL]:
            _, dst_name, dst_descriptor = path.get_dst(
                self.vm.get_class_manager())

            if m_name.match(dst_name) is not None and m_descriptor.match(dst_descriptor) is not None:
                l.append(path)
        return l

    def get_method(self, name, descriptor):
        # NOTE(review): PathP defines no get_name()/get_descriptor(), so
        # this would raise AttributeError if ever called -- confirm before
        # relying on it.
        l = []
        for path in self.paths[TAINTED_PACKAGE_CALL]:
            if path.get_name() == name and path.get_descriptor() == descriptor:
                l.append(path)
        return l

    def get_paths(self):
        """Yield every path, creations first, then calls."""
        for i in self.paths:
            for j in self.paths[i]:
                yield j

    def get_paths_length(self):
        """Total number of recorded paths across both access kinds."""
        x = 0
        for i in self.paths:
            x += len(self.paths[i])
        return x

    def get_methods(self):
        return [path for path in self.paths[TAINTED_PACKAGE_CALL]]

    def get_new(self):
        return [path for path in self.paths[TAINTED_PACKAGE_CREATE]]

    def show(self):
        # Output is disabled: the early return makes everything below
        # unreachable (kept for reference).
        return
        cm = self.vm.get_class_manager()
        print(self.get_name())
        for _type in self.paths:
            print("\t -->", _type)
            if _type == TAINTED_PACKAGE_CALL:
                for path in self.paths[_type]:
                    print("\t\t => %s <-- %x in %s" %
                          (path.get_dst(cm), path.get_idx(), path.get_src(cm)))
            else:
                for path in self.paths[_type]:
                    print("\t\t => %x in %s" %
                          (path.get_idx(), path.get_src(cm)))
def show_Permissions(dx):
    """
    Show where permissions are used in a specific application

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    permission_paths = dx.get_permissions([])

    for permission in permission_paths:
        for path in permission_paths[permission]:
            show_Path(dx.get_vm(), path)
def show_DynCode(dx):
    """
    Show where dynamic code is used

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    # Class-loader entry points that indicate dynamic code loading.
    targets = [
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    ]
    paths = []
    for class_name, method_name in targets:
        paths.extend(dx.get_tainted_packages().search_methods(class_name,
                                                              method_name,
                                                              "."))
    show_Paths(dx.get_vm(), paths)
def show_NativeMethods(dx):
    """
    Show the native methods

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    # NOTE(review): output is disabled by this early return; the print below
    # is intentionally unreachable (kept for manual debugging).
    return
    print(get_NativeMethods(dx))
def show_ReflectionCode(dx):
    """
    Show the reflection code

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    """
    reflection_paths = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    show_Paths(dx.get_vm(), reflection_paths)
def get_NativeMethods(dx):
    """
    Return the native methods

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: [tuple]
    """
    # 0x100 is the ACC_NATIVE access flag of the dex format.
    return [(m.get_class_name(), m.get_name(), m.get_descriptor())
            for m in dx.get_vm().get_methods()
            if m.get_access_flags() & 0x100]
def get_ReflectionCode(dx):
    """
    Return the reflection code

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: [dict]
    """
    reflection_paths = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    return get_Paths(dx.get_vm(), reflection_paths)
def is_crypto_code(dx):
    """
    Crypto code is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    crypto_packages = (
        "Ljavax/crypto/.",
        "Ljava/security/spec/.",
    )
    tainted = dx.get_tainted_packages()
    return any(tainted.search_methods(pkg, ".", ".") for pkg in crypto_packages)
def is_dyn_code(dx):
    """
    Dalvik Dynamic code loading is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    loaders = (
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    )
    tainted = dx.get_tainted_packages()
    return any(tainted.search_methods(class_name, method_name, ".")
               for class_name, method_name in loaders)
def is_reflection_code(dx):
    """
    Reflection is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    targets = (
        ("Ljava/lang/reflect/Method;", "."),
        ("Ljava/lang/reflect/Field;", "."),
        ("Ljava/lang/Class;", "forName"),
    )
    tainted = dx.get_tainted_packages()
    return any(tainted.search_methods(class_name, method_name, ".")
               for class_name, method_name in targets)
def is_native_code(dx):
    """
    Native code is present ?

    :param dx : the analysis virtual machine
    :type dx: a :class:`VMAnalysis` object
    :rtype: boolean
    """
    targets = (
        ("Ljava/lang/System;", "load."),
        ("Ljava/lang/Runtime;", "load."),
    )
    tainted = dx.get_tainted_packages()
    return any(tainted.search_methods(class_name, method_name, ".")
               for class_name, method_name in targets)
class TaintedPackages(object):
    """
    Track every tainted usage (object creation / method call) of each
    package (class name) seen in a dex file, indexed both by package and
    by the method the usage occurs in.

    :param _vm: the object which represents the dex file
    :type _vm: a :class:`DalvikVMFormat` object
    """

    def __init__(self, _vm):
        self.__vm = _vm
        self.__packages = {}  # class name -> TaintedPackage
        self.__methods = {}   # method -> {class name -> [PathP]}
        self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
            "aosp_permissions", self.__vm.get_api_version())
        self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
            "api_permission_mappings", self.__vm.get_api_version())

    def _add_pkg(self, name):
        # Create the TaintedPackage entry for this class name on first use.
        if name not in self.__packages:
            self.__packages[name] = TaintedPackage(self.__vm, name)

    def push_info(self, class_name, access, idx, method, idx_method):
        """Record a new tainted path for *class_name* and index it by *method*."""
        self._add_pkg(class_name)
        p = self.__packages[class_name].push(
            access, idx, method.get_method_idx(), idx_method)
        # FIX: the original nested bare ``except:`` blocks only emulated
        # "create the nested dict/list on first use"; setdefault does the
        # same thing without swallowing unrelated exceptions.
        self.__methods.setdefault(method, {}).setdefault(class_name, []).append(p)

    def get_packages_by_method(self, method):
        """Return the {class name: [PathP]} map for *method* ({} if unknown)."""
        try:
            return self.__methods[method]
        except KeyError:
            return {}

    def get_package(self, name):
        """Return the TaintedPackage for *name* (raises KeyError if absent)."""
        return self.__packages[name]

    def get_packages_by_bb(self, bb):
        """
        :rtype: return a list of packages used in a basic block
                (as (name, access flag, idx, method) tuples)
        """
        l = []
        for i in self.__packages:
            paths = self.__packages[i].gets()
            for j in paths:
                for k in paths[j]:
                    if k.get_bb() == bb:
                        l.append((i, k.get_access_flag(),
                                  k.get_idx(), k.get_method()))
        return l

    def get_packages(self):
        """Yield (TaintedPackage, class name) pairs."""
        for i in self.__packages:
            yield self.__packages[i], i

    def get_internal_packages_from_package(self, package):
        """Return the call paths going from *package* into classes defined
        in the dex file itself."""
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
                if src_class_name == package and dst_class_name in classes:
                    l.append(j)
        return l

    def get_internal_packages(self):
        """
        :rtype: return a list of the internal packages called in the application
        """
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                    dst_class_name, _, _ = j.get_dst(
                        self.__vm.get_class_manager())
                    if dst_class_name in classes and m.get_name() in classes:
                        l.append(j)
        return l

    def get_internal_new_packages(self):
        """
        :rtype: return a dict {class name: [PathP]} of the internal packages
                created in the application
        """
        classes = self.__vm.get_classes_names()
        l = {}
        for m, _ in self.get_packages():
            paths = m.get_new()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                if src_class_name in classes and m.get_name() in classes:
                    if j.get_access_flag() == TAINTED_PACKAGE_CREATE:
                        # FIX: setdefault instead of a bare ``except:`` fallback.
                        l.setdefault(m.get_name(), []).append(j)
        return l

    def get_external_packages(self):
        """
        :rtype: return a list of the external packages called in the application
        """
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
                if src_class_name in classes and dst_class_name not in classes:
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        l.append(j)
        return l

    def search_packages(self, package_name):
        """
        :param package_name: a regexp for the name of the package
        :rtype: a list of called packages' paths
        """
        ex = re.compile(package_name)
        l = []
        for m, _ in self.get_packages():
            if ex.search(m.get_name()) is not None:
                l.extend(m.get_methods())
        return l

    def search_unique_packages(self, package_name):
        """
        :param package_name: a regexp for the name of the package
        :rtype: ([[class, name, descriptor], ...], {key: count}) -- NOTE the
                counter starts at 0, so the stored value is occurrences - 1
                (kept for backward compatibility).
        """
        ex = re.compile(package_name)
        l = []
        d = {}
        for m, _ in self.get_packages():
            # FIX: TaintedPackage defines get_name(), not get_info(); the
            # original call raised AttributeError.
            if ex.match(m.get_name()) is not None:
                for path in m.get_methods():
                    key = path.get_class_name() + path.get_name() + \
                        path.get_descriptor()
                    if key in d:
                        d[key] += 1
                    else:
                        d[key] = 0
                    l.append([path.get_class_name(),
                              path.get_name(), path.get_descriptor()])
        return l, d

    def search_methods(self, class_name, name, descriptor, re_expr=True):
        """
        @param class_name : a regexp for the class name of the method (the package)
        @param name : a regexp for the name of the method
        @param descriptor : a regexp for the descriptor of the method
        @param re_expr : if False, no search is performed and [] is returned
        @rtype : a list of called methods' paths
        """
        l = []
        if re_expr:
            ex = re.compile(class_name)
            for m, _ in self.get_packages():
                if ex.search(m.get_name()) is not None:
                    l.extend(m.search_method(name, descriptor))
        return l

    def search_objects(self, class_name):
        """
        @param class_name : a regexp for the class name
        @rtype : a list of created objects' paths
        """
        ex = re.compile(class_name)
        l = []
        for m, _ in self.get_packages():
            if ex.search(m.get_name()) is not None:
                l.extend(m.get_objects_paths())
        return l

    def search_crypto_packages(self):
        """
        @rtype : a list of called crypto packages
        """
        return self.search_packages("Ljavax/crypto/")

    def search_telephony_packages(self):
        """
        @rtype : a list of called telephony packages
        """
        return self.search_packages("Landroid/telephony/")

    def search_net_packages(self):
        """
        @rtype : a list of called net packages
        """
        return self.search_packages("Landroid/net/")

    def get_method(self, class_name, name, descriptor):
        """Return the exact-match call paths of a method ([] if the class
        is unknown)."""
        try:
            return self.__packages[class_name].get_method(name, descriptor)
        except KeyError:
            return []

    def get_permissions_method(self, method):
        """Return the set of AOSP permissions required by the external API
        calls performed inside *method*."""
        permissions = set()
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                if j.get_method() == method:
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
                            self.__vm.get_class_manager())
                        data = "%s-%s-%s" % (dst_class_name,
                                             dst_method_name, dst_descriptor)
                        # Membership test directly on the dict -- no need to
                        # materialise the key list.
                        if data in self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"]:
                            permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
                                "AOSP_PERMISSIONS_BY_METHODS"][data])
        return permissions

    def get_permissions(self, permissions_needed):
        """
        @param permissions_needed : a list of restricted permissions to get ([] returns all permissions)
        @rtype : a dictionary of permissions' paths
        """
        permissions = {}
        pn = set(permissions_needed)
        if permissions_needed == []:
            pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())
        classes = self.__vm.get_classes_names()
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, src_method_name, src_descriptor = j.get_src(
                    self.__vm.get_class_manager())
                dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
                    self.__vm.get_class_manager())
                # Only external calls made from application classes can
                # require a permission.
                if (src_class_name in classes) and (dst_class_name not in classes):
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        data = "%s-%s-%s" % (dst_class_name,
                                             dst_method_name, dst_descriptor)
                        if data in self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"]:
                            perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
                                "AOSP_PERMISSIONS_BY_METHODS"][data])
                            for p in perm_intersection:
                                permissions.setdefault(p, []).append(j)
        return permissions
class Enum(object):
    """A minimal enumeration: every entry of *names* becomes an UPPERCASE
    attribute whose value is its position in the list."""

    def __init__(self, names):
        self.names = names
        for index, label in enumerate(self.names):
            setattr(self, label.upper(), index)

    def tuples(self):
        """Return the ((value, name), ...) pairs of this enumeration."""
        return tuple(enumerate(self.names))
# Closed set of tag identifiers used to classify which Android / Java API
# areas a method touches.
TAG_ANDROID = Enum([
    'ANDROID', 'TELEPHONY', 'SMS', 'SMSMESSAGE', 'ACCESSIBILITYSERVICE', 'ACCOUNTS',
    'ANIMATION', 'APP', 'BLUETOOTH', 'CONTENT', 'DATABASE', 'DEBUG', 'DRM', 'GESTURE',
    'GRAPHICS', 'HARDWARE', 'INPUTMETHODSERVICE', 'LOCATION', 'MEDIA', 'MTP',
    'NET', 'NFC', 'OPENGL', 'OS', 'PREFERENCE', 'PROVIDER', 'RENDERSCRIPT',
    'SAX', 'SECURITY', 'SERVICE', 'SPEECH', 'SUPPORT', 'TEST', 'TEXT', 'UTIL',
    'VIEW', 'WEBKIT', 'WIDGET', 'DALVIK_BYTECODE', 'DALVIK_SYSTEM', 'JAVA_REFLECTION'])
# Reverse lookup: tag value -> tag name (used for display).
TAG_REVERSE_ANDROID = dict((i[0], i[1]) for i in TAG_ANDROID.tuples())
# tag -> [match type, class-name prefix pattern]; the pattern strings are
# compiled in place by Tags.__init__.
TAGS_ANDROID = {
    TAG_ANDROID.ANDROID: [0, "Landroid"],
    TAG_ANDROID.TELEPHONY: [0, "Landroid/telephony"],
    TAG_ANDROID.SMS: [0, "Landroid/telephony/SmsManager"],
    TAG_ANDROID.SMSMESSAGE: [0, "Landroid/telephony/SmsMessage"],
    TAG_ANDROID.DEBUG: [0, "Landroid/os/Debug"],
    TAG_ANDROID.ACCESSIBILITYSERVICE: [0, "Landroid/accessibilityservice"],
    TAG_ANDROID.ACCOUNTS: [0, "Landroid/accounts"],
    TAG_ANDROID.ANIMATION: [0, "Landroid/animation"],
    TAG_ANDROID.APP: [0, "Landroid/app"],
    TAG_ANDROID.BLUETOOTH: [0, "Landroid/bluetooth"],
    TAG_ANDROID.CONTENT: [0, "Landroid/content"],
    TAG_ANDROID.DATABASE: [0, "Landroid/database"],
    TAG_ANDROID.DRM: [0, "Landroid/drm"],
    TAG_ANDROID.GESTURE: [0, "Landroid/gesture"],
    TAG_ANDROID.GRAPHICS: [0, "Landroid/graphics"],
    TAG_ANDROID.HARDWARE: [0, "Landroid/hardware"],
    TAG_ANDROID.INPUTMETHODSERVICE: [0, "Landroid/inputmethodservice"],
    TAG_ANDROID.LOCATION: [0, "Landroid/location"],
    TAG_ANDROID.MEDIA: [0, "Landroid/media"],
    TAG_ANDROID.MTP: [0, "Landroid/mtp"],
    TAG_ANDROID.NET: [0, "Landroid/net"],
    TAG_ANDROID.NFC: [0, "Landroid/nfc"],
    TAG_ANDROID.OPENGL: [0, "Landroid/opengl"],
    TAG_ANDROID.OS: [0, "Landroid/os"],
    TAG_ANDROID.PREFERENCE: [0, "Landroid/preference"],
    TAG_ANDROID.PROVIDER: [0, "Landroid/provider"],
    TAG_ANDROID.RENDERSCRIPT: [0, "Landroid/renderscript"],
    TAG_ANDROID.SAX: [0, "Landroid/sax"],
    TAG_ANDROID.SECURITY: [0, "Landroid/security"],
    TAG_ANDROID.SERVICE: [0, "Landroid/service"],
    TAG_ANDROID.SPEECH: [0, "Landroid/speech"],
    TAG_ANDROID.SUPPORT: [0, "Landroid/support"],
    TAG_ANDROID.TEST: [0, "Landroid/test"],
    TAG_ANDROID.TEXT: [0, "Landroid/text"],
    TAG_ANDROID.UTIL: [0, "Landroid/util"],
    TAG_ANDROID.VIEW: [0, "Landroid/view"],
    TAG_ANDROID.WEBKIT: [0, "Landroid/webkit"],
    TAG_ANDROID.WIDGET: [0, "Landroid/widget"],
    TAG_ANDROID.DALVIK_BYTECODE: [0, "Ldalvik/bytecode"],
    TAG_ANDROID.DALVIK_SYSTEM: [0, "Ldalvik/system"],
    TAG_ANDROID.JAVA_REFLECTION: [0, "Ljava/lang/reflect"],
}
class Tags(object):
    """
    Handle specific tags

    :param patterns: mapping tag -> [match type, class-name pattern]
    :param reverse: mapping tag value -> tag name, used for display
    """

    def __init__(self, patterns=TAGS_ANDROID, reverse=TAG_REVERSE_ANDROID):
        self.tags = set()

        self.patterns = patterns
        # FIX: honour the ``reverse`` argument instead of always using the
        # module-level TAG_REVERSE_ANDROID default.
        self.reverse = reverse

        # NOTE(review): this compiles the patterns *in place*, mutating the
        # shared ``patterns`` mapping. re.compile() returns an already
        # compiled pattern unchanged, so repeated instantiation is safe.
        for i in self.patterns:
            self.patterns[i][1] = re.compile(self.patterns[i][1])

    def emit(self, method):
        """Add every tag whose pattern matches the method's class name."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(method.get_class()) is not None:
                    self.tags.add(i)

    def emit_by_classname(self, classname):
        """Add every tag whose pattern matches *classname*."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(classname) is not None:
                    self.tags.add(i)

    def get_list(self):
        """Return the human-readable names of the emitted tags."""
        return [self.reverse[i] for i in self.tags]

    def __contains__(self, key):
        return key in self.tags

    def __str__(self):
        return str([self.reverse[i] for i in self.tags])

    def empty(self):
        """Return True if no tag has been emitted yet."""
        return self.tags == set()
class BasicBlocks(object):
    """
    This class represents all basic blocks of a method

    :param _vm: the object which represents the dex file
    :param tv: the tainted-information holder (may be None)
    """

    def __init__(self, _vm, tv):
        self.__vm = _vm
        self.tainted = tv
        self.bb = []  # list of basic blocks, in code order

    def push(self, bb):
        """Append a basic block."""
        self.bb.append(bb)

    def pop(self, idx):
        """Remove and return the basic block at list position *idx*."""
        return self.bb.pop(idx)

    def get_basic_block(self, idx):
        """Return the basic block containing bytecode offset *idx*, or None."""
        for i in self.bb:
            if idx >= i.get_start() and idx < i.get_end():
                return i
        return None

    def get_tainted_integers(self):
        # FIX: catch AttributeError specifically (raised when ``tainted`` is
        # None or lacks the accessor) instead of a bare ``except:`` that also
        # swallowed unrelated errors, including KeyboardInterrupt.
        try:
            return self.tainted.get_tainted_integers()
        except AttributeError:
            return None

    def get_tainted_packages(self):
        # See get_tainted_integers for the exception-narrowing rationale.
        try:
            return self.tainted.get_tainted_packages()
        except AttributeError:
            return None

    def get_tainted_variables(self):
        # See get_tainted_integers for the exception-narrowing rationale.
        try:
            return self.tainted.get_tainted_variables()
        except AttributeError:
            return None

    def get(self):
        """
        :rtype: return each basic block (:class:`DVMBasicBlock` object)
        """
        for i in self.bb:
            yield i

    def gets(self):
        """
        :rtype: a list of basic blocks (:class:`DVMBasicBlock` objects)
        """
        return self.bb

    def get_basic_block_pos(self, idx):
        """Return the basic block at list position *idx*."""
        return self.bb[idx]
class ExceptionAnalysis(object):
    """One try/catch range of a method, with each handler resolved to its
    target basic block."""

    def __init__(self, exception, bb):
        # *exception* is [start, end, [type, handler_idx], ...]; the target
        # basic block of each handler is appended to its entry in place.
        self.start, self.end = exception[0], exception[1]
        self.exceptions = exception[2:]
        for handler in self.exceptions:
            handler.append(bb.get_basic_block(handler[1]))

    def show_buff(self):
        """Return a printable summary of this range and its handlers."""
        lines = ["%x:%x" % (self.start, self.end)]
        for name, idx, block in self.exceptions:
            target = block if block is None else block.get_name()
            lines.append("\t(%s -> %x %s)" % (name, idx, target))
        return "\n".join(lines)

    def get(self):
        """Return the range and its handlers as a plain dict."""
        return {
            "start": self.start,
            "end": self.end,
            "list": [{"name": name, "idx": idx, "bb": block.get_name()}
                     for name, idx, block in self.exceptions],
        }
class Exceptions(object):
    """Collection of all exception ranges found in a dex file's methods."""

    def __init__(self, _vm):
        self.__vm = _vm
        self.exceptions = []

    def add(self, exceptions, basic_blocks):
        """Wrap each raw exception entry in an ExceptionAnalysis."""
        self.exceptions.extend(
            ExceptionAnalysis(exc, basic_blocks) for exc in exceptions)

    def get_exception(self, addr_start, addr_end):
        """Return the first range that contains, or is contained in,
        [addr_start, addr_end]; None if there is no such range."""
        for exc in self.exceptions:
            covers = exc.start >= addr_start and exc.end <= addr_end
            covered = addr_end <= exc.end and addr_start >= exc.start
            if covers or covered:
                return exc
        return None

    def gets(self):
        """Return the list of ExceptionAnalysis objects."""
        return self.exceptions

    def get(self):
        """Yield each ExceptionAnalysis object."""
        for exc in self.exceptions:
            yield exc
# Configuration used by MethodAnalysis to build basic blocks: the branch
# opcode patterns, the basic-block class, and the helpers that compute
# instruction successors / exception tables.
BO = {"BasicOPCODES": dvm.BRANCH_DVM_OPCODES, "BasicClass": DVMBasicBlock,
      "Dnext": dvm.determineNext, "Dexception": dvm.determineException}
BO["BasicOPCODES_H"] = []
# Pre-compile the branch opcode name patterns once at import time.
for i in BO["BasicOPCODES"]:
    BO["BasicOPCODES_H"].append(re.compile(i))
class MethodAnalysis(object):
    """
    This class analyses in details a method of a class/dex file

    :param vm: the object which represent the dex file
    :param method: the original method
    :param tv: a virtual object to get access to tainted information
    :type vm: a :class:`DalvikVMFormat` object
    :type method: a :class:`EncodedMethod` object
    """

    def __init__(self, vm, method, tv):
        self.__vm = vm
        self.method = method
        self.tainted = tv
        self.basic_blocks = BasicBlocks(self.__vm, self.tainted)
        self.exceptions = Exceptions(self.__vm)
        code = self.method.get_code()
        if code is None:
            # Abstract/native methods carry no code: keep empty structures.
            return
        current_basic = BO["BasicClass"](
            0, self.__vm, self.method, self.basic_blocks)
        self.basic_blocks.push(current_basic)
        ##########################################################
        bc = code.get_bc()
        l = []   # every bytecode offset that must start a new basic block
        h = {}   # offset of a branch instruction -> its possible successors
        idx = 0
        debug("Parsing instructions")
        instructions = [i for i in bc.get_instructions()]
        # First pass: find branch instructions and collect their targets.
        for i in instructions:
            for j in BO["BasicOPCODES_H"]:
                try:
                    if j.match(i.get_name()) is not None:
                        v = BO["Dnext"](i, idx, self.method)
                        h[idx] = v
                        l.extend(v)
                        break
                except Exception:
                    # print("BasicOPCODES_H Error")
                    break
            idx += i.get_length()
        debug("Parsing exceptions")
        # Exception range starts and handler targets also begin blocks.
        excepts = BO["Dexception"](self.__vm, self.method)
        for i in excepts:
            l.extend([i[0]])
            for handler in i[2:]:
                l.append(handler[1])
        debug("Creating basic blocks in %s" % self.method)
        # Second pass: split the instruction stream at every collected offset.
        idx = 0
        for i in instructions:
            # index is a destination
            if idx in l:
                if current_basic.get_nb_instructions() != 0:
                    current_basic = BO["BasicClass"](
                        current_basic.get_end(), self.__vm, self.method, self.basic_blocks)
                    self.basic_blocks.push(current_basic)
            current_basic.push(i)
            # index is a branch instruction
            if idx in h:
                current_basic = BO["BasicClass"](
                    current_basic.get_end(), self.__vm, self.method, self.basic_blocks)
                self.basic_blocks.push(current_basic)
            idx += i.get_length()
        # Drop a trailing empty block left by a final branch instruction.
        if current_basic.get_nb_instructions() == 0:
            self.basic_blocks.pop(-1)
        debug("Settings basic blocks childs")
        for i in self.basic_blocks.get():
            try:
                i.set_childs(h[i.end - i.get_last_length()])
            except KeyError:
                i.set_childs([])
        debug("Creating exceptions")
        # Create exceptions
        self.exceptions.add(excepts, self.basic_blocks)
        for i in self.basic_blocks.get():
            # setup exception by basic block
            i.set_exception_analysis(
                self.exceptions.get_exception(i.start, i.end - 1))
        del instructions
        del h, l

    def get_basic_blocks(self):
        """
        :rtype: a :class:`BasicBlocks` object
        """
        return self.basic_blocks

    def get_length(self):
        """
        :rtype: an integer which is the length of the code
        """
        # FIX: MethodAnalysis has no get_code(); fetch the code from the
        # underlying EncodedMethod (the old self.get_code() always raised
        # AttributeError).
        return self.method.get_code().get_length()

    def get_vm(self):
        """Return the DalvikVMFormat object this method belongs to."""
        return self.__vm

    def get_method(self):
        """Return the underlying EncodedMethod."""
        return self.method

    def get_local_variables(self):
        """Return the tainted local variables of this method."""
        return self.tainted.get_tainted_variables().get_local_variables(self.method)

    def show(self):
        # NOTE(review): debug output is disabled by this early return; the
        # code below is intentionally unreachable (kept for manual debugging).
        return
        print("METHOD", self.method.get_class_name(),
              self.method.get_name(), self.method.get_descriptor())
        for i in self.basic_blocks.get():
            print("\t", i)
            i.show()
            print("")

    def show_methods(self):
        # NOTE(review): also disabled; additionally the dead code below
        # references self.__bb, which is never defined on this class.
        return
        print("\t #METHODS :")
        for i in self.__bb:
            methods = i.get_methods()
            for method in methods:
                print("\t\t-->", method.get_class_name(),
                      method.get_name(), method.get_descriptor())
                for context in methods[method]:
                    print("\t\t\t |---|", context.details)

    def create_tags(self):
        """
        Create the tags for the method
        """
        self.tags = Tags()
        for i in self.tainted.get_tainted_packages().get_packages_by_method(self.method):
            self.tags.emit_by_classname(i)

    def get_tags(self):
        """
        Return the tags of the method

        :rtype: a :class:`Tags` object
        """
        return self.tags
# Identifiers of the available method-signature grammars and their
# configuration (consumed by the Signature machinery via
# VMAnalysis.get_method_signature).
SIGNATURE_L0_0 = "L0_0"
SIGNATURE_L0_1 = "L0_1"
SIGNATURE_L0_2 = "L0_2"
SIGNATURE_L0_3 = "L0_3"
SIGNATURE_L0_4 = "L0_4"
SIGNATURE_L0_5 = "L0_5"
SIGNATURE_L0_6 = "L0_6"
SIGNATURE_L0_0_L1 = "L0_0:L1"
SIGNATURE_L0_1_L1 = "L0_1:L1"
SIGNATURE_L0_2_L1 = "L0_2:L1"
SIGNATURE_L0_3_L1 = "L0_3:L1"
SIGNATURE_L0_4_L1 = "L0_4:L1"
SIGNATURE_L0_5_L1 = "L0_5:L1"
SIGNATURE_L0_0_L2 = "L0_0:L2"
SIGNATURE_L0_0_L3 = "L0_0:L3"
SIGNATURE_HEX = "hex"
SIGNATURE_SEQUENCE_BB = "sequencebb"
# Per-grammar options: "type" selects the variant, "arguments" restricts
# the matched package prefixes.
SIGNATURES = {
    SIGNATURE_L0_0: {"type": 0},
    SIGNATURE_L0_1: {"type": 1},
    SIGNATURE_L0_2: {"type": 2, "arguments": ["Landroid"]},
    SIGNATURE_L0_3: {"type": 2, "arguments": ["Ljava"]},
    SIGNATURE_L0_4: {"type": 2, "arguments": ["Landroid", "Ljava"]},
    SIGNATURE_L0_5: {"type": 3, "arguments": ["Landroid"]},
    SIGNATURE_L0_6: {"type": 3, "arguments": ["Ljava"]},
    SIGNATURE_SEQUENCE_BB: {},
    SIGNATURE_HEX: {},
}
class StringAnalysis(object):
def __init__(self, value):
self.value = value
self.xreffrom = set()
def AddXrefFrom(self, classobj, methodobj):
#debug("Added strings xreffrom for %s to %s" % (self.value, methodobj))
self.xreffrom.add((classobj, methodobj))
def get_xref_from(self):
return self.xreffrom
def __str__(self):
data = "XREFto for string %s in\n" % repr(self.value)
for ref_class, ref_method in self.xreffrom:
data += "%s:%s\n" % (ref_class.get_vm_class().get_name(),
ref_method)
return data
class MethodClassAnalysis(object):
    """Cross references (callers and callees) of a single method."""

    def __init__(self, method):
        self.method = method
        self.xrefto = set()
        self.xreffrom = set()

    def AddXrefTo(self, classobj, methodobj):
        """Record a call made *by* this method to *methodobj*."""
        self.xrefto.add((classobj, methodobj))

    def AddXrefFrom(self, classobj, methodobj):
        """Record a call made *to* this method from *methodobj*."""
        self.xreffrom.add((classobj, methodobj))

    def get_xref_from(self):
        """Return the set of (ClassAnalysis, method) callers."""
        return self.xreffrom

    def get_xref_to(self):
        """Return the set of (ClassAnalysis, method) callees."""
        return self.xrefto

    def __str__(self):
        parts = ["XREFto for %s\n" % self.method]
        for cls, meth in self.xrefto:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        parts.append("XREFFrom for %s\n" % self.method)
        for cls, meth in self.xreffrom:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        return "".join(parts)
class FieldClassAnalysis(object):
    """Cross references (readers and writers) of a single field."""

    def __init__(self, field):
        self.field = field
        self.xrefread = set()
        self.xrefwrite = set()

    def AddXrefRead(self, classobj, methodobj):
        """Record that *methodobj* reads this field."""
        self.xrefread.add((classobj, methodobj))

    def AddXrefWrite(self, classobj, methodobj):
        """Record that *methodobj* writes this field."""
        self.xrefwrite.add((classobj, methodobj))

    def get_xref_read(self):
        """Return the set of (ClassAnalysis, method) readers."""
        return self.xrefread

    def get_xref_write(self):
        """Return the set of (ClassAnalysis, method) writers."""
        return self.xrefwrite

    def __str__(self):
        parts = ["XREFRead for %s\n" % self.field]
        for cls, meth in self.xrefread:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        parts.append("XREFWrite for %s\n" % self.field)
        for cls, meth in self.xrefwrite:
            parts.append("in\n")
            parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        return "".join(parts)
# Kinds of class cross-references: a ``new-instance`` of the class vs. any
# other usage (method call, const-class, ...).
REF_NEW_INSTANCE = 0
REF_CLASS_USAGE = 1
class ClassAnalysis(object):
    """Cross-reference information for a single class of the dex file."""

    def __init__(self, classobj):
        self._class = classobj
        self._methods = {}  # method -> MethodClassAnalysis
        self._fields = {}   # field -> FieldClassAnalysis
        self.xrefto = collections.defaultdict(set)
        self.xreffrom = collections.defaultdict(set)

    def get_method_analysis(self, method):
        """Return the MethodClassAnalysis for *method*, or None."""
        return self._methods.get(method)

    def get_field_analysis(self, field):
        """Return the FieldClassAnalysis for *field*, or None."""
        return self._fields.get(field)

    def AddFXrefRead(self, method, classobj, field):
        """Record that *method* reads *field*."""
        analysis = self._fields.get(field)
        if analysis is None:
            analysis = FieldClassAnalysis(field)
            self._fields[field] = analysis
        analysis.AddXrefRead(classobj, method)

    def AddFXrefWrite(self, method, classobj, field):
        """Record that *method* writes *field*."""
        analysis = self._fields.get(field)
        if analysis is None:
            analysis = FieldClassAnalysis(field)
            self._fields[field] = analysis
        analysis.AddXrefWrite(classobj, method)

    def AddMXrefTo(self, method1, classobj, method2):
        """Record a call from *method1* to *method2*."""
        analysis = self._methods.get(method1)
        if analysis is None:
            analysis = MethodClassAnalysis(method1)
            self._methods[method1] = analysis
        analysis.AddXrefTo(classobj, method2)

    def AddMXrefFrom(self, method1, classobj, method2):
        """Record a call to *method1* from *method2*."""
        analysis = self._methods.get(method1)
        if analysis is None:
            analysis = MethodClassAnalysis(method1)
            self._methods[method1] = analysis
        analysis.AddXrefFrom(classobj, method2)

    def AddXrefTo(self, ref_kind, classobj, methodobj):
        """Record that this class references *classobj* (kind + method)."""
        self.xrefto[classobj].add((ref_kind, methodobj))

    def AddXrefFrom(self, ref_kind, classobj, methodobj):
        """Record that *classobj* references this class (kind + method)."""
        self.xreffrom[classobj].add((ref_kind, methodobj))

    def get_xref_from(self):
        """Return {ClassAnalysis: {(ref kind, method), ...}} of referrers."""
        return self.xreffrom

    def get_xref_to(self):
        """Return {ClassAnalysis: {(ref kind, method), ...}} of referees."""
        return self.xrefto

    def get_vm_class(self):
        """Return the underlying ClassDefItem."""
        return self._class

    def __str__(self):
        parts = ["XREFto for %s\n" % self._class]
        for ref_class in self.xrefto:
            parts.append(str(ref_class.get_vm_class().get_name()) + " ")
            parts.append("in\n")
            for ref_kind, ref_method in self.xrefto[ref_class]:
                parts.append("%d %s\n" % (ref_kind, ref_method))
            parts.append("\n")
        parts.append("XREFFrom for %s\n" % self._class)
        for ref_class in self.xreffrom:
            parts.append(str(ref_class.get_vm_class().get_name()) + " ")
            parts.append("in\n")
            for ref_kind, ref_method in self.xreffrom[ref_class]:
                parts.append("%d %s\n" % (ref_kind, ref_method))
            parts.append("\n")
        return "".join(parts)
class newVMAnalysis(object):
    """
    Walk every method of a dex file and build cross references between
    classes, methods, fields and strings.

    :param vm: the object which represents the dex file
    :type vm: a :class:`DalvikVMFormat` object
    """

    def __init__(self, vm):
        self.vm = vm
        self.classes = {}   # class name -> ClassAnalysis
        self.strings = {}   # string value -> StringAnalysis

        for current_class in self.vm.get_classes():
            self.classes[current_class.get_name()] = ClassAnalysis(
                current_class)

    def create_xref(self):
        # Scan every instruction of every method and record cross
        # references. The opcode values tested below come from the Dalvik
        # bytecode set (const-class, new-instance, invoke-*, const-string,
        # instance/static field accessors).
        debug("Creating XREF/DREF")

        instances_class_name = list(self.classes.keys())
        for current_class in self.vm.get_classes():
            for current_method in current_class.get_methods():
                debug("Creating XREF for %s" % current_method)

                code = current_method.get_code()
                if code is None:
                    # Abstract/native method: nothing to scan.
                    continue

                # NOTE(review): ``off`` is incremented but never read.
                off = 0
                bc = code.get_bc()
                for instruction in bc.get_instructions():
                    op_value = instruction.get_op_value()
                    # 0x1c = const-class, 0x22 = new-instance
                    if op_value in [0x1c, 0x22]:
                        idx_type = instruction.get_ref_kind()
                        type_info = self.vm.get_cm_type(idx_type)

                        # Internal xref related to class manipulation
                        if type_info in instances_class_name and type_info != current_class.get_name():
                            # new instance
                            if op_value == 0x22:
                                self.classes[current_class.get_name()].AddXrefTo(
                                    REF_NEW_INSTANCE, self.classes[type_info], current_method)
                                self.classes[type_info].AddXrefFrom(REF_NEW_INSTANCE, self.classes[
                                    current_class.get_name()], current_method)
                            # class reference
                            else:
                                self.classes[current_class.get_name()].AddXrefTo(
                                    REF_CLASS_USAGE, self.classes[type_info], current_method)
                                self.classes[type_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
                                    current_class.get_name()], current_method)

                    # 0x6e-0x72 = invoke-*, 0x74-0x78 = invoke-*/range
                    elif ((op_value >= 0x6e and op_value <= 0x72) or
                          (op_value >= 0x74 and op_value <= 0x78)):
                        idx_meth = instruction.get_ref_kind()
                        method_info = self.vm.get_cm_method(idx_meth)
                        if method_info:
                            class_info = method_info[0]

                            method_item = self.vm.get_method_descriptor(
                                method_info[0], method_info[1], ''.join(method_info[2]))
                            if method_item:
                                self.classes[current_class.get_name()].AddMXrefTo(
                                    current_method, self.classes[class_info], method_item)
                                self.classes[class_info].AddMXrefFrom(
                                    method_item, self.classes[current_class.get_name()], current_method)

                                # Internal xref related to class manipulation
                                if class_info in instances_class_name and class_info != current_class.get_name():
                                    self.classes[current_class.get_name()].AddXrefTo(
                                        REF_CLASS_USAGE, self.classes[class_info], method_item)
                                    self.classes[class_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
                                        current_class.get_name()], current_method)

                    # 0x1a = const-string, 0x1b = const-string/jumbo
                    elif op_value >= 0x1a and op_value <= 0x1b:
                        string_value = self.vm.get_cm_string(
                            instruction.get_ref_kind())
                        if string_value not in self.strings:
                            self.strings[string_value] = StringAnalysis(
                                string_value)

                        self.strings[string_value].AddXrefFrom(
                            self.classes[current_class.get_name()], current_method)

                    # 0x52-0x6d = iget/iput/sget/sput family
                    elif op_value >= 0x52 and op_value <= 0x6d:
                        idx_field = instruction.get_ref_kind()
                        field_info = self.vm.get_cm_field(idx_field)
                        field_item = self.vm.get_field_descriptor(
                            field_info[0], field_info[2], field_info[1])
                        if field_item:
                            # read access to a field (iget* / sget*)
                            if (op_value >= 0x52 and op_value <= 0x58) or (op_value >= 0x60 and op_value <= 0x66):
                                self.classes[current_class.get_name()].AddFXrefRead(
                                    current_method, self.classes[current_class.get_name()], field_item)
                            # write access to a field (iput* / sput*)
                            else:
                                self.classes[current_class.get_name()].AddFXrefWrite(
                                    current_method, self.classes[current_class.get_name()], field_item)

                    off += instruction.get_length()

    def get_method(self, method):
        # Build a fresh, untainted MethodAnalysis for *method*.
        return MethodAnalysis(self.vm, method, None)

    def get_method_by_name(self, class_name, method_name, method_descriptor):
        # Linear search over the class's methods; None if not found.
        if class_name in self.classes:
            for method in self.classes[class_name].get_vm_class().get_methods():
                if method.get_name() == method_name and method.get_descriptor() == method_descriptor:
                    return method
        return None

    def is_class_present(self, class_name):
        # True if the class is defined in this dex file.
        return class_name in self.classes

    def get_class_analysis(self, class_name):
        # The ClassAnalysis for *class_name*, or None.
        return self.classes.get(class_name)

    def get_strings_analysis(self):
        # Mapping string value -> StringAnalysis.
        return self.strings
class VMAnalysis(object):
    """
    This class analyses a dex file

    :param _vm: the object which represent the dex file
    :type _vm: a :class:`DalvikVMFormat` object

    :Example:
          VMAnalysis( DalvikVMFormat( read("toto.dex", binary=False) ) )
    """

    def __init__(self, vm):
        self.vm = vm

        self.tainted_variables = TaintedVariables(self.vm)
        self.tainted_packages = TaintedPackages(self.vm)

        self.tainted = {"variables": self.tainted_variables,
                        "packages": self.tainted_packages,
                        }

        # Created lazily on the first get_method_signature call.
        self.signature = None

        # Register every field as a tainted variable up front.
        for i in self.vm.get_all_fields():
            self.tainted_variables.add(
                [i.get_class_name(), i.get_descriptor(), i.get_name()], TAINTED_FIELD)

        # Eagerly analyse every method (contrast with uVMAnalysis, which
        # does this on the fly).
        self.methods = []
        self.hmethods = {}
        self.__nmethods = {}
        for i in self.vm.get_methods():
            x = MethodAnalysis(self.vm, i, self)
            self.methods.append(x)
            self.hmethods[i] = x
            self.__nmethods[i.get_name()] = x

    def get_vm(self):
        # The underlying DalvikVMFormat object.
        return self.vm

    def get_method(self, method):
        """
        Return an analysis method

        :param method: a classical method object
        :type method: an :class:`EncodedMethod` object

        :rtype: a :class:`MethodAnalysis` object
        """
        return self.hmethods[method]

    def get_methods(self):
        """
        Return each analysis method

        :rtype: a :class:`MethodAnalysis` object
        """
        for i in self.hmethods:
            yield self.hmethods[i]

    def get_method_signature(self, method, grammar_type="", options={}, predef_sign=""):
        """
        Return a specific signature for a specific method

        :param method: a reference to method from a vm class
        :type method: a :class:`EncodedMethod` object

        :param grammar_type: the type of the signature (optional)
        :type grammar_type: string

        :param options: the options of the signature (optional)
        :param options: dict

        :param predef_sign: used a predefined signature (optional)
        :type predef_sign: string

        :rtype: a :class:`Sign` object
        """
        # NOTE(review): ``options={}`` is a mutable default; it appears to be
        # passed through without mutation here, but confirm before relying
        # on it.
        if self.signature is None:
            self.signature = Signature(self)

        if predef_sign != "":
            # Expand a predefined signature name (e.g. "L0_4:L1") into a
            # grammar string plus its per-level options.
            g = ""
            o = {}

            for i in predef_sign.split(":"):
                if "_" in i:
                    g += "L0:"
                    o["L0"] = SIGNATURES[i]
                else:
                    g += i
                    g += ":"

            return self.signature.get_method(self.get_method(method), g[:-1], o)
        else:
            return self.signature.get_method(self.get_method(method), grammar_type, options)

    def get_permissions(self, permissions_needed):
        """
        Return the permissions used

        :param permissions_needed: a list of restricted permissions to get ([] returns all permissions)
        :type permissions_needed: list

        :rtype: a dictionary of permissions paths
        """
        permissions = {}

        # Merge the permissions inferred from package calls and from
        # tainted variables.
        permissions.update(self.get_tainted_packages(
        ).get_permissions(permissions_needed))
        permissions.update(self.get_tainted_variables(
        ).get_permissions(permissions_needed))

        return permissions

    def get_permissions_method(self, method):
        # Union of the field-based and call-based permissions of *method*.
        permissions_f = self.get_tainted_packages().get_permissions_method(method)
        permissions_v = self.get_tainted_variables().get_permissions_method(method)
        all_permissions_of_method = permissions_f.union(permissions_v)

        return list(all_permissions_of_method)

    def get_tainted_variables(self):
        """
        Return the tainted variables

        :rtype: a :class:`TaintedVariables` object
        """
        return self.tainted_variables

    def get_tainted_packages(self):
        """
        Return the tainted packages

        :rtype: a :class:`TaintedPackages` object
        """
        return self.tainted_packages

    def get_tainted_fields(self):
        # All tainted fields, delegated to TaintedVariables.
        return self.get_tainted_variables().get_fields()

    def get_tainted_field(self, class_name, name, descriptor):
        """
        Return a specific tainted field

        :param class_name: the name of the class
        :param name: the name of the field
        :param descriptor: the descriptor of the field
        :type class_name: string
        :type name: string
        :type descriptor: string

        :rtype: a :class:`TaintedVariable` object
        """
        return self.get_tainted_variables().get_field(class_name, name, descriptor)
class uVMAnalysis(VMAnalysis):
    """
    Lazy ("on the fly") dex analysis: method analyses are created on
    demand instead of being precomputed, which is quicker for one-shot use.

    :param vm: the object which represent the dex file
    :type vm: a :class:`DalvikVMFormat` object

    :Example:
          uVMAnalysis( DalvikVMFormat( read("toto.dex", binary=False) ) )
    """

    def __init__(self, vm):
        self.vm = vm
        self.tainted_variables = TaintedVariables(vm)
        self.tainted_packages = TaintedPackages(vm)
        self.tainted = {
            "variables": self.tainted_variables,
            "packages": self.tainted_packages,
        }
        self.signature = None
        # Set to True once every method has been analysed (which is what
        # populates the tainted registries).
        self.resolve = False

    def get_methods(self):
        self.resolve = True
        for method in self.vm.get_methods():
            yield MethodAnalysis(self.vm, method, self)

    def get_method(self, method):
        return MethodAnalysis(self.vm, method, None)

    def get_vm(self):
        return self.vm

    def _resolve(self):
        # Force one full pass over the methods so taint data is complete.
        if not self.resolve:
            for _ in self.get_methods():
                pass

    def get_tainted_packages(self):
        self._resolve()
        return self.tainted_packages

    def get_tainted_variables(self):
        self._resolve()
        return self.tainted_variables
def is_ascii_obfuscation(vm):
    """Return True if any class or method name looks non-ASCII obfuscated."""
    for klass in vm.get_classes():
        if is_ascii_problem(klass.get_name()):
            return True
        if any(is_ascii_problem(m.get_name()) for m in klass.get_methods()):
            return True
    return False
| 32.462714 | 169 | 0.551957 |
import re
import collections
from androguard.core.analysis.sign import Signature, TAINTED_PACKAGE_CREATE, \
TAINTED_PACKAGE_CALL
from androguard.core.androconf import debug, is_ascii_problem,\
load_api_specific_resource_module
from androguard.core.bytecodes import dvm
# Maps every Dalvik field-access opcode mnemonic to its direction:
# "R" for reads (iget*/sget*), "W" for writes (iput*/sput*).
# The comprehension preserves the historical insertion order:
# iget*, iput*, sget*, sput*, each with the plain form first and then the
# -wide/-object/-boolean/-byte/-char/-short variants.
DVM_FIELDS_ACCESS = {
    prefix + kind + suffix: flag
    for prefix in ("i", "s")
    for kind, flag in (("get", "R"), ("put", "W"))
    for suffix in ("", "-wide", "-object", "-boolean", "-byte", "-char", "-short")
}
class ContextField(object):
    """A field access context: an access mode plus a list of detail entries."""

    def __init__(self, mode):
        self.mode = mode
        self.details = []

    def set_details(self, details):
        """Append every element of *details* to the stored list."""
        self.details.extend(details)
class ContextMethod(object):
    """A method call context: a list of detail entries."""

    def __init__(self):
        self.details = []

    def set_details(self, details):
        """Append every element of *details* to the stored list."""
        self.details.extend(details)
class ExternalFM(object):
    """An external field/method reference: class name, name and descriptor."""

    def __init__(self, class_name, name, descriptor):
        self.class_name = class_name
        self.name = name
        self.descriptor = descriptor

    def get_class_name(self):
        """Return the owning class name."""
        return self.class_name

    def get_name(self):
        """Return the field/method name."""
        return self.name

    def get_descriptor(self):
        """Return the field/method descriptor."""
        return self.descriptor
class ToString(object):
    """
    Build a compact signature string by matching pushed names against
    regex buckets.

    *tab* maps an output character to a list of regex pattern strings;
    every pushed name appends the character of each bucket whose pattern
    matches it, with consecutive 'O' characters collapsed into one.
    """

    def __init__(self, tab):
        self.__tab = tab
        self.__re_tab = {
            char: [re.compile(expr) for expr in exprs]
            for char, exprs in tab.items()
        }
        self.__string = ""

    def push(self, name):
        for char, patterns in self.__re_tab.items():
            for pattern in patterns:
                if pattern.match(name) is None:
                    continue
                # Collapse runs of 'O' into a single occurrence.
                if self.__string and char == 'O' and self.__string[-1] == 'O':
                    continue
                self.__string += char

    def get_string(self):
        """Return the accumulated signature string."""
        return self.__string
class BreakBlock(object):
    """A linear run of instructions spanning [start, end) byte offsets."""

    def __init__(self, _vm, idx):
        self._vm = _vm
        self._start = idx
        self._end = idx
        self._ins = []
        self._ops = []
        self._fields = {}
        self._methods = {}

    def get_ops(self):
        return self._ops

    def get_fields(self):
        return self._fields

    def get_methods(self):
        return self._methods

    def push(self, ins):
        """Append *ins* and advance the end offset by its encoded length."""
        self._ins.append(ins)
        self._end += ins.get_length()

    def get_start(self):
        return self._start

    def get_end(self):
        return self._end

    def show(self):
        for instruction in self._ins:
            instruction.show(0)
class DVMBasicBlock(object):
    """
    A basic block of a method: a straight-line run of Dalvik instructions
    spanning the byte offsets [start, end), linked to its predecessor
    ("fathers") and successor ("childs") blocks.  As instructions are
    pushed, field/string/package accesses are reported to the context's
    tainted registries.
    """

    def __init__(self, start, vm, method, context):
        self.__vm = vm
        self.method = method
        self.context = context
        self.last_length = 0
        self.nb_instructions = 0
        self.fathers = []
        self.childs = []
        self.start = start
        self.end = self.start
        # offset -> resolved data for fill-array-data / switch payloads
        self.special_ins = {}
        self.name = "%s-BB@0x%x" % (self.method.get_name(), self.start)
        self.exception_analysis = None
        self.tainted_variables = self.context.get_tainted_variables()
        self.tainted_packages = self.context.get_tainted_packages()
        self.notes = []

    def get_notes(self):
        return self.notes

    def set_notes(self, value):
        # Replaces all existing notes with the single given one.
        self.notes = [value]

    def add_note(self, note):
        self.notes.append(note)

    def clear_notes(self):
        self.notes = []

    def get_instructions(self):
        """Return the instructions whose offsets fall inside [start, end)."""
        tmp_ins = []
        idx = 0
        for i in self.method.get_instructions():
            if idx >= self.start and idx < self.end:
                tmp_ins.append(i)
            idx += i.get_length()
        return tmp_ins

    def get_nb_instructions(self):
        return self.nb_instructions

    def get_method(self):
        return self.method

    def get_name(self):
        return "%s-BB@0x%x" % (self.method.get_name(), self.start)

    def get_start(self):
        return self.start

    def get_end(self):
        return self.end

    def get_last(self):
        """Return the last instruction of the block."""
        return self.get_instructions()[-1]

    def get_next(self):
        """Return the successor list: (end-of-ins offset, target offset, block)."""
        return self.childs

    def get_prev(self):
        """Return the predecessor list: (target offset, offset, block)."""
        return self.fathers

    def set_fathers(self, f):
        self.fathers.append(f)

    def get_last_length(self):
        return self.last_length

    def set_childs(self, values):
        """
        Resolve successor offsets (*values*) to blocks and cross-link the
        father references.  An empty list means fall-through to the block
        starting right after this one; -1 entries are skipped.
        """
        if values == []:
            next_block = self.context.get_basic_block(self.end + 1)
            if next_block is not None:
                self.childs.append(
                    (self.end - self.get_last_length(), self.end, next_block))
        else:
            for i in values:
                if i != -1:
                    next_block = self.context.get_basic_block(i)
                    if next_block is not None:
                        self.childs.append(
                            (self.end - self.get_last_length(), i, next_block))
        for c in self.childs:
            if c[2] is not None:
                c[2].set_fathers((c[1], c[0], self))

    def push(self, i):
        """
        Append instruction *i*, advance the end offset, and report any
        field/method/string/new-instance access to the tainted registries.
        NOTE(review): the blanket try/except silently swallows every error
        raised during taint bookkeeping -- kept as-is.
        """
        try:
            self.nb_instructions += 1
            idx = self.end
            self.last_length = i.get_length()
            self.end += self.last_length
            op_value = i.get_op_value()
            # 0x52..0x6d: iget*/iput*/sget*/sput* -> tainted field access;
            # DVM_FIELDS_ACCESS gives "R" or "W" for the mnemonic.
            if (op_value >= 0x52 and op_value <= 0x6d):
                desc = self.__vm.get_cm_field(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(TAINTED_FIELD, desc, DVM_FIELDS_ACCESS[
                                                     i.get_name()][0], idx, self.method)
            # 0x6e..0x72 / 0x74..0x78: invoke-* -> tainted package call
            elif (op_value >= 0x6e and op_value <= 0x72) or (op_value >= 0x74 and op_value <= 0x78):
                idx_meth = i.get_ref_kind()
                method_info = self.__vm.get_cm_method(idx_meth)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        method_info[0], TAINTED_PACKAGE_CALL, idx, self.method, idx_meth)
            # 0x22: new-instance -> tainted package creation
            elif op_value == 0x22:
                idx_type = i.get_ref_kind()
                type_info = self.__vm.get_cm_type(idx_type)
                if self.tainted_packages is not None:
                    self.tainted_packages.push_info(
                        type_info, TAINTED_PACKAGE_CREATE, idx, self.method, None)
            # 0x1a..0x1b: const-string -> tainted string read
            elif (op_value >= 0x1a and op_value <= 0x1b):
                string_name = self.__vm.get_cm_string(i.get_ref_kind())
                if self.tainted_variables is not None:
                    self.tainted_variables.push_info(
                        TAINTED_STRING, string_name, "R", idx, self.method)
            # 0x26 / 0x2b / 0x2c: fill-array-data and switch instructions
            # carry a payload elsewhere in the bytecode; resolve and cache it.
            elif op_value == 0x26 or (op_value >= 0x2b and op_value <= 0x2c):
                code = self.method.get_code().get_bc()
                self.special_ins[idx] = code.get_ins_off(
                    idx + i.get_ref_off() * 2)
        except:
            pass

    def get_special_ins(self, idx):
        """Return the payload instruction cached for offset *idx*, or None."""
        try:
            return self.special_ins[idx]
        except:
            return None

    def get_exception_analysis(self):
        return self.exception_analysis

    def set_exception_analysis(self, exception_analysis):
        self.exception_analysis = exception_analysis
# Taint-source categories used as keys by TaintedVariables.
TAINTED_LOCAL_VARIABLE = 0
TAINTED_FIELD = 1
TAINTED_STRING = 2
class PathVar(object):
    """
    One access path to a tainted variable: the access flag, the
    instruction offset, the destination method index and the tainted
    variable object itself.
    """

    def __init__(self, access, idx, dst_idx, info_obj):
        self.access_flag = access
        self.idx = idx
        self.dst_idx = dst_idx
        self.info_obj = info_obj

    def get_var_info(self):
        """Return the tainted variable's display info."""
        return self.info_obj.get_info()

    def get_access_flag(self):
        return self.access_flag

    def get_src(self, cm):
        """Resolve the source method triple via class manager *cm*."""
        ref = cm.get_method_ref(self.idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_dst(self, cm):
        """Resolve the destination method triple via class manager *cm*."""
        ref = cm.get_method_ref(self.dst_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_idx(self):
        return self.idx
class TaintedVariable(object):
    """
    One tainted item (field, string or local variable) plus every
    (access, instruction-offset) pair recorded per method index.
    """

    def __init__(self, var, _type):
        self.var = var
        self.type = _type
        # method idx -> list of (access_flag, instruction offset)
        self.paths = {}
        self.__cache = []

    def get_type(self):
        return self.type

    def get_info(self):
        # Fields: reorder the stored triple for display.  NOTE(review):
        # the element order of `var` must agree with the key layouts used
        # in TaintedVariables.add()/get_field() -- confirm against callers.
        if self.type == TAINTED_FIELD:
            return [self.var[0], self.var[2], self.var[1]]
        return self.var

    def push(self, access, idx, ref):
        m_idx = ref.get_method_idx()
        if m_idx not in self.paths:
            self.paths[m_idx] = []
        self.paths[m_idx].append((access, idx))

    def get_paths_access(self, mode):
        # NOTE(review): this generator looks broken -- self.paths[i] is a
        # list and j is one of its (access, idx) tuples, so the indexing
        # self.paths[i][j] raises TypeError when consumed.  Kept as-is.
        for i in self.paths:
            for j in self.paths[i]:
                for k, v in self.paths[i][j]:
                    if k in mode:
                        yield i, j, k, v

    def get_paths(self):
        # Flattened [(access, idx), method_idx] pairs, memoised after the
        # first call.
        if self.__cache != []:
            return self.__cache
        for i in self.paths:
            for j in self.paths[i]:
                self.__cache.append([j, i])
        return self.__cache

    def get_paths_length(self):
        return len(self.paths)

    def show_paths(self, vm):
        # show_PathVariable is a no-op display helper in this build.
        show_PathVariable(vm, self.get_paths())
class TaintedVariables(object):
    """
    Registry of tainted local variables, fields and strings for one dex,
    together with the AOSP permission tables used to map field accesses
    to permissions.
    """

    def __init__(self, _vm):
        self.__vm = _vm
        # taint category -> {key -> TaintedVariable}
        self.__vars = {
            TAINTED_LOCAL_VARIABLE: {},
            TAINTED_FIELD: {},
            TAINTED_STRING: {},
        }
        # method idx -> set of TaintedVariable touched inside that method
        self.__cache_field_by_method = {}
        self.__cache_string_by_method = {}
        self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
            "aosp_permissions", self.__vm.get_api_version())
        self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
            "api_permission_mappings", self.__vm.get_api_version())

    def get_string(self, s):
        """Return the TaintedVariable for string *s*, or None."""
        try:
            return self.__vars[TAINTED_STRING][s]
        except KeyError:
            return None

    def get_field(self, class_name, name, descriptor):
        """Return the TaintedVariable for a field, or None.

        NOTE(review): the key here is class+descriptor+name while add()
        concatenates var[0]+var[1]+var[2]; the two must agree on the
        element order of the field triple -- confirm against callers.
        """
        key = class_name + descriptor + name
        try:
            return self.__vars[TAINTED_FIELD][key]
        except KeyError:
            return None

    def toPathVariable(self, obj):
        """Convert a TaintedVariable's flattened paths into PathVar objects."""
        z = []
        for i in obj.get_paths():
            access, idx = i[0]
            m_idx = i[1]
            z.append(PathVar(access, idx, m_idx, obj))
        return z

    def get_permissions_method(self, method):
        """Return the permission names implied by field accesses in *method*."""
        permissions = set()
        for f, f1 in self.get_fields():
            data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
            if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
                for path in f.get_paths():
                    m_idx = path[1]
                    if m_idx == method.get_idx():
                        permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
                                           "AOSP_PERMISSIONS_BY_FIELDS"][data])
        return permissions

    def get_permissions(self, permissions_needed):
        """
        Return {permission name: [PathVar, ...]} restricted to
        *permissions_needed* ([] means every known AOSP permission).
        """
        permissions = {}
        pn = set(permissions_needed)
        if permissions_needed == []:
            pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())
        for f, _ in self.get_fields():
            data = "%s-%s-%s" % (f.var[0], f.var[2], f.var[1])
            if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_FIELDS"].keys()):
                perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
                    "AOSP_PERMISSIONS_BY_FIELDS"][data])
                for p in perm_intersection:
                    try:
                        permissions[p].extend(self.toPathVariable(f))
                    except KeyError:
                        permissions[p] = []
                        permissions[p].extend(self.toPathVariable(f))
        return permissions

    def get_strings(self):
        """Yield (TaintedVariable, string) pairs."""
        for i in self.__vars[TAINTED_STRING]:
            yield self.__vars[TAINTED_STRING][i], i

    def get_fields(self):
        """Yield (TaintedVariable, key) pairs for every tainted field."""
        for i in self.__vars[TAINTED_FIELD]:
            yield self.__vars[TAINTED_FIELD][i], i

    def get_strings_by_method(self, method):
        """Return {TaintedVariable: [(access, idx), ...]} for *method*."""
        z = {}
        try:
            for i in self.__cache_string_by_method[method.get_method_idx()]:
                z[i] = []
                for j in i.get_paths():
                    if method.get_method_idx() == j[1]:
                        z[i].append(j[0])
            return z
        except:
            return z

    def get_fields_by_method(self, method):
        """Return {TaintedVariable: [(access, idx), ...]} for *method*."""
        z = {}
        try:
            for i in self.__cache_field_by_method[method.get_method_idx()]:
                z[i] = []
                for j in i.get_paths():
                    if method.get_method_idx() == j[1]:
                        z[i].append(j[0])
            return z
        except:
            return z

    def add(self, var, _type, _method=None):
        """Register a tainted variable of the given category (idempotent)."""
        if _type == TAINTED_FIELD:
            key = var[0] + var[1] + var[2]
            if key not in self.__vars[TAINTED_FIELD]:
                self.__vars[TAINTED_FIELD][key] = TaintedVariable(var, _type)
        elif _type == TAINTED_STRING:
            if var not in self.__vars[TAINTED_STRING]:
                self.__vars[TAINTED_STRING][var] = TaintedVariable(var, _type)
        elif _type == TAINTED_LOCAL_VARIABLE:
            if _method not in self.__vars[TAINTED_LOCAL_VARIABLE]:
                self.__vars[TAINTED_LOCAL_VARIABLE][_method] = {}
            if var not in self.__vars[TAINTED_LOCAL_VARIABLE][_method]:
                self.__vars[TAINTED_LOCAL_VARIABLE][_method][
                    var] = TaintedVariable(var, _type)

    def push_info(self, _type, var, access, idx, ref):
        """Register an access (*access* at offset *idx* in method *ref*)."""
        if _type == TAINTED_FIELD:
            self.add(var, _type)
            key = var[0] + var[1] + var[2]
            self.__vars[_type][key].push(access, idx, ref)
            method_idx = ref.get_method_idx()
            if method_idx not in self.__cache_field_by_method:
                self.__cache_field_by_method[method_idx] = set()
            self.__cache_field_by_method[method_idx].add(
                self.__vars[TAINTED_FIELD][key])
        elif _type == TAINTED_STRING:
            self.add(var, _type)
            self.__vars[_type][var].push(access, idx, ref)
            method_idx = ref.get_method_idx()
            if method_idx not in self.__cache_string_by_method:
                self.__cache_string_by_method[method_idx] = set()
            self.__cache_string_by_method[method_idx].add(
                self.__vars[TAINTED_STRING][var])
def show_Path(vm, path):
    """
    Resolve the source/destination of *path* through the class manager.

    NOTE(review): in this build the resolved triples are computed but not
    printed -- the body only exercises the class-manager lookups.
    """
    cm = vm.get_class_manager()
    if isinstance(path, PathVar):
        dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
    else:
        if path.get_access_flag() == TAINTED_PACKAGE_CALL:
            src_class_name, src_method_name, src_descriptor = path.get_src(cm)
            dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
        else:
            src_class_name, src_method_name, src_descriptor = path.get_src(cm)
def get_Path(vm, path):
    """
    Return a dict describing *path*: "src"/"dst" method triples (as
    space-joined strings) and the instruction offset "idx".
    """
    cm = vm.get_class_manager()
    desc = {}

    if isinstance(path, PathVar):
        dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
        info_var = path.get_var_info()
        desc["src"] = "%s" % info_var
        desc["dst"] = "%s %s %s" % (
            dst_class_name, dst_method_name, dst_descriptor)
        desc["idx"] = path.get_idx()
        return desc

    src_class_name, src_method_name, src_descriptor = path.get_src(cm)
    desc["src"] = "%s %s %s" % (
        src_class_name, src_method_name, src_descriptor)
    # Only call paths have a destination method.
    if path.get_access_flag() == TAINTED_PACKAGE_CALL:
        dst_class_name, dst_method_name, dst_descriptor = path.get_dst(cm)
        desc["dst"] = "%s %s %s" % (
            dst_class_name, dst_method_name, dst_descriptor)
    desc["idx"] = path.get_idx()
    return desc
def show_Paths(vm, paths):
    """Display (via show_Path) every path in *paths*."""
    for current in paths:
        show_Path(vm, current)
def get_Paths(vm, paths):
    """Return the get_Path() dict of every path in *paths*."""
    return [get_Path(vm, current) for current in paths]
def show_PathVariable(vm, paths):
    # Debug display intentionally disabled: the early return keeps the
    # print loop below as dead code.
    return
    for path in paths:
        access, idx = path[0]
        m_idx = path[1]
        method = vm.get_cm_method(m_idx)
        print("%s %x %s->%s %s" %
              (access, idx, method[0], method[1], method[2][0] + method[2][1]))
class PathP(object):
    """
    One access path to a tainted package: the access flag, the
    instruction offset and the source/destination method indexes.
    """

    def __init__(self, access, idx, src_idx, dst_idx):
        self.access_flag = access
        self.idx = idx
        self.src_idx = src_idx
        self.dst_idx = dst_idx

    def get_access_flag(self):
        return self.access_flag

    def get_dst(self, cm):
        """Resolve the destination method triple via class manager *cm*."""
        ref = cm.get_method_ref(self.dst_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_src(self, cm):
        """Resolve the source method triple via class manager *cm*."""
        ref = cm.get_method_ref(self.src_idx)
        return ref.get_class_name(), ref.get_name(), ref.get_descriptor()

    def get_idx(self):
        return self.idx

    def get_src_idx(self):
        return self.src_idx

    def get_dst_idx(self):
        return self.dst_idx
class TaintedPackage(object):
    """
    One referenced package/class: every PathP recorded for it, grouped by
    access category (creation vs. call).
    """

    def __init__(self, vm, name):
        self.vm = vm
        self.name = name
        self.paths = {TAINTED_PACKAGE_CREATE: [], TAINTED_PACKAGE_CALL: []}

    def get_name(self):
        return self.name

    def gets(self):
        return self.paths

    def push(self, access, idx, src_idx, dst_idx):
        """Record and return a new PathP for this package."""
        p = PathP(access, idx, src_idx, dst_idx)
        self.paths[access].append(p)
        return p

    def get_objects_paths(self):
        """Return the creation (new-instance) paths."""
        return self.paths[TAINTED_PACKAGE_CREATE]

    def search_method(self, name, descriptor):
        """
        Return call paths whose destination method name/descriptor match
        the given regexes.
        """
        l = []
        m_name = re.compile(name)
        m_descriptor = re.compile(descriptor)
        for path in self.paths[TAINTED_PACKAGE_CALL]:
            _, dst_name, dst_descriptor = path.get_dst(
                self.vm.get_class_manager())
            if m_name.match(dst_name) is not None and m_descriptor.match(dst_descriptor) is not None:
                l.append(path)
        return l

    def get_method(self, name, descriptor):
        # NOTE(review): PathP defines no get_name()/get_descriptor(), so
        # this raises AttributeError if ever called -- confirm intent.
        l = []
        for path in self.paths[TAINTED_PACKAGE_CALL]:
            if path.get_name() == name and path.get_descriptor() == descriptor:
                l.append(path)
        return l

    def get_paths(self):
        """Yield every recorded path, creations first then calls."""
        for i in self.paths:
            for j in self.paths[i]:
                yield j

    def get_paths_length(self):
        x = 0
        for i in self.paths:
            x += len(self.paths[i])
        return x

    def get_methods(self):
        """Return the call paths."""
        return [path for path in self.paths[TAINTED_PACKAGE_CALL]]

    def get_new(self):
        """Return the creation paths."""
        return [path for path in self.paths[TAINTED_PACKAGE_CREATE]]

    def show(self):
        # Debug display intentionally disabled: the early return keeps the
        # print loop below as dead code.
        return
        cm = self.vm.get_class_manager()
        print(self.get_name())
        for _type in self.paths:
            print("\t -->", _type)
            if _type == TAINTED_PACKAGE_CALL:
                for path in self.paths[_type]:
                    print("\t\t => %s <-- %x in %s" %
                          (path.get_dst(cm), path.get_idx(), path.get_src(cm)))
            else:
                for path in self.paths[_type]:
                    print("\t\t => %x in %s" %
                          (path.get_idx(), path.get_src(cm)))
def show_Permissions(dx):
    """Display the paths behind every permission used by *dx*."""
    permissions = dx.get_permissions([])
    for name in permissions:
        for path in permissions[name]:
            show_Path(dx.get_vm(), path)
def show_DynCode(dx):
    """Display every call into the dynamic code loading APIs."""
    targets = (
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    )
    paths = []
    for class_name, method_name in targets:
        paths.extend(dx.get_tainted_packages().search_methods(
            class_name, method_name, "."))
    show_Paths(dx.get_vm(), paths)
def show_NativeMethods(dx):
    # Debug display intentionally disabled: the early return keeps the
    # print below as dead code.
    return
    print(get_NativeMethods(dx))
def show_ReflectionCode(dx):
    """Display every call into java.lang.reflect.Method."""
    hits = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    show_Paths(dx.get_vm(), hits)
def get_NativeMethods(dx):
    """Return (class name, name, descriptor) triples of native methods."""
    ACC_NATIVE = 0x100  # dvm access-flag bit tested by the original code
    return [
        (m.get_class_name(), m.get_name(), m.get_descriptor())
        for m in dx.get_vm().get_methods()
        if m.get_access_flags() & ACC_NATIVE
    ]
def get_ReflectionCode(dx):
    """Return path dicts for every call into java.lang.reflect.Method."""
    hits = dx.get_tainted_packages().search_methods(
        "Ljava/lang/reflect/Method;", ".", ".")
    return get_Paths(dx.get_vm(), hits)
def is_crypto_code(dx):
    """Return True if crypto APIs (javax.crypto / java.security.spec) are called."""
    tp = dx.get_tainted_packages()
    targets = (
        ("Ljavax/crypto/.", "."),
        ("Ljava/security/spec/.", "."),
    )
    return any(tp.search_methods(class_name, method_name, ".")
               for class_name, method_name in targets)
def is_dyn_code(dx):
    """Return True if the dynamic code loading APIs are called."""
    tp = dx.get_tainted_packages()
    targets = (
        ("Ldalvik/system/BaseDexClassLoader;", "<init>"),
        ("Ldalvik/system/PathClassLoader;", "<init>"),
        ("Ldalvik/system/DexClassLoader;", "<init>"),
        ("Ldalvik/system/DexFile;", "<init>"),
        ("Ldalvik/system/DexFile;", "loadDex"),
    )
    return any(tp.search_methods(class_name, method_name, ".")
               for class_name, method_name in targets)
def is_reflection_code(dx):
    """Return True if the java.lang.reflect / Class.forName APIs are called."""
    tp = dx.get_tainted_packages()
    targets = (
        ("Ljava/lang/reflect/Method;", "."),
        ("Ljava/lang/reflect/Field;", "."),
        ("Ljava/lang/Class;", "forName"),
    )
    return any(tp.search_methods(class_name, method_name, ".")
               for class_name, method_name in targets)
def is_native_code(dx):
    """Return True if the System/Runtime load* APIs are called."""
    tp = dx.get_tainted_packages()
    targets = (
        ("Ljava/lang/System;", "load."),
        ("Ljava/lang/Runtime;", "load."),
    )
    return any(tp.search_methods(class_name, method_name, ".")
               for class_name, method_name in targets)
class TaintedPackages(object):
    """
    Registry of every package/class referenced by the dex (created or
    called), plus the AOSP permission tables used to map external method
    calls to permissions.
    """

    def __init__(self, _vm):
        self.__vm = _vm
        # class name -> TaintedPackage
        self.__packages = {}
        # EncodedMethod -> {class name -> [PathP, ...]}
        self.__methods = {}
        self.AOSP_PERMISSIONS_MODULE = load_api_specific_resource_module(
            "aosp_permissions", self.__vm.get_api_version())
        self.API_PERMISSION_MAPPINGS_MODULE = load_api_specific_resource_module(
            "api_permission_mappings", self.__vm.get_api_version())

    def _add_pkg(self, name):
        # Create the package entry on first sight.
        if name not in self.__packages:
            self.__packages[name] = TaintedPackage(self.__vm, name)

    def push_info(self, class_name, access, idx, method, idx_method):
        """Record an access to *class_name* made from *method* at offset *idx*."""
        self._add_pkg(class_name)
        p = self.__packages[class_name].push(
            access, idx, method.get_method_idx(), idx_method)
        # Nested EAFP: create the per-method / per-class buckets on demand.
        try:
            self.__methods[method][class_name].append(p)
        except:
            try:
                self.__methods[method][class_name] = []
            except:
                self.__methods[method] = {}
                self.__methods[method][class_name] = []
            self.__methods[method][class_name].append(p)

    def get_packages_by_method(self, method):
        """Return {class name: [PathP, ...]} for *method* ({} if unknown)."""
        try:
            return self.__methods[method]
        except KeyError:
            return {}

    def get_package(self, name):
        """Return the TaintedPackage for *name* (KeyError if unknown)."""
        return self.__packages[name]

    def get_packages_by_bb(self, bb):
        """
        Return (name, access flag, idx, method) tuples for basic block *bb*.
        NOTE(review): relies on PathP.get_bb()/get_method(), which PathP
        does not define here -- confirm before calling.
        """
        l = []
        for i in self.__packages:
            paths = self.__packages[i].gets()
            for j in paths:
                for k in paths[j]:
                    if k.get_bb() == bb:
                        l.append((i, k.get_access_flag(),
                                  k.get_idx(), k.get_method()))
        return l

    def get_packages(self):
        """Yield (TaintedPackage, class name) pairs."""
        for i in self.__packages:
            yield self.__packages[i], i

    def get_internal_packages_from_package(self, package):
        """Return call paths from *package* into classes defined in this dex."""
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
                if src_class_name == package and dst_class_name in classes:
                    l.append(j)
        return l

    def get_internal_packages(self):
        """Return call paths whose destination class is defined in this dex."""
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                    dst_class_name, _, _ = j.get_dst(
                        self.__vm.get_class_manager())
                    if dst_class_name in classes and m.get_name() in classes:
                        l.append(j)
        return l

    def get_internal_new_packages(self):
        """Return {class name: [creation PathP, ...]} for internal classes."""
        classes = self.__vm.get_classes_names()
        l = {}
        for m, _ in self.get_packages():
            paths = m.get_new()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                if src_class_name in classes and m.get_name() in classes:
                    if j.get_access_flag() == TAINTED_PACKAGE_CREATE:
                        try:
                            l[m.get_name()].append(j)
                        except:
                            l[m.get_name()] = []
                            l[m.get_name()].append(j)
        return l

    def get_external_packages(self):
        """Return call paths from internal classes into external classes."""
        classes = self.__vm.get_classes_names()
        l = []
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, _, _ = j.get_src(self.__vm.get_class_manager())
                dst_class_name, _, _ = j.get_dst(self.__vm.get_class_manager())
                if src_class_name in classes and dst_class_name not in classes:
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        l.append(j)
        return l

    def search_packages(self, package_name):
        """Return call paths of every package whose name matches the regex."""
        ex = re.compile(package_name)
        l = []
        for m, _ in self.get_packages():
            if ex.search(m.get_name()) is not None:
                l.extend(m.get_methods())
        return l

    def search_unique_packages(self, package_name):
        """
        Return ([triples], {triple key: count}) for matching packages.
        NOTE(review): calls m.get_info() and path.get_class_name()/
        get_name()/get_descriptor(), which TaintedPackage/PathP do not
        define here -- confirm before calling.
        """
        ex = re.compile(package_name)
        l = []
        d = {}
        for m, _ in self.get_packages():
            if ex.match(m.get_info()) is not None:
                for path in m.get_methods():
                    try:
                        d[path.get_class_name() + path.get_name() +
                          path.get_descriptor()] += 1
                    except KeyError:
                        d[path.get_class_name() + path.get_name() +
                          path.get_descriptor()] = 0
                        l.append([path.get_class_name(),
                                  path.get_name(), path.get_descriptor()])
        return l, d

    def search_methods(self, class_name, name, descriptor, re_expr=True):
        """
        Return call paths whose class/name/descriptor match the given
        regexes.  NOTE: with re_expr=False this returns an empty list.
        """
        l = []
        if re_expr:
            ex = re.compile(class_name)
            for m, _ in self.get_packages():
                if ex.search(m.get_name()) is not None:
                    l.extend(m.search_method(name, descriptor))
        return l

    def search_objects(self, class_name):
        """Return creation paths of packages matching the regex."""
        ex = re.compile(class_name)
        l = []
        for m, _ in self.get_packages():
            if ex.search(m.get_name()) is not None:
                l.extend(m.get_objects_paths())
        return l

    def search_crypto_packages(self):
        """Return call paths into javax.crypto."""
        return self.search_packages("Ljavax/crypto/")

    def search_telephony_packages(self):
        """Return call paths into android.telephony."""
        return self.search_packages("Landroid/telephony/")

    def search_net_packages(self):
        """Return call paths into android.net."""
        return self.search_packages("Landroid/net/")

    def get_method(self, class_name, name, descriptor):
        """Return the paths of one exact method ([] if the class is unknown)."""
        try:
            return self.__packages[class_name].get_method(name, descriptor)
        except KeyError:
            return []

    def get_permissions_method(self, method):
        """Return the permission names implied by API calls inside *method*."""
        permissions = set()
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                if j.get_method() == method:
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
                            self.__vm.get_class_manager())
                        data = "%s-%s-%s" % (dst_class_name,
                                             dst_method_name, dst_descriptor)
                        if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"].keys()):
                            permissions.update(self.API_PERMISSION_MAPPINGS_MODULE[
                                               "AOSP_PERMISSIONS_BY_METHODS"][data])
        return permissions

    def get_permissions(self, permissions_needed):
        """
        Return {permission name: [PathP, ...]} restricted to
        *permissions_needed* ([] means every known AOSP permission).
        Only calls from internal to external classes are considered.
        """
        permissions = {}
        pn = set(permissions_needed)
        if permissions_needed == []:
            pn = set(self.AOSP_PERMISSIONS_MODULE["AOSP_PERMISSIONS"].keys())
        classes = self.__vm.get_classes_names()
        for m, _ in self.get_packages():
            paths = m.get_methods()
            for j in paths:
                src_class_name, src_method_name, src_descriptor = j.get_src(
                    self.__vm.get_class_manager())
                dst_class_name, dst_method_name, dst_descriptor = j.get_dst(
                    self.__vm.get_class_manager())
                if (src_class_name in classes) and (dst_class_name not in classes):
                    if j.get_access_flag() == TAINTED_PACKAGE_CALL:
                        data = "%s-%s-%s" % (dst_class_name,
                                             dst_method_name, dst_descriptor)
                        if data in list(self.API_PERMISSION_MAPPINGS_MODULE["AOSP_PERMISSIONS_BY_METHODS"].keys()):
                            perm_intersection = pn.intersection(self.API_PERMISSION_MAPPINGS_MODULE[
                                "AOSP_PERMISSIONS_BY_METHODS"][data])
                            for p in perm_intersection:
                                try:
                                    permissions[p].append(j)
                                except KeyError:
                                    permissions[p] = []
                                    permissions[p].append(j)
        return permissions
class Enum(object):
    """Poor-man's enum: ``Enum(["a", "b"])`` exposes ``.A == 0``, ``.B == 1``."""

    def __init__(self, names):
        self.names = names
        for index, label in enumerate(names):
            setattr(self, label.upper(), index)

    def tuples(self):
        """Return ((0, name0), (1, name1), ...)."""
        return tuple(enumerate(self.names))
# Identifiers for the Android API categories a method can touch.
TAG_ANDROID = Enum([
    'ANDROID', 'TELEPHONY', 'SMS', 'SMSMESSAGE', 'ACCESSIBILITYSERVICE', 'ACCOUNTS',
    'ANIMATION', 'APP', 'BLUETOOTH', 'CONTENT', 'DATABASE', 'DEBUG', 'DRM', 'GESTURE',
    'GRAPHICS', 'HARDWARE', 'INPUTMETHODSERVICE', 'LOCATION', 'MEDIA', 'MTP',
    'NET', 'NFC', 'OPENGL', 'OS', 'PREFERENCE', 'PROVIDER', 'RENDERSCRIPT',
    'SAX', 'SECURITY', 'SERVICE', 'SPEECH', 'SUPPORT', 'TEST', 'TEXT', 'UTIL',
    'VIEW', 'WEBKIT', 'WIDGET', 'DALVIK_BYTECODE', 'DALVIK_SYSTEM', 'JAVA_REFLECTION'])

# tag id -> tag name, for display.
TAG_REVERSE_ANDROID = dict((i[0], i[1]) for i in TAG_ANDROID.tuples())

# tag id -> [flag, class-name pattern].  The pattern strings are compiled
# in place by Tags.__init__.
TAGS_ANDROID = {
    TAG_ANDROID.ANDROID: [0, "Landroid"],
    TAG_ANDROID.TELEPHONY: [0, "Landroid/telephony"],
    TAG_ANDROID.SMS: [0, "Landroid/telephony/SmsManager"],
    TAG_ANDROID.SMSMESSAGE: [0, "Landroid/telephony/SmsMessage"],
    TAG_ANDROID.DEBUG: [0, "Landroid/os/Debug"],
    TAG_ANDROID.ACCESSIBILITYSERVICE: [0, "Landroid/accessibilityservice"],
    TAG_ANDROID.ACCOUNTS: [0, "Landroid/accounts"],
    TAG_ANDROID.ANIMATION: [0, "Landroid/animation"],
    TAG_ANDROID.APP: [0, "Landroid/app"],
    TAG_ANDROID.BLUETOOTH: [0, "Landroid/bluetooth"],
    TAG_ANDROID.CONTENT: [0, "Landroid/content"],
    TAG_ANDROID.DATABASE: [0, "Landroid/database"],
    TAG_ANDROID.DRM: [0, "Landroid/drm"],
    TAG_ANDROID.GESTURE: [0, "Landroid/gesture"],
    TAG_ANDROID.GRAPHICS: [0, "Landroid/graphics"],
    TAG_ANDROID.HARDWARE: [0, "Landroid/hardware"],
    TAG_ANDROID.INPUTMETHODSERVICE: [0, "Landroid/inputmethodservice"],
    TAG_ANDROID.LOCATION: [0, "Landroid/location"],
    TAG_ANDROID.MEDIA: [0, "Landroid/media"],
    TAG_ANDROID.MTP: [0, "Landroid/mtp"],
    TAG_ANDROID.NET: [0, "Landroid/net"],
    TAG_ANDROID.NFC: [0, "Landroid/nfc"],
    TAG_ANDROID.OPENGL: [0, "Landroid/opengl"],
    TAG_ANDROID.OS: [0, "Landroid/os"],
    TAG_ANDROID.PREFERENCE: [0, "Landroid/preference"],
    TAG_ANDROID.PROVIDER: [0, "Landroid/provider"],
    TAG_ANDROID.RENDERSCRIPT: [0, "Landroid/renderscript"],
    TAG_ANDROID.SAX: [0, "Landroid/sax"],
    TAG_ANDROID.SECURITY: [0, "Landroid/security"],
    TAG_ANDROID.SERVICE: [0, "Landroid/service"],
    TAG_ANDROID.SPEECH: [0, "Landroid/speech"],
    TAG_ANDROID.SUPPORT: [0, "Landroid/support"],
    TAG_ANDROID.TEST: [0, "Landroid/test"],
    TAG_ANDROID.TEXT: [0, "Landroid/text"],
    TAG_ANDROID.UTIL: [0, "Landroid/util"],
    TAG_ANDROID.VIEW: [0, "Landroid/view"],
    TAG_ANDROID.WEBKIT: [0, "Landroid/webkit"],
    TAG_ANDROID.WIDGET: [0, "Landroid/widget"],
    TAG_ANDROID.DALVIK_BYTECODE: [0, "Ldalvik/bytecode"],
    TAG_ANDROID.DALVIK_SYSTEM: [0, "Ldalvik/system"],
    TAG_ANDROID.JAVA_REFLECTION: [0, "Ljava/lang/reflect"],
}
class Tags(object):
    """
    Accumulates the Android API categories (tags) touched by methods.

    :param patterns: tag id -> [flag, pattern] table; mutated in place
                     (each pattern string is replaced by its compiled form)
    :param reverse: tag id -> tag name mapping used for display
    """

    def __init__(self, patterns=TAGS_ANDROID, reverse=TAG_REVERSE_ANDROID):
        self.tags = set()
        self.patterns = patterns
        # BUG FIX: the `reverse` parameter used to be ignored
        # (TAG_REVERSE_ANDROID was always assigned); honour the argument.
        # The default keeps the historical behaviour.
        self.reverse = reverse
        for i in self.patterns:
            # re.compile() accepts an already-compiled pattern, so repeated
            # construction over the shared default table is safe.
            self.patterns[i][1] = re.compile(self.patterns[i][1])

    def emit(self, method):
        """Record every tag whose pattern matches *method*'s class name."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(method.get_class()) is not None:
                    self.tags.add(i)

    def emit_by_classname(self, classname):
        """Record every tag whose pattern matches *classname*."""
        for i in self.patterns:
            if self.patterns[i][0] == 0:
                if self.patterns[i][1].search(classname) is not None:
                    self.tags.add(i)

    def get_list(self):
        """Return the recorded tags as their human-readable names."""
        return [self.reverse[i] for i in self.tags]

    def __contains__(self, key):
        return key in self.tags

    def __str__(self):
        return str([self.reverse[i] for i in self.tags])

    def empty(self):
        """Return True if no tag has been recorded."""
        return self.tags == set()
class BasicBlocks(object):
    """Container for the basic blocks of one method."""

    def __init__(self, _vm, tv):
        self.__vm = _vm
        self.tainted = tv
        self.bb = []

    def push(self, bb):
        self.bb.append(bb)

    def pop(self, idx):
        return self.bb.pop(idx)

    def get_basic_block(self, idx):
        """Return the block containing byte offset *idx*, or None."""
        for block in self.bb:
            if block.get_start() <= idx < block.get_end():
                return block
        return None

    def get_tainted_integers(self):
        # Best-effort: None when no taint context is attached.
        try:
            return self.tainted.get_tainted_integers()
        except:
            return None

    def get_tainted_packages(self):
        # Best-effort: None when no taint context is attached.
        try:
            return self.tainted.get_tainted_packages()
        except:
            return None

    def get_tainted_variables(self):
        # Best-effort: None when no taint context is attached.
        try:
            return self.tainted.get_tainted_variables()
        except:
            return None

    def get(self):
        """Yield every basic block."""
        yield from self.bb

    def gets(self):
        """Return the list of basic blocks."""
        return self.bb

    def get_basic_block_pos(self, idx):
        """Return the block at list position *idx*."""
        return self.bb[idx]
class ExceptionAnalysis(object):
    """
    One try region: the [start, end) offsets plus its handler entries.

    *exception* is ``[start, end, [name, handler_offset], ...]``; each
    handler entry gets the resolved basic block appended as a third item.
    """

    def __init__(self, exception, bb):
        self.start = exception[0]
        self.end = exception[1]
        self.exceptions = exception[2:]
        for entry in self.exceptions:
            entry.append(bb.get_basic_block(entry[1]))

    def show_buff(self):
        """Return a printable summary of the region and its handlers."""
        lines = ["%x:%x" % (self.start, self.end)]
        for entry in self.exceptions:
            block = entry[2]
            shown = block if block is None else block.get_name()
            lines.append("\t(%s -> %x %s)" % (entry[0], entry[1], shown))
        return "\n".join(lines)

    def get(self):
        """Return the region as a plain dict (handler blocks by name)."""
        return {
            "start": self.start,
            "end": self.end,
            "list": [
                {"name": entry[0], "idx": entry[1], "bb": entry[2].get_name()}
                for entry in self.exceptions
            ],
        }
class Exceptions(object):
    """All try regions (ExceptionAnalysis objects) of one method."""

    def __init__(self, _vm):
        self.__vm = _vm
        self.exceptions = []

    def add(self, exceptions, basic_blocks):
        """Build and store an ExceptionAnalysis for every raw entry."""
        for exc in exceptions:
            self.exceptions.append(ExceptionAnalysis(exc, basic_blocks))

    def get_exception(self, addr_start, addr_end):
        """
        Return the first region nested inside [addr_start, addr_end], or
        the first region that contains it; None when neither exists.
        """
        for exc in self.exceptions:
            if exc.start >= addr_start and exc.end <= addr_end:
                return exc
            if addr_start >= exc.start and addr_end <= exc.end:
                return exc
        return None

    def gets(self):
        """Return the list of regions."""
        return self.exceptions

    def get(self):
        """Yield every region."""
        yield from self.exceptions
# Shared configuration for basic-block construction:
#   BasicOPCODES  - branch instruction mnemonics (regex sources)
#   BasicClass    - the basic-block class to instantiate
#   Dnext / Dexception - helpers resolving successors / exception tables
BO = {"BasicOPCODES": dvm.BRANCH_DVM_OPCODES, "BasicClass": DVMBasicBlock,
      "Dnext": dvm.determineNext, "Dexception": dvm.determineException}

# Pre-compiled regexes for the branch opcode mnemonics.
BO["BasicOPCODES_H"] = []
for i in BO["BasicOPCODES"]:
    BO["BasicOPCODES_H"].append(re.compile(i))
class MethodAnalysis(object):
    """
    Per-method analysis: builds the basic blocks and the exception table
    of *method*, reporting taint information through *tv*.

    NOTE(review): this copy of __init__ is corrupted -- the code that
    walks the instructions and collects branch targets (the `h`, `l`,
    `excepts` and `instructions` variables used below) is missing, and an
    orphaned string fragment remains mid-body.  The class cannot execute
    as-is; restore it from a pristine androguard source before use.
    """

    def __init__(self, vm, method, tv):
        self.__vm = vm
        self.method = method
        self.tainted = tv
        self.basic_blocks = BasicBlocks(self.__vm, self.tainted)
        self.exceptions = Exceptions(self.__vm)
        code = self.method.get_code()
        # Abstract/native methods have no code item: nothing to analyse.
        if code is None:
            return
        current_basic = BO["BasicClass"](
            0, self.__vm, self.method, self.basic_blocks)
        self.basic_blocks.push(current_basic)
        basic blocks childs")
        for i in self.basic_blocks.get():
            try:
                i.set_childs(h[i.end - i.get_last_length()])
            except KeyError:
                i.set_childs([])
        debug("Creating exceptions")
        self.exceptions.add(excepts, self.basic_blocks)
        for i in self.basic_blocks.get():
            i.set_exception_analysis(
                self.exceptions.get_exception(i.start, i.end - 1))
        del instructions
        del h, l

    def get_basic_blocks(self):
        # :rtype: a :class:`BasicBlocks` object
        return self.basic_blocks

    def get_length(self):
        """Return the length of the method's code (0 if no code)."""
        return self.get_code().get_length()

    def get_vm(self):
        return self.__vm

    def get_method(self):
        return self.method

    def get_local_variables(self):
        return self.tainted.get_tainted_variables().get_local_variables(self.method)

    def show(self):
        # Debug display intentionally disabled: the early return keeps the
        # print loop below as dead code.
        return
        print("METHOD", self.method.get_class_name(),
              self.method.get_name(), self.method.get_descriptor())
        for i in self.basic_blocks.get():
            print("\t", i)
            i.show()
            print("")

    def show_methods(self):
        # Disabled debug display.  NOTE(review): the dead code references
        # self.__bb, which is never assigned in this class.
        return
        print("\t #METHODS :")
        for i in self.__bb:
            methods = i.get_methods()
            for method in methods:
                print("\t\t-->", method.get_class_name(),
                      method.get_name(), method.get_descriptor())
                for context in methods[method]:
                    print("\t\t\t |---|", context.details)

    def create_tags(self):
        """Populate self.tags from the packages touched by this method."""
        self.tags = Tags()
        for i in self.tainted.get_tainted_packages().get_packages_by_method(self.method):
            self.tags.emit_by_classname(i)

    def get_tags(self):
        """Return the Tags built by create_tags()."""
        return self.tags
# Grammar identifiers for the signature module. "L0_x" picks a base
# predicate variant; a ":L1"/":L2"/":L3" suffix stacks an extra layer on it.
SIGNATURE_L0_0 = "L0_0"
SIGNATURE_L0_1 = "L0_1"
SIGNATURE_L0_2 = "L0_2"
SIGNATURE_L0_3 = "L0_3"
SIGNATURE_L0_4 = "L0_4"
SIGNATURE_L0_5 = "L0_5"
SIGNATURE_L0_6 = "L0_6"
SIGNATURE_L0_0_L1 = "L0_0:L1"
SIGNATURE_L0_1_L1 = "L0_1:L1"
SIGNATURE_L0_2_L1 = "L0_2:L1"
SIGNATURE_L0_3_L1 = "L0_3:L1"
SIGNATURE_L0_4_L1 = "L0_4:L1"
SIGNATURE_L0_5_L1 = "L0_5:L1"
SIGNATURE_L0_0_L2 = "L0_0:L2"
SIGNATURE_L0_0_L3 = "L0_0:L3"
SIGNATURE_HEX = "hex"
SIGNATURE_SEQUENCE_BB = "sequencebb"
# Configuration per base signature: "type" selects the predicate flavor and
# "arguments" restricts it to the listed package prefixes (e.g. "Landroid").
SIGNATURES = {
    SIGNATURE_L0_0: {"type": 0},
    SIGNATURE_L0_1: {"type": 1},
    SIGNATURE_L0_2: {"type": 2, "arguments": ["Landroid"]},
    SIGNATURE_L0_3: {"type": 2, "arguments": ["Ljava"]},
    SIGNATURE_L0_4: {"type": 2, "arguments": ["Landroid", "Ljava"]},
    SIGNATURE_L0_5: {"type": 3, "arguments": ["Landroid"]},
    SIGNATURE_L0_6: {"type": 3, "arguments": ["Ljava"]},
    SIGNATURE_SEQUENCE_BB: {},
    SIGNATURE_HEX: {},
}
class StringAnalysis(object):
    """Cross-reference record for a single constant string.

    Tracks every (class, method) pair whose bytecode loads this string.
    """

    def __init__(self, value):
        self.value = value
        self.xreffrom = set()

    def AddXrefFrom(self, classobj, methodobj):
        """Record that *methodobj* (declared in *classobj*) uses the string."""
        self.xreffrom.add((classobj, methodobj))

    def get_xref_from(self):
        """Return the set of (class analysis, method) referrers."""
        return self.xreffrom

    def __str__(self):
        pieces = ["XREFto for string %s in" % repr(self.value)]
        pieces.extend(
            "%s:%s" % (cls.get_vm_class().get_name(), meth)
            for cls, meth in self.xreffrom)
        return "\n".join(pieces) + "\n"
class MethodClassAnalysis(object):
    """Bidirectional call cross-references for a single method."""

    def __init__(self, method):
        self.method = method
        self.xrefto = set()
        self.xreffrom = set()

    def AddXrefTo(self, classobj, methodobj):
        """Record an outgoing call: self.method -> methodobj in classobj."""
        self.xrefto.add((classobj, methodobj))

    def AddXrefFrom(self, classobj, methodobj):
        """Record an incoming call: methodobj in classobj -> self.method."""
        self.xreffrom.add((classobj, methodobj))

    def get_xref_from(self):
        """Return the set of (class analysis, method) callers."""
        return self.xreffrom

    def get_xref_to(self):
        """Return the set of (class analysis, method) callees."""
        return self.xrefto

    def __str__(self):
        chunks = ["XREFto for %s\n" % self.method]
        for cls, meth in self.xrefto:
            chunks.append("in\n")
            chunks.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        chunks.append("XREFFrom for %s\n" % self.method)
        for cls, meth in self.xreffrom:
            chunks.append("in\n")
            chunks.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
        return "".join(chunks)
class FieldClassAnalysis(object):
    """Read/write cross-references for a single class field."""

    def __init__(self, field):
        self.field = field
        self.xrefread = set()
        self.xrefwrite = set()

    def AddXrefRead(self, classobj, methodobj):
        """Record that *methodobj* in *classobj* reads this field."""
        self.xrefread.add((classobj, methodobj))

    def AddXrefWrite(self, classobj, methodobj):
        """Record that *methodobj* in *classobj* writes this field."""
        self.xrefwrite.add((classobj, methodobj))

    def get_xref_read(self):
        return self.xrefread

    def get_xref_write(self):
        return self.xrefwrite

    def __str__(self):
        def _dump(title, refs):
            parts = [title]
            for cls, meth in refs:
                parts.append("in\n")
                parts.append("%s:%s\n" % (cls.get_vm_class().get_name(), meth))
            return parts

        out = _dump("XREFRead for %s\n" % self.field, self.xrefread)
        out.extend(_dump("XREFWrite for %s\n" % self.field, self.xrefwrite))
        return "".join(out)
# Kinds of class-level cross references recorded by ClassAnalysis:
REF_NEW_INSTANCE = 0  # the target class is instantiated (new-instance opcode)
REF_CLASS_USAGE = 1  # the target class is referenced without instantiation
class ClassAnalysis(object):
    """Aggregated cross-reference data for one class.

    Holds lazily-created per-method and per-field analysis records, plus
    class-level usage/instantiation links to and from other classes.
    """

    def __init__(self, classobj):
        self._class = classobj
        self._methods = {}  # method -> MethodClassAnalysis
        self._fields = {}   # field -> FieldClassAnalysis
        self.xrefto = collections.defaultdict(set)
        self.xreffrom = collections.defaultdict(set)

    def get_method_analysis(self, method):
        """Return the MethodClassAnalysis for *method*, or None."""
        return self._methods.get(method)

    def get_field_analysis(self, field):
        """Return the FieldClassAnalysis for *field*, or None."""
        return self._fields.get(field)

    def AddFXrefRead(self, method, classobj, field):
        """Record that *method* reads *field*."""
        record = self._fields.get(field)
        if record is None:
            record = FieldClassAnalysis(field)
            self._fields[field] = record
        record.AddXrefRead(classobj, method)

    def AddFXrefWrite(self, method, classobj, field):
        """Record that *method* writes *field*."""
        record = self._fields.get(field)
        if record is None:
            record = FieldClassAnalysis(field)
            self._fields[field] = record
        record.AddXrefWrite(classobj, method)

    def AddMXrefTo(self, method1, classobj, method2):
        """Record an outgoing call from method1 to method2 in classobj."""
        record = self._methods.get(method1)
        if record is None:
            record = MethodClassAnalysis(method1)
            self._methods[method1] = record
        record.AddXrefTo(classobj, method2)

    def AddMXrefFrom(self, method1, classobj, method2):
        """Record an incoming call to method1 from method2 in classobj."""
        record = self._methods.get(method1)
        if record is None:
            record = MethodClassAnalysis(method1)
            self._methods[method1] = record
        record.AddXrefFrom(classobj, method2)

    def AddXrefTo(self, ref_kind, classobj, methodobj):
        """Record a class-level outgoing reference of kind *ref_kind*."""
        self.xrefto[classobj].add((ref_kind, methodobj))

    def AddXrefFrom(self, ref_kind, classobj, methodobj):
        """Record a class-level incoming reference of kind *ref_kind*."""
        self.xreffrom[classobj].add((ref_kind, methodobj))

    def get_xref_from(self):
        return self.xreffrom

    def get_xref_to(self):
        return self.xrefto

    def get_vm_class(self):
        return self._class

    def __str__(self):
        def _section(header, table):
            parts = [header]
            for ref_class in table:
                parts.append(str(ref_class.get_vm_class().get_name()) + " ")
                parts.append("in\n")
                for ref_kind, ref_method in table[ref_class]:
                    parts.append("%d %s\n" % (ref_kind, ref_method))
                parts.append("\n")
            return parts

        out = _section("XREFto for %s\n" % self._class, self.xrefto)
        out.extend(_section("XREFFrom for %s\n" % self._class, self.xreffrom))
        return "".join(out)
class newVMAnalysis(object):
    """Whole-program analysis that builds class/method/field/string
    cross references by decoding every instruction exactly once."""
    def __init__(self, vm):
        self.vm = vm
        self.classes = {}  # class name -> ClassAnalysis
        self.strings = {}  # string value -> StringAnalysis
        for current_class in self.vm.get_classes():
            self.classes[current_class.get_name()] = ClassAnalysis(
                current_class)
    def create_xref(self):
        """Populate all cross-reference tables.

        Walks each method's bytecode and branches on the instruction's
        opcode value to record class usage/instantiation, method calls,
        string loads and field reads/writes.
        """
        debug("Creating XREF/DREF")
        instances_class_name = list(self.classes.keys())
        for current_class in self.vm.get_classes():
            for current_method in current_class.get_methods():
                debug("Creating XREF for %s" % current_method)
                code = current_method.get_code()
                if code is None:
                    # No code item (abstract/native method): nothing to scan.
                    continue
                off = 0
                bc = code.get_bc()
                for instruction in bc.get_instructions():
                    op_value = instruction.get_op_value()
                    # 0x1c / 0x22: class-reference opcodes; 0x22 is treated
                    # as instantiation (REF_NEW_INSTANCE), 0x1c as plain usage.
                    if op_value in [0x1c, 0x22]:
                        idx_type = instruction.get_ref_kind()
                        type_info = self.vm.get_cm_type(idx_type)
                        # Only track references to classes defined in this DEX,
                        # and skip self-references.
                        if type_info in instances_class_name and type_info != current_class.get_name():
                            if op_value == 0x22:
                                self.classes[current_class.get_name()].AddXrefTo(
                                    REF_NEW_INSTANCE, self.classes[type_info], current_method)
                                self.classes[type_info].AddXrefFrom(REF_NEW_INSTANCE, self.classes[
                                    current_class.get_name()], current_method)
                            else:
                                self.classes[current_class.get_name()].AddXrefTo(
                                    REF_CLASS_USAGE, self.classes[type_info], current_method)
                                self.classes[type_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
                                    current_class.get_name()], current_method)
                    # 0x6e-0x72 and 0x74-0x78: method-invocation opcodes;
                    # record caller/callee method xrefs (and class usage).
                    elif ((op_value >= 0x6e and op_value <= 0x72) or
                          (op_value >= 0x74 and op_value <= 0x78)):
                        idx_meth = instruction.get_ref_kind()
                        method_info = self.vm.get_cm_method(idx_meth)
                        if method_info:
                            class_info = method_info[0]
                            method_item = self.vm.get_method_descriptor(
                                method_info[0], method_info[1], ''.join(method_info[2]))
                            if method_item:
                                self.classes[current_class.get_name()].AddMXrefTo(
                                    current_method, self.classes[class_info], method_item)
                                self.classes[class_info].AddMXrefFrom(
                                    method_item, self.classes[current_class.get_name()], current_method)
                                # Calling into another in-DEX class also counts
                                # as a class-level usage reference.
                                if class_info in instances_class_name and class_info != current_class.get_name():
                                    self.classes[current_class.get_name()].AddXrefTo(
                                        REF_CLASS_USAGE, self.classes[class_info], method_item)
                                    self.classes[class_info].AddXrefFrom(REF_CLASS_USAGE, self.classes[
                                        current_class.get_name()], current_method)
                    # 0x1a-0x1b: string-constant opcodes; record the load site.
                    elif op_value >= 0x1a and op_value <= 0x1b:
                        string_value = self.vm.get_cm_string(
                            instruction.get_ref_kind())
                        if string_value not in self.strings:
                            self.strings[string_value] = StringAnalysis(
                                string_value)
                        self.strings[string_value].AddXrefFrom(
                            self.classes[current_class.get_name()], current_method)
                    # 0x52-0x6d: field-access opcodes. Sub-ranges 0x52-0x58 and
                    # 0x60-0x66 are treated as reads, the rest as writes.
                    elif op_value >= 0x52 and op_value <= 0x6d:
                        idx_field = instruction.get_ref_kind()
                        field_info = self.vm.get_cm_field(idx_field)
                        field_item = self.vm.get_field_descriptor(
                            field_info[0], field_info[2], field_info[1])
                        # NOTE(review): the xref is attached to the *current*
                        # class on both sides, not to the field's owning class
                        # — confirm this is the intended behavior.
                        if field_item:
                            if (op_value >= 0x52 and op_value <= 0x58) or (op_value >= 0x60 and op_value <= 0x66):
                                self.classes[current_class.get_name()].AddFXrefRead(
                                    current_method, self.classes[current_class.get_name()], field_item)
                            else:
                                self.classes[current_class.get_name()].AddFXrefWrite(
                                    current_method, self.classes[current_class.get_name()], field_item)
                    # off tracks the bytecode offset; it is accumulated but
                    # never read in this method.
                    off += instruction.get_length()
    def get_method(self, method):
        """Return a fresh MethodAnalysis for *method* (no taint context)."""
        return MethodAnalysis(self.vm, method, None)
    def get_method_by_name(self, class_name, method_name, method_descriptor):
        """Look up a method by class name, method name and descriptor;
        return None when not found."""
        if class_name in self.classes:
            for method in self.classes[class_name].get_vm_class().get_methods():
                if method.get_name() == method_name and method.get_descriptor() == method_descriptor:
                    return method
        return None
    def is_class_present(self, class_name):
        """True when *class_name* is defined in the analyzed DEX."""
        return class_name in self.classes
    def get_class_analysis(self, class_name):
        """Return the ClassAnalysis for *class_name*, or None."""
        return self.classes.get(class_name)
    def get_strings_analysis(self):
        """Return the mapping of string value -> StringAnalysis."""
        return self.strings
class VMAnalysis(object):
    """Eager taint-oriented analysis: walks every field and method of the
    VM at construction time and caches a MethodAnalysis per method."""
    def __init__(self, vm):
        self.vm = vm
        self.tainted_variables = TaintedVariables(self.vm)
        self.tainted_packages = TaintedPackages(self.vm)
        self.tainted = {"variables": self.tainted_variables,
                        "packages": self.tainted_packages,
                        }
        # Lazily created by get_method_signature().
        self.signature = None
        # Register every field as a tainted variable up front.
        for i in self.vm.get_all_fields():
            self.tainted_variables.add(
                [i.get_class_name(), i.get_descriptor(), i.get_name()], TAINTED_FIELD)
        self.methods = []       # all MethodAnalysis objects, in VM order
        self.hmethods = {}      # method object -> MethodAnalysis
        self.__nmethods = {}    # method name -> MethodAnalysis (last wins)
        for i in self.vm.get_methods():
            x = MethodAnalysis(self.vm, i, self)
            self.methods.append(x)
            self.hmethods[i] = x
            self.__nmethods[i.get_name()] = x
    def get_vm(self):
        return self.vm
    def get_method(self, method):
        """Return the cached MethodAnalysis for *method* (KeyError if unknown)."""
        return self.hmethods[method]
    def get_methods(self):
        """Yield every cached MethodAnalysis."""
        for i in self.hmethods:
            yield self.hmethods[i]
    # NOTE(review): options={} is a mutable default argument; it is only
    # passed through here, but callers should not rely on mutating it.
    def get_method_signature(self, method, grammar_type="", options={}, predef_sign=""):
        """Compute the signature of *method*.

        When *predef_sign* is given (e.g. "L0_4:L1"), it is expanded into a
        grammar string plus options; otherwise *grammar_type*/*options* are
        used directly. NOTE(review): with several "_"-style parts in
        predef_sign, each overwrites o["L0"] — only the last one survives.
        """
        if self.signature is None:
            self.signature = Signature(self)
        if predef_sign != "":
            g = ""
            o = {}
            for i in predef_sign.split(":"):
                if "_" in i:
                    g += "L0:"
                    o["L0"] = SIGNATURES[i]
                else:
                    g += i
                    g += ":"
            return self.signature.get_method(self.get_method(method), g[:-1], o)
        else:
            return self.signature.get_method(self.get_method(method), grammar_type, options)
    def get_permissions(self, permissions_needed):
        """Merge the permission usage reported by packages and variables."""
        permissions = {}
        permissions.update(self.get_tainted_packages(
        ).get_permissions(permissions_needed))
        permissions.update(self.get_tainted_variables(
        ).get_permissions(permissions_needed))
        return permissions
    def get_permissions_method(self, method):
        """Return the union of package- and variable-level permissions
        used by *method*, as a list."""
        permissions_f = self.get_tainted_packages().get_permissions_method(method)
        permissions_v = self.get_tainted_variables().get_permissions_method(method)
        all_permissions_of_method = permissions_f.union(permissions_v)
        return list(all_permissions_of_method)
    def get_tainted_variables(self):
        return self.tainted_variables
    def get_tainted_packages(self):
        return self.tainted_packages
    def get_tainted_fields(self):
        return self.get_tainted_variables().get_fields()
    def get_tainted_field(self, class_name, name, descriptor):
        """Return the tainted-field record for the given identifiers."""
        return self.get_tainted_variables().get_field(class_name, name, descriptor)
class uVMAnalysis(VMAnalysis):
    """Lazy variant of VMAnalysis.

    Methods are not analyzed at construction time; taint information is
    populated the first time it is requested (see _resolve()).
    """

    def __init__(self, vm):
        self.vm = vm
        self.tainted_variables = TaintedVariables(self.vm)
        self.tainted_packages = TaintedPackages(self.vm)
        self.tainted = {
            "variables": self.tainted_variables,
            "packages": self.tainted_packages,
        }
        self.signature = None
        # Becomes True once get_methods() has actually been iterated.
        self.resolve = False

    def get_methods(self):
        """Yield a fresh MethodAnalysis per method, marking taint resolved."""
        self.resolve = True
        for raw_method in self.vm.get_methods():
            yield MethodAnalysis(self.vm, raw_method, self)

    def get_method(self, method):
        """Analyze a single method without taint tracking."""
        return MethodAnalysis(self.vm, method, None)

    def get_vm(self):
        return self.vm

    def _resolve(self):
        # Force one full pass over all methods so taint data gets populated.
        if not self.resolve:
            for _ in self.get_methods():
                pass

    def get_tainted_packages(self):
        self._resolve()
        return self.tainted_packages

    def get_tainted_variables(self):
        self._resolve()
        return self.tainted_variables
def is_ascii_obfuscation(vm):
    """Return True when any class name or method name in *vm* is flagged
    by is_ascii_problem() (i.e. looks name-obfuscated), else False."""
    for klass in vm.get_classes():
        if is_ascii_problem(klass.get_name()):
            return True
        if any(is_ascii_problem(m.get_name()) for m in klass.get_methods()):
            return True
    return False
| true | true |
f71f68f60efce427cc864118cc7e00210f6bd3bb | 302 | py | Python | python3/recent_counter.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | 1 | 2020-10-08T09:17:40.000Z | 2020-10-08T09:17:40.000Z | python3/recent_counter.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | python3/recent_counter.py | joshiaj7/CodingChallenges | f95dd79132f07c296e074d675819031912f6a943 | [
"MIT"
] | null | null | null | """
space : O(n)
time : O(n)
"""
class RecentCounter:
    """Counts pings received within the last 3000 ms (LeetCode 933).

    Uses a deque so each expired timestamp is evicted in O(1); the
    original list-based version paid O(n) per ``pop(0)``.
    """

    def __init__(self):
        from collections import deque  # function-scope import keeps the class drop-in
        # Timestamps still inside the window, in ascending order.
        self.history = deque()

    def ping(self, t: int) -> int:
        """Record a ping at time *t* (strictly increasing per the problem)
        and return the number of pings in the inclusive window [t-3000, t]."""
        self.history.append(t)
        s = t - 3000
        # Evict timestamps older than the window; t itself satisfies t >= s,
        # so the deque can never be emptied here.
        while self.history[0] < s:
            self.history.popleft()
        return len(self.history)
| 15.1 | 34 | 0.503311 |
class RecentCounter:
    """Counts pings whose timestamps fall in the last 3000 ms (LeetCode 933)."""
    def __init__(self):
        # Timestamps still inside the 3000 ms window, ascending.
        self.history = []
    def ping(self, t: int) -> int:
        """Record a ping at time *t* and return how many pings occurred in
        the inclusive window [t - 3000, t]. Assumes strictly increasing t,
        per the problem statement."""
        self.history.append(t)
        s = t - 3000
        # Drop timestamps that fell out of the window. NOTE: pop(0) is O(n)
        # per eviction; a collections.deque would make this O(1).
        while self.history[0] < s:
            self.history.pop(0)
        return len(self.history)
| true | true |
f71f694ec80a3bd4c8eb0b4d9cd3f8f8a53b92c1 | 8,293 | py | Python | knack/invocation.py | derekbekoe/knack | 07ce4c3ae51ef22e6364ed93c5980cae7688e347 | [
"MIT"
] | 1 | 2019-02-10T01:38:05.000Z | 2019-02-10T01:38:05.000Z | knack/invocation.py | derekbekoe/knack | 07ce4c3ae51ef22e6364ed93c5980cae7688e347 | [
"MIT"
] | null | null | null | knack/invocation.py | derekbekoe/knack | 07ce4c3ae51ef22e6364ed93c5980cae7688e347 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import print_function
import sys
from collections import defaultdict
from .deprecation import ImplicitDeprecated, resolve_deprecate_info
from .util import CLIError, CtxTypeError, CommandResultItem, todict
from .parser import CLICommandParser
from .commands import CLICommandsLoader
from .events import (EVENT_INVOKER_PRE_CMD_TBL_CREATE, EVENT_INVOKER_POST_CMD_TBL_CREATE,
EVENT_INVOKER_CMD_TBL_LOADED, EVENT_INVOKER_PRE_PARSE_ARGS,
EVENT_INVOKER_POST_PARSE_ARGS, EVENT_INVOKER_TRANSFORM_RESULT,
EVENT_INVOKER_FILTER_RESULT)
from .help import CLIHelp
class CommandInvoker(object):
    """Drives one CLI invocation end to end: builds the command table,
    parses arguments, runs validators and executes the selected command."""
    def __init__(self,
                 cli_ctx=None,
                 parser_cls=CLICommandParser,
                 commands_loader_cls=CLICommandsLoader,
                 help_cls=CLIHelp,
                 initial_data=None):
        """ Manages a single invocation of the CLI (i.e. running a command)

        :param cli_ctx: CLI Context
        :type cli_ctx: knack.cli.CLI
        :param parser_cls: A class to handle command parsing
        :type parser_cls: knack.parser.CLICommandParser
        :param commands_loader_cls: A class to handle loading commands
        :type commands_loader_cls: knack.commands.CLICommandsLoader
        :param help_cls: A class to handle help
        :type help_cls: knack.help.CLIHelp
        :param initial_data: The initial in-memory collection for this command invocation
        :type initial_data: dict
        """
        from .cli import CLI
        if cli_ctx is not None and not isinstance(cli_ctx, CLI):
            raise CtxTypeError(cli_ctx)
        self.cli_ctx = cli_ctx
        # In memory collection of key-value data for this current invocation This does not persist between invocations.
        self.data = initial_data or defaultdict(lambda: None)
        self.data['command'] = 'unknown'
        self._global_parser = parser_cls.create_global_parser(cli_ctx=self.cli_ctx)
        self.help = help_cls(cli_ctx=self.cli_ctx)
        self.parser = parser_cls(cli_ctx=self.cli_ctx, cli_help=self.help,
                                 prog=self.cli_ctx.name, parents=[self._global_parser])
        self.commands_loader = commands_loader_cls(cli_ctx=self.cli_ctx)
    def _filter_params(self, args): # pylint: disable=no-self-use
        """Return the parsed-namespace attributes as a kwargs dict, dropping
        private (underscore-prefixed) entries plus func/command bookkeeping."""
        # Consider - we are using any args that start with an underscore (_) as 'private'
        # arguments and remove them from the arguments that we pass to the actual function.
        params = {key: value
                  for key, value in args.__dict__.items()
                  if not key.startswith('_')}
        params.pop('func', None)
        params.pop('command', None)
        return params
    def _rudimentary_get_command(self, args):
        """ Rudimentary parsing to get the command

        Collects leading non-flag tokens, trims trailing positionals that do
        not prefix-match any command name, and lower-cases the command tokens
        in-place in *args*.
        """
        nouns = []
        command_names = self.commands_loader.command_table.keys()
        for arg in args:
            if arg and arg[0] != '-':
                nouns.append(arg)
            else:
                break
        def _find_args(args):
            search = ' '.join(args).lower()
            return next((x for x in command_names if x.startswith(search)), False)
        # since the command name may be immediately followed by a positional arg, strip those off
        while nouns and not _find_args(nouns):
            del nouns[-1]
        # ensure the command string is case-insensitive
        for i in range(len(nouns)):
            args[i] = args[i].lower()
        return ' '.join(nouns)
    def _validate_cmd_level(self, ns, cmd_validator): # pylint: disable=no-self-use
        """Run the command-level validator (if any), then strip it from the
        namespace so it is not forwarded as a command parameter."""
        if cmd_validator:
            cmd_validator(ns)
        try:
            delattr(ns, '_command_validator')
        except AttributeError:
            pass
    def _validate_arg_level(self, ns, **_): # pylint: disable=no-self-use
        """Run every argument-level validator, then strip the validator list
        from the namespace."""
        for validator in getattr(ns, '_argument_validators', []):
            validator(ns)
        try:
            delattr(ns, '_argument_validators')
        except AttributeError:
            pass
    def _validation(self, parsed_ns):
        """Dispatch to command- or argument-level validation; CLIError
        propagates, any other exception becomes a parser validation error."""
        try:
            cmd_validator = getattr(parsed_ns, '_command_validator', None)
            if cmd_validator:
                self._validate_cmd_level(parsed_ns, cmd_validator)
            else:
                self._validate_arg_level(parsed_ns)
        except CLIError:
            raise
        except Exception: # pylint: disable=broad-except
            err = sys.exc_info()[1]
            getattr(parsed_ns, '_parser', self.parser).validation_error(str(err))
    def execute(self, args):
        """ Executes the command invocation

        :param args: The command arguments for this invocation
        :type args: list
        :return: The command result
        :rtype: knack.util.CommandResultItem
        """
        import colorama
        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
        cmd_tbl = self.commands_loader.load_command_table(args)
        command = self._rudimentary_get_command(args)
        self.cli_ctx.invocation.data['command_string'] = command
        self.commands_loader.load_arguments(command)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, cmd_tbl=cmd_tbl)
        self.parser.load_command_table(self.commands_loader)
        self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, parser=self.parser)
        # Bare invocation (only verbosity flags given): show the welcome screen.
        arg_check = [a for a in args if a not in ['--verbose', '--debug']]
        if not arg_check:
            self.cli_ctx.completion.enable_autocomplete(self.parser)
            subparser = self.parser.subparsers[tuple()]
            self.help.show_welcome(subparser)
            return CommandResultItem(None, exit_code=0)
        if args[0].lower() == 'help':
            args[0] = '--help'
        self.cli_ctx.completion.enable_autocomplete(self.parser)
        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
        parsed_args = self.parser.parse_args(args)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args)
        self._validation(parsed_args)
        # save the command name (leaf in the tree)
        self.data['command'] = parsed_args.command
        cmd = parsed_args.func
        if hasattr(parsed_args, 'cmd'):
            parsed_args.cmd = cmd
        # Collect explicit argument/command deprecations...
        deprecations = getattr(parsed_args, '_argument_deprecations', [])
        if cmd.deprecate_info:
            deprecations.append(cmd.deprecate_info)
        params = self._filter_params(parsed_args)
        # search for implicit deprecation
        path_comps = cmd.name.split()[:-1]
        implicit_deprecate_info = None
        while path_comps and not implicit_deprecate_info:
            implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, ' '.join(path_comps))
            del path_comps[-1]
        if implicit_deprecate_info:
            deprecate_kwargs = implicit_deprecate_info.__dict__.copy()
            deprecate_kwargs['object_type'] = 'command'
            del deprecate_kwargs['_get_tag']
            del deprecate_kwargs['_get_message']
            deprecations.append(ImplicitDeprecated(**deprecate_kwargs))
        # Emit all deprecation messages to stderr (colorama enables ANSI on Windows).
        colorama.init()
        for d in deprecations:
            print(d.message, file=sys.stderr)
        colorama.deinit()
        cmd_result = parsed_args.func(params)
        cmd_result = todict(cmd_result)
        event_data = {'result': cmd_result}
        # Let event handlers transform/filter the result before returning it.
        self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
        self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data)
        return CommandResultItem(event_data['result'],
                                 exit_code=0,
                                 table_transformer=cmd_tbl[parsed_args.command].table_transformer,
                                 is_query_active=self.data['query_active'])
| 41.673367 | 119 | 0.636079 |
from __future__ import print_function
import sys
from collections import defaultdict
from .deprecation import ImplicitDeprecated, resolve_deprecate_info
from .util import CLIError, CtxTypeError, CommandResultItem, todict
from .parser import CLICommandParser
from .commands import CLICommandsLoader
from .events import (EVENT_INVOKER_PRE_CMD_TBL_CREATE, EVENT_INVOKER_POST_CMD_TBL_CREATE,
EVENT_INVOKER_CMD_TBL_LOADED, EVENT_INVOKER_PRE_PARSE_ARGS,
EVENT_INVOKER_POST_PARSE_ARGS, EVENT_INVOKER_TRANSFORM_RESULT,
EVENT_INVOKER_FILTER_RESULT)
from .help import CLIHelp
class CommandInvoker(object):
    """Runs a single CLI command invocation: command-table construction,
    argument parsing, validation, deprecation reporting and execution."""
    def __init__(self,
                 cli_ctx=None,
                 parser_cls=CLICommandParser,
                 commands_loader_cls=CLICommandsLoader,
                 help_cls=CLIHelp,
                 initial_data=None):
        """Set up the parser, help and commands loader for one invocation.

        :param cli_ctx: CLI context (must be a knack.cli.CLI when given)
        :param parser_cls: class handling command parsing
        :param commands_loader_cls: class handling command loading
        :param help_cls: class handling help output
        :param initial_data: initial in-memory key/value store for this
            invocation (does not persist between invocations)
        """
        from .cli import CLI
        if cli_ctx is not None and not isinstance(cli_ctx, CLI):
            raise CtxTypeError(cli_ctx)
        self.cli_ctx = cli_ctx
        self.data = initial_data or defaultdict(lambda: None)
        self.data['command'] = 'unknown'
        self._global_parser = parser_cls.create_global_parser(cli_ctx=self.cli_ctx)
        self.help = help_cls(cli_ctx=self.cli_ctx)
        self.parser = parser_cls(cli_ctx=self.cli_ctx, cli_help=self.help,
                                 prog=self.cli_ctx.name, parents=[self._global_parser])
        self.commands_loader = commands_loader_cls(cli_ctx=self.cli_ctx)
    def _filter_params(self, args):
        """Return parsed-namespace attributes as a kwargs dict, dropping
        underscore-prefixed (private) entries and func/command bookkeeping."""
        params = {key: value
                  for key, value in args.__dict__.items()
                  if not key.startswith('_')}
        params.pop('func', None)
        params.pop('command', None)
        return params
    def _rudimentary_get_command(self, args):
        """Best-effort extraction of the command name from raw *args*:
        collect leading non-flag tokens, trim trailing tokens that do not
        prefix-match any known command, lower-case them in-place."""
        nouns = []
        command_names = self.commands_loader.command_table.keys()
        for arg in args:
            if arg and arg[0] != '-':
                nouns.append(arg)
            else:
                break
        def _find_args(args):
            search = ' '.join(args).lower()
            return next((x for x in command_names if x.startswith(search)), False)
        # The command may be followed by positional args: strip those off.
        while nouns and not _find_args(nouns):
            del nouns[-1]
        # Command matching is case-insensitive.
        for i in range(len(nouns)):
            args[i] = args[i].lower()
        return ' '.join(nouns)
    def _validate_cmd_level(self, ns, cmd_validator):
        """Run the command-level validator, then remove it from the
        namespace so it is not passed to the command handler."""
        if cmd_validator:
            cmd_validator(ns)
        try:
            delattr(ns, '_command_validator')
        except AttributeError:
            pass
    def _validate_arg_level(self, ns, **_):
        """Run each argument-level validator, then strip the list from
        the namespace."""
        for validator in getattr(ns, '_argument_validators', []):
            validator(ns)
        try:
            delattr(ns, '_argument_validators')
        except AttributeError:
            pass
    def _validation(self, parsed_ns):
        """Run command- or argument-level validation; CLIError propagates,
        anything else is reported as a parser validation error."""
        try:
            cmd_validator = getattr(parsed_ns, '_command_validator', None)
            if cmd_validator:
                self._validate_cmd_level(parsed_ns, cmd_validator)
            else:
                self._validate_arg_level(parsed_ns)
        except CLIError:
            raise
        except Exception:
            err = sys.exc_info()[1]
            getattr(parsed_ns, '_parser', self.parser).validation_error(str(err))
    def execute(self, args):
        """Execute the command described by *args* and return a
        CommandResultItem (exit code 0 on success)."""
        import colorama
        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
        cmd_tbl = self.commands_loader.load_command_table(args)
        command = self._rudimentary_get_command(args)
        self.cli_ctx.invocation.data['command_string'] = command
        self.commands_loader.load_arguments(command)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, cmd_tbl=cmd_tbl)
        self.parser.load_command_table(self.commands_loader)
        self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, parser=self.parser)
        # Bare invocation (only verbosity flags): show the welcome screen.
        arg_check = [a for a in args if a not in ['--verbose', '--debug']]
        if not arg_check:
            self.cli_ctx.completion.enable_autocomplete(self.parser)
            subparser = self.parser.subparsers[tuple()]
            self.help.show_welcome(subparser)
            return CommandResultItem(None, exit_code=0)
        if args[0].lower() == 'help':
            args[0] = '--help'
        self.cli_ctx.completion.enable_autocomplete(self.parser)
        self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
        parsed_args = self.parser.parse_args(args)
        self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args)
        self._validation(parsed_args)
        # Remember the resolved (leaf) command name for this invocation.
        self.data['command'] = parsed_args.command
        cmd = parsed_args.func
        if hasattr(parsed_args, 'cmd'):
            parsed_args.cmd = cmd
        # Gather explicit deprecations from arguments and the command itself.
        deprecations = getattr(parsed_args, '_argument_deprecations', [])
        if cmd.deprecate_info:
            deprecations.append(cmd.deprecate_info)
        params = self._filter_params(parsed_args)
        # Walk up the command path looking for an inherited (implicit)
        # deprecation on a parent command group.
        path_comps = cmd.name.split()[:-1]
        implicit_deprecate_info = None
        while path_comps and not implicit_deprecate_info:
            implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, ' '.join(path_comps))
            del path_comps[-1]
        if implicit_deprecate_info:
            deprecate_kwargs = implicit_deprecate_info.__dict__.copy()
            deprecate_kwargs['object_type'] = 'command'
            del deprecate_kwargs['_get_tag']
            del deprecate_kwargs['_get_message']
            deprecations.append(ImplicitDeprecated(**deprecate_kwargs))
        # Print deprecation messages to stderr (colorama enables ANSI colors).
        colorama.init()
        for d in deprecations:
            print(d.message, file=sys.stderr)
        colorama.deinit()
        cmd_result = parsed_args.func(params)
        cmd_result = todict(cmd_result)
        event_data = {'result': cmd_result}
        # Allow event handlers to transform/filter the result before returning.
        self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
        self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data)
        return CommandResultItem(event_data['result'],
                                 exit_code=0,
                                 table_transformer=cmd_tbl[parsed_args.command].table_transformer,
                                 is_query_active=self.data['query_active'])
f71f6972720d1f87a308457a99c2da6ef6fe19d9 | 63,620 | py | Python | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | LeetCode/contest-2018-11-26/fair_candy_swap.py | Max-PJB/python-learning2 | e8b05bef1574ee9abf8c90497e94ef20a7f4e3bd | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
@ Author : pengj
@ date : 2018/11/26 19:28
@ IDE : PyCharm
@ GitHub : https://github.com/JackyPJB
@ Contact : pengjianbiao@hotmail.com
-------------------------------------------------
Description : 888. 公平的糖果交换
虚拟 用户通过次数 0
虚拟 用户尝试次数 1
虚拟 通过次数 0
虚拟 提交次数 1
题目难度 Easy
爱丽丝和鲍勃有不同大小的糖果棒:A[i] 是爱丽丝拥有的第 i 块糖的大小,B[j] 是鲍勃拥有的第 j 块糖的大小。
因为他们是朋友,所以他们想交换一个糖果棒,这样交换后,他们都有相同的糖果总量。(一个人拥有的糖果总量是他们拥有的糖果棒大小的总和。)
返回一个整数数组 ans,其中 ans[0] 是爱丽丝必须交换的糖果棒的大小,ans[1] 是 Bob 必须交换的糖果棒的大小。
如果有多个答案,你可以返回其中任何一个。保证答案存在。
示例 1:
输入:A = [1,1], B = [2,2]
输出:[1,2]
示例 2:
输入:A = [1,2], B = [2,3]
输出:[1,2]
示例 3:
输入:A = [2], B = [1,3]
输出:[2,3]
示例 4:
输入:A = [1,2,5], B = [2,4]
输出:[5,4]
提示:
1 <= A.length <= 10000
1 <= B.length <= 10000
1 <= A[i] <= 100000
1 <= B[i] <= 100000
保证爱丽丝与鲍勃的糖果总量不同。
答案肯定存在。
-------------------------------------------------
"""
import time
__author__ = 'Max_Pengjb'  # script author tag
start = time.time()  # wall-clock start, presumably used later to report elapsed time
# 下面写上代码块
class Solution(object):
    """LeetCode 888 — Fair Candy Swap."""

    def fairCandySwap(self, A, B):
        """Return [a, b]: Alice gives a bar of size ``a`` and receives Bob's
        bar of size ``b`` so both end up with equal totals.

        If Alice gives a and receives b, then sum(A) - a + b == sum(B) - b + a,
        i.e. a - b == (sum(A) - sum(B)) / 2. The problem guarantees an answer
        exists, so the difference is even and some pair matches.

        :type A: List[int]
        :type B: List[int]
        :rtype: List[int]
        """
        k = (sum(A) - sum(B)) // 2
        # A set gives O(1) membership tests; the original built a dict of
        # dummy 1-values and probed its .keys() view for the same purpose.
        b_sizes = set(B)
        for a in A:
            if a - k in b_sizes:
                return [a, a - k]
# Small sample input (LeetCode example 4).
A = [1, 2, 5]
B = [2, 4]
# Large stress-test input: every odd number in [1, 9999], followed by 4982.
a1 = list(range(1, 10000, 2)) + [4982]
b1 = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58,
60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112,
114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158,
160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204,
206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250,
252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296,
298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342,
344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388,
390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434,
436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480,
482, 484, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526,
528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572,
574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618,
620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664,
666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 698, 700, 702, 704, 706, 708, 710,
712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756,
758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802,
804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 844, 846, 848,
850, 852, 854, 856, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894,
896, 898, 900, 902, 904, 906, 908, 910, 912, 914, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940,
942, 944, 946, 948, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986,
988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1022, 1024, 1026,
1028, 1030, 1032, 1034, 1036, 1038, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1060, 1062, 1064,
1066, 1068, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1090, 1092, 1094, 1096, 1098, 1100, 1102,
1104, 1106, 1108, 1110, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, 1140,
1142, 1144, 1146, 1148, 1150, 1152, 1154, 1156, 1158, 1160, 1162, 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178,
1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216,
1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254,
1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292,
1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330,
1332, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368,
1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406,
1408, 1410, 1412, 1414, 1416, 1418, 1420, 1422, 1424, 1426, 1428, 1430, 1432, 1434, 1436, 1438, 1440, 1442, 1444,
1446, 1448, 1450, 1452, 1454, 1456, 1458, 1460, 1462, 1464, 1466, 1468, 1470, 1472, 1474, 1476, 1478, 1480, 1482,
1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 1520,
1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1556, 1558,
1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1580, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596,
1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634,
1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672,
1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710,
1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1738, 1740, 1742, 1744, 1746, 1748,
1750, 1752, 1754, 1756, 1758, 1760, 1762, 1764, 1766, 1768, 1770, 1772, 1774, 1776, 1778, 1780, 1782, 1784, 1786,
1788, 1790, 1792, 1794, 1796, 1798, 1800, 1802, 1804, 1806, 1808, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824,
1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1854, 1856, 1858, 1860, 1862,
1864, 1866, 1868, 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900,
1902, 1904, 1906, 1908, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938,
1940, 1942, 1944, 1946, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976,
1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014,
2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052,
2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090,
2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2108, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128,
2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166,
2168, 2170, 2172, 2174, 2176, 2178, 2180, 2182, 2184, 2186, 2188, 2190, 2192, 2194, 2196, 2198, 2200, 2202, 2204,
2206, 2208, 2210, 2212, 2214, 2216, 2218, 2220, 2222, 2224, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242,
2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280,
2282, 2284, 2286, 2288, 2290, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318,
2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356,
2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394,
2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432,
2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470,
2472, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508,
2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546,
2548, 2550, 2552, 2554, 2556, 2558, 2560, 2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576, 2578, 2580, 2582, 2584,
2586, 2588, 2590, 2592, 2594, 2596, 2598, 2600, 2602, 2604, 2606, 2608, 2610, 2612, 2614, 2616, 2618, 2620, 2622,
2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2658, 2660,
2662, 2664, 2666, 2668, 2670, 2672, 2674, 2676, 2678, 2680, 2682, 2684, 2686, 2688, 2690, 2692, 2694, 2696, 2698,
2700, 2702, 2704, 2706, 2708, 2710, 2712, 2714, 2716, 2718, 2720, 2722, 2724, 2726, 2728, 2730, 2732, 2734, 2736,
2738, 2740, 2742, 2744, 2746, 2748, 2750, 2752, 2754, 2756, 2758, 2760, 2762, 2764, 2766, 2768, 2770, 2772, 2774,
2776, 2778, 2780, 2782, 2784, 2786, 2788, 2790, 2792, 2794, 2796, 2798, 2800, 2802, 2804, 2806, 2808, 2810, 2812,
2814, 2816, 2818, 2820, 2822, 2824, 2826, 2828, 2830, 2832, 2834, 2836, 2838, 2840, 2842, 2844, 2846, 2848, 2850,
2852, 2854, 2856, 2858, 2860, 2862, 2864, 2866, 2868, 2870, 2872, 2874, 2876, 2878, 2880, 2882, 2884, 2886, 2888,
2890, 2892, 2894, 2896, 2898, 2900, 2902, 2904, 2906, 2908, 2910, 2912, 2914, 2916, 2918, 2920, 2922, 2924, 2926,
2928, 2930, 2932, 2934, 2936, 2938, 2940, 2942, 2944, 2946, 2948, 2950, 2952, 2954, 2956, 2958, 2960, 2962, 2964,
2966, 2968, 2970, 2972, 2974, 2976, 2978, 2980, 2982, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002,
3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040,
3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3064, 3066, 3068, 3070, 3072, 3074, 3076, 3078,
3080, 3082, 3084, 3086, 3088, 3090, 3092, 3094, 3096, 3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116,
3118, 3120, 3122, 3124, 3126, 3128, 3130, 3132, 3134, 3136, 3138, 3140, 3142, 3144, 3146, 3148, 3150, 3152, 3154,
3156, 3158, 3160, 3162, 3164, 3166, 3168, 3170, 3172, 3174, 3176, 3178, 3180, 3182, 3184, 3186, 3188, 3190, 3192,
3194, 3196, 3198, 3200, 3202, 3204, 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, 3230,
3232, 3234, 3236, 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268,
3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, 3294, 3296, 3298, 3300, 3302, 3304, 3306,
3308, 3310, 3312, 3314, 3316, 3318, 3320, 3322, 3324, 3326, 3328, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344,
3346, 3348, 3350, 3352, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382,
3384, 3386, 3388, 3390, 3392, 3394, 3396, 3398, 3400, 3402, 3404, 3406, 3408, 3410, 3412, 3414, 3416, 3418, 3420,
3422, 3424, 3426, 3428, 3430, 3432, 3434, 3436, 3438, 3440, 3442, 3444, 3446, 3448, 3450, 3452, 3454, 3456, 3458,
3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3496,
3498, 3500, 3502, 3504, 3506, 3508, 3510, 3512, 3514, 3516, 3518, 3520, 3522, 3524, 3526, 3528, 3530, 3532, 3534,
3536, 3538, 3540, 3542, 3544, 3546, 3548, 3550, 3552, 3554, 3556, 3558, 3560, 3562, 3564, 3566, 3568, 3570, 3572,
3574, 3576, 3578, 3580, 3582, 3584, 3586, 3588, 3590, 3592, 3594, 3596, 3598, 3600, 3602, 3604, 3606, 3608, 3610,
3612, 3614, 3616, 3618, 3620, 3622, 3624, 3626, 3628, 3630, 3632, 3634, 3636, 3638, 3640, 3642, 3644, 3646, 3648,
3650, 3652, 3654, 3656, 3658, 3660, 3662, 3664, 3666, 3668, 3670, 3672, 3674, 3676, 3678, 3680, 3682, 3684, 3686,
3688, 3690, 3692, 3694, 3696, 3698, 3700, 3702, 3704, 3706, 3708, 3710, 3712, 3714, 3716, 3718, 3720, 3722, 3724,
3726, 3728, 3730, 3732, 3734, 3736, 3738, 3740, 3742, 3744, 3746, 3748, 3750, 3752, 3754, 3756, 3758, 3760, 3762,
3764, 3766, 3768, 3770, 3772, 3774, 3776, 3778, 3780, 3782, 3784, 3786, 3788, 3790, 3792, 3794, 3796, 3798, 3800,
3802, 3804, 3806, 3808, 3810, 3812, 3814, 3816, 3818, 3820, 3822, 3824, 3826, 3828, 3830, 3832, 3834, 3836, 3838,
3840, 3842, 3844, 3846, 3848, 3850, 3852, 3854, 3856, 3858, 3860, 3862, 3864, 3866, 3868, 3870, 3872, 3874, 3876,
3878, 3880, 3882, 3884, 3886, 3888, 3890, 3892, 3894, 3896, 3898, 3900, 3902, 3904, 3906, 3908, 3910, 3912, 3914,
3916, 3918, 3920, 3922, 3924, 3926, 3928, 3930, 3932, 3934, 3936, 3938, 3940, 3942, 3944, 3946, 3948, 3950, 3952,
3954, 3956, 3958, 3960, 3962, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3980, 3982, 3984, 3986, 3988, 3990,
3992, 3994, 3996, 3998, 4000, 4002, 4004, 4006, 4008, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028,
4030, 4032, 4034, 4036, 4038, 4040, 4042, 4044, 4046, 4048, 4050, 4052, 4054, 4056, 4058, 4060, 4062, 4064, 4066,
4068, 4070, 4072, 4074, 4076, 4078, 4080, 4082, 4084, 4086, 4088, 4090, 4092, 4094, 4096, 4098, 4100, 4102, 4104,
4106, 4108, 4110, 4112, 4114, 4116, 4118, 4120, 4122, 4124, 4126, 4128, 4130, 4132, 4134, 4136, 4138, 4140, 4142,
4144, 4146, 4148, 4150, 4152, 4154, 4156, 4158, 4160, 4162, 4164, 4166, 4168, 4170, 4172, 4174, 4176, 4178, 4180,
4182, 4184, 4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4202, 4204, 4206, 4208, 4210, 4212, 4214, 4216, 4218,
4220, 4222, 4224, 4226, 4228, 4230, 4232, 4234, 4236, 4238, 4240, 4242, 4244, 4246, 4248, 4250, 4252, 4254, 4256,
4258, 4260, 4262, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294,
4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332,
4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370,
4372, 4374, 4376, 4378, 4380, 4382, 4384, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408,
4410, 4412, 4414, 4416, 4418, 4420, 4422, 4424, 4426, 4428, 4430, 4432, 4434, 4436, 4438, 4440, 4442, 4444, 4446,
4448, 4450, 4452, 4454, 4456, 4458, 4460, 4462, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4480, 4482, 4484,
4486, 4488, 4490, 4492, 4494, 4496, 4498, 4500, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4518, 4520, 4522,
4524, 4526, 4528, 4530, 4532, 4534, 4536, 4538, 4540, 4542, 4544, 4546, 4548, 4550, 4552, 4554, 4556, 4558, 4560,
4562, 4564, 4566, 4568, 4570, 4572, 4574, 4576, 4578, 4580, 4582, 4584, 4586, 4588, 4590, 4592, 4594, 4596, 4598,
4600, 4602, 4604, 4606, 4608, 4610, 4612, 4614, 4616, 4618, 4620, 4622, 4624, 4626, 4628, 4630, 4632, 4634, 4636,
4638, 4640, 4642, 4644, 4646, 4648, 4650, 4652, 4654, 4656, 4658, 4660, 4662, 4664, 4666, 4668, 4670, 4672, 4674,
4676, 4678, 4680, 4682, 4684, 4686, 4688, 4690, 4692, 4694, 4696, 4698, 4700, 4702, 4704, 4706, 4708, 4710, 4712,
4714, 4716, 4718, 4720, 4722, 4724, 4726, 4728, 4730, 4732, 4734, 4736, 4738, 4740, 4742, 4744, 4746, 4748, 4750,
4752, 4754, 4756, 4758, 4760, 4762, 4764, 4766, 4768, 4770, 4772, 4774, 4776, 4778, 4780, 4782, 4784, 4786, 4788,
4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 4808, 4810, 4812, 4814, 4816, 4818, 4820, 4822, 4824, 4826,
4828, 4830, 4832, 4834, 4836, 4838, 4840, 4842, 4844, 4846, 4848, 4850, 4852, 4854, 4856, 4858, 4860, 4862, 4864,
4866, 4868, 4870, 4872, 4874, 4876, 4878, 4880, 4882, 4884, 4886, 4888, 4890, 4892, 4894, 4896, 4898, 4900, 4902,
4904, 4906, 4908, 4910, 4912, 4914, 4916, 4918, 4920, 4922, 4924, 4926, 4928, 4930, 4932, 4934, 4936, 4938, 4940,
4942, 4944, 4946, 4948, 4950, 4952, 4954, 4956, 4958, 4960, 4962, 4964, 4966, 4968, 4970, 4972, 4974, 4976, 4978,
4980, 4982, 4984, 4986, 4988, 4990, 4992, 4994, 4996, 4998, 5000, 5002, 5004, 5006, 5008, 5010, 5012, 5014, 5016,
5018, 5020, 5022, 5024, 5026, 5028, 5030, 5032, 5034, 5036, 5038, 5040, 5042, 5044, 5046, 5048, 5050, 5052, 5054,
5056, 5058, 5060, 5062, 5064, 5066, 5068, 5070, 5072, 5074, 5076, 5078, 5080, 5082, 5084, 5086, 5088, 5090, 5092,
5094, 5096, 5098, 5100, 5102, 5104, 5106, 5108, 5110, 5112, 5114, 5116, 5118, 5120, 5122, 5124, 5126, 5128, 5130,
5132, 5134, 5136, 5138, 5140, 5142, 5144, 5146, 5148, 5150, 5152, 5154, 5156, 5158, 5160, 5162, 5164, 5166, 5168,
5170, 5172, 5174, 5176, 5178, 5180, 5182, 5184, 5186, 5188, 5190, 5192, 5194, 5196, 5198, 5200, 5202, 5204, 5206,
5208, 5210, 5212, 5214, 5216, 5218, 5220, 5222, 5224, 5226, 5228, 5230, 5232, 5234, 5236, 5238, 5240, 5242, 5244,
5246, 5248, 5250, 5252, 5254, 5256, 5258, 5260, 5262, 5264, 5266, 5268, 5270, 5272, 5274, 5276, 5278, 5280, 5282,
5284, 5286, 5288, 5290, 5292, 5294, 5296, 5298, 5300, 5302, 5304, 5306, 5308, 5310, 5312, 5314, 5316, 5318, 5320,
5322, 5324, 5326, 5328, 5330, 5332, 5334, 5336, 5338, 5340, 5342, 5344, 5346, 5348, 5350, 5352, 5354, 5356, 5358,
5360, 5362, 5364, 5366, 5368, 5370, 5372, 5374, 5376, 5378, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396,
5398, 5400, 5402, 5404, 5406, 5408, 5410, 5412, 5414, 5416, 5418, 5420, 5422, 5424, 5426, 5428, 5430, 5432, 5434,
5436, 5438, 5440, 5442, 5444, 5446, 5448, 5450, 5452, 5454, 5456, 5458, 5460, 5462, 5464, 5466, 5468, 5470, 5472,
5474, 5476, 5478, 5480, 5482, 5484, 5486, 5488, 5490, 5492, 5494, 5496, 5498, 5500, 5502, 5504, 5506, 5508, 5510,
5512, 5514, 5516, 5518, 5520, 5522, 5524, 5526, 5528, 5530, 5532, 5534, 5536, 5538, 5540, 5542, 5544, 5546, 5548,
5550, 5552, 5554, 5556, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586,
5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 5606, 5608, 5610, 5612, 5614, 5616, 5618, 5620, 5622, 5624,
5626, 5628, 5630, 5632, 5634, 5636, 5638, 5640, 5642, 5644, 5646, 5648, 5650, 5652, 5654, 5656, 5658, 5660, 5662,
5664, 5666, 5668, 5670, 5672, 5674, 5676, 5678, 5680, 5682, 5684, 5686, 5688, 5690, 5692, 5694, 5696, 5698, 5700,
5702, 5704, 5706, 5708, 5710, 5712, 5714, 5716, 5718, 5720, 5722, 5724, 5726, 5728, 5730, 5732, 5734, 5736, 5738,
5740, 5742, 5744, 5746, 5748, 5750, 5752, 5754, 5756, 5758, 5760, 5762, 5764, 5766, 5768, 5770, 5772, 5774, 5776,
5778, 5780, 5782, 5784, 5786, 5788, 5790, 5792, 5794, 5796, 5798, 5800, 5802, 5804, 5806, 5808, 5810, 5812, 5814,
5816, 5818, 5820, 5822, 5824, 5826, 5828, 5830, 5832, 5834, 5836, 5838, 5840, 5842, 5844, 5846, 5848, 5850, 5852,
5854, 5856, 5858, 5860, 5862, 5864, 5866, 5868, 5870, 5872, 5874, 5876, 5878, 5880, 5882, 5884, 5886, 5888, 5890,
5892, 5894, 5896, 5898, 5900, 5902, 5904, 5906, 5908, 5910, 5912, 5914, 5916, 5918, 5920, 5922, 5924, 5926, 5928,
5930, 5932, 5934, 5936, 5938, 5940, 5942, 5944, 5946, 5948, 5950, 5952, 5954, 5956, 5958, 5960, 5962, 5964, 5966,
5968, 5970, 5972, 5974, 5976, 5978, 5980, 5982, 5984, 5986, 5988, 5990, 5992, 5994, 5996, 5998, 6000, 6002, 6004,
6006, 6008, 6010, 6012, 6014, 6016, 6018, 6020, 6022, 6024, 6026, 6028, 6030, 6032, 6034, 6036, 6038, 6040, 6042,
6044, 6046, 6048, 6050, 6052, 6054, 6056, 6058, 6060, 6062, 6064, 6066, 6068, 6070, 6072, 6074, 6076, 6078, 6080,
6082, 6084, 6086, 6088, 6090, 6092, 6094, 6096, 6098, 6100, 6102, 6104, 6106, 6108, 6110, 6112, 6114, 6116, 6118,
6120, 6122, 6124, 6126, 6128, 6130, 6132, 6134, 6136, 6138, 6140, 6142, 6144, 6146, 6148, 6150, 6152, 6154, 6156,
6158, 6160, 6162, 6164, 6166, 6168, 6170, 6172, 6174, 6176, 6178, 6180, 6182, 6184, 6186, 6188, 6190, 6192, 6194,
6196, 6198, 6200, 6202, 6204, 6206, 6208, 6210, 6212, 6214, 6216, 6218, 6220, 6222, 6224, 6226, 6228, 6230, 6232,
6234, 6236, 6238, 6240, 6242, 6244, 6246, 6248, 6250, 6252, 6254, 6256, 6258, 6260, 6262, 6264, 6266, 6268, 6270,
6272, 6274, 6276, 6278, 6280, 6282, 6284, 6286, 6288, 6290, 6292, 6294, 6296, 6298, 6300, 6302, 6304, 6306, 6308,
6310, 6312, 6314, 6316, 6318, 6320, 6322, 6324, 6326, 6328, 6330, 6332, 6334, 6336, 6338, 6340, 6342, 6344, 6346,
6348, 6350, 6352, 6354, 6356, 6358, 6360, 6362, 6364, 6366, 6368, 6370, 6372, 6374, 6376, 6378, 6380, 6382, 6384,
6386, 6388, 6390, 6392, 6394, 6396, 6398, 6400, 6402, 6404, 6406, 6408, 6410, 6412, 6414, 6416, 6418, 6420, 6422,
6424, 6426, 6428, 6430, 6432, 6434, 6436, 6438, 6440, 6442, 6444, 6446, 6448, 6450, 6452, 6454, 6456, 6458, 6460,
6462, 6464, 6466, 6468, 6470, 6472, 6474, 6476, 6478, 6480, 6482, 6484, 6486, 6488, 6490, 6492, 6494, 6496, 6498,
6500, 6502, 6504, 6506, 6508, 6510, 6512, 6514, 6516, 6518, 6520, 6522, 6524, 6526, 6528, 6530, 6532, 6534, 6536,
6538, 6540, 6542, 6544, 6546, 6548, 6550, 6552, 6554, 6556, 6558, 6560, 6562, 6564, 6566, 6568, 6570, 6572, 6574,
6576, 6578, 6580, 6582, 6584, 6586, 6588, 6590, 6592, 6594, 6596, 6598, 6600, 6602, 6604, 6606, 6608, 6610, 6612,
6614, 6616, 6618, 6620, 6622, 6624, 6626, 6628, 6630, 6632, 6634, 6636, 6638, 6640, 6642, 6644, 6646, 6648, 6650,
6652, 6654, 6656, 6658, 6660, 6662, 6664, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688,
6690, 6692, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6718, 6720, 6722, 6724, 6726,
6728, 6730, 6732, 6734, 6736, 6738, 6740, 6742, 6744, 6746, 6748, 6750, 6752, 6754, 6756, 6758, 6760, 6762, 6764,
6766, 6768, 6770, 6772, 6774, 6776, 6778, 6780, 6782, 6784, 6786, 6788, 6790, 6792, 6794, 6796, 6798, 6800, 6802,
6804, 6806, 6808, 6810, 6812, 6814, 6816, 6818, 6820, 6822, 6824, 6826, 6828, 6830, 6832, 6834, 6836, 6838, 6840,
6842, 6844, 6846, 6848, 6850, 6852, 6854, 6856, 6858, 6860, 6862, 6864, 6866, 6868, 6870, 6872, 6874, 6876, 6878,
6880, 6882, 6884, 6886, 6888, 6890, 6892, 6894, 6896, 6898, 6900, 6902, 6904, 6906, 6908, 6910, 6912, 6914, 6916,
6918, 6920, 6922, 6924, 6926, 6928, 6930, 6932, 6934, 6936, 6938, 6940, 6942, 6944, 6946, 6948, 6950, 6952, 6954,
6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992,
6994, 6996, 6998, 7000, 7002, 7004, 7006, 7008, 7010, 7012, 7014, 7016, 7018, 7020, 7022, 7024, 7026, 7028, 7030,
7032, 7034, 7036, 7038, 7040, 7042, 7044, 7046, 7048, 7050, 7052, 7054, 7056, 7058, 7060, 7062, 7064, 7066, 7068,
7070, 7072, 7074, 7076, 7078, 7080, 7082, 7084, 7086, 7088, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106,
7108, 7110, 7112, 7114, 7116, 7118, 7120, 7122, 7124, 7126, 7128, 7130, 7132, 7134, 7136, 7138, 7140, 7142, 7144,
7146, 7148, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7178, 7180, 7182,
7184, 7186, 7188, 7190, 7192, 7194, 7196, 7198, 7200, 7202, 7204, 7206, 7208, 7210, 7212, 7214, 7216, 7218, 7220,
7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258,
7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296,
7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334,
7336, 7338, 7340, 7342, 7344, 7346, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372,
7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410,
7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448,
7450, 7452, 7454, 7456, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7474, 7476, 7478, 7480, 7482, 7484, 7486,
7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7516, 7518, 7520, 7522, 7524,
7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7542, 7544, 7546, 7548, 7550, 7552, 7554, 7556, 7558, 7560, 7562,
7564, 7566, 7568, 7570, 7572, 7574, 7576, 7578, 7580, 7582, 7584, 7586, 7588, 7590, 7592, 7594, 7596, 7598, 7600,
7602, 7604, 7606, 7608, 7610, 7612, 7614, 7616, 7618, 7620, 7622, 7624, 7626, 7628, 7630, 7632, 7634, 7636, 7638,
7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676,
7678, 7680, 7682, 7684, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, 7704, 7706, 7708, 7710, 7712, 7714,
7716, 7718, 7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, 7736, 7738, 7740, 7742, 7744, 7746, 7748, 7750, 7752,
7754, 7756, 7758, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790,
7792, 7794, 7796, 7798, 7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, 7816, 7818, 7820, 7822, 7824, 7826, 7828,
7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866,
7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904,
7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, 7936, 7938, 7940, 7942,
7944, 7946, 7948, 7950, 7952, 7954, 7956, 7958, 7960, 7962, 7964, 7966, 7968, 7970, 7972, 7974, 7976, 7978, 7980,
7982, 7984, 7986, 7988, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 8006, 8008, 8010, 8012, 8014, 8016, 8018,
8020, 8022, 8024, 8026, 8028, 8030, 8032, 8034, 8036, 8038, 8040, 8042, 8044, 8046, 8048, 8050, 8052, 8054, 8056,
8058, 8060, 8062, 8064, 8066, 8068, 8070, 8072, 8074, 8076, 8078, 8080, 8082, 8084, 8086, 8088, 8090, 8092, 8094,
8096, 8098, 8100, 8102, 8104, 8106, 8108, 8110, 8112, 8114, 8116, 8118, 8120, 8122, 8124, 8126, 8128, 8130, 8132,
8134, 8136, 8138, 8140, 8142, 8144, 8146, 8148, 8150, 8152, 8154, 8156, 8158, 8160, 8162, 8164, 8166, 8168, 8170,
8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 8194, 8196, 8198, 8200, 8202, 8204, 8206, 8208,
8210, 8212, 8214, 8216, 8218, 8220, 8222, 8224, 8226, 8228, 8230, 8232, 8234, 8236, 8238, 8240, 8242, 8244, 8246,
8248, 8250, 8252, 8254, 8256, 8258, 8260, 8262, 8264, 8266, 8268, 8270, 8272, 8274, 8276, 8278, 8280, 8282, 8284,
8286, 8288, 8290, 8292, 8294, 8296, 8298, 8300, 8302, 8304, 8306, 8308, 8310, 8312, 8314, 8316, 8318, 8320, 8322,
8324, 8326, 8328, 8330, 8332, 8334, 8336, 8338, 8340, 8342, 8344, 8346, 8348, 8350, 8352, 8354, 8356, 8358, 8360,
8362, 8364, 8366, 8368, 8370, 8372, 8374, 8376, 8378, 8380, 8382, 8384, 8386, 8388, 8390, 8392, 8394, 8396, 8398,
8400, 8402, 8404, 8406, 8408, 8410, 8412, 8414, 8416, 8418, 8420, 8422, 8424, 8426, 8428, 8430, 8432, 8434, 8436,
8438, 8440, 8442, 8444, 8446, 8448, 8450, 8452, 8454, 8456, 8458, 8460, 8462, 8464, 8466, 8468, 8470, 8472, 8474,
8476, 8478, 8480, 8482, 8484, 8486, 8488, 8490, 8492, 8494, 8496, 8498, 8500, 8502, 8504, 8506, 8508, 8510, 8512,
8514, 8516, 8518, 8520, 8522, 8524, 8526, 8528, 8530, 8532, 8534, 8536, 8538, 8540, 8542, 8544, 8546, 8548, 8550,
8552, 8554, 8556, 8558, 8560, 8562, 8564, 8566, 8568, 8570, 8572, 8574, 8576, 8578, 8580, 8582, 8584, 8586, 8588,
8590, 8592, 8594, 8596, 8598, 8600, 8602, 8604, 8606, 8608, 8610, 8612, 8614, 8616, 8618, 8620, 8622, 8624, 8626,
8628, 8630, 8632, 8634, 8636, 8638, 8640, 8642, 8644, 8646, 8648, 8650, 8652, 8654, 8656, 8658, 8660, 8662, 8664,
8666, 8668, 8670, 8672, 8674, 8676, 8678, 8680, 8682, 8684, 8686, 8688, 8690, 8692, 8694, 8696, 8698, 8700, 8702,
8704, 8706, 8708, 8710, 8712, 8714, 8716, 8718, 8720, 8722, 8724, 8726, 8728, 8730, 8732, 8734, 8736, 8738, 8740,
8742, 8744, 8746, 8748, 8750, 8752, 8754, 8756, 8758, 8760, 8762, 8764, 8766, 8768, 8770, 8772, 8774, 8776, 8778,
8780, 8782, 8784, 8786, 8788, 8790, 8792, 8794, 8796, 8798, 8800, 8802, 8804, 8806, 8808, 8810, 8812, 8814, 8816,
8818, 8820, 8822, 8824, 8826, 8828, 8830, 8832, 8834, 8836, 8838, 8840, 8842, 8844, 8846, 8848, 8850, 8852, 8854,
8856, 8858, 8860, 8862, 8864, 8866, 8868, 8870, 8872, 8874, 8876, 8878, 8880, 8882, 8884, 8886, 8888, 8890, 8892,
8894, 8896, 8898, 8900, 8902, 8904, 8906, 8908, 8910, 8912, 8914, 8916, 8918, 8920, 8922, 8924, 8926, 8928, 8930,
8932, 8934, 8936, 8938, 8940, 8942, 8944, 8946, 8948, 8950, 8952, 8954, 8956, 8958, 8960, 8962, 8964, 8966, 8968,
8970, 8972, 8974, 8976, 8978, 8980, 8982, 8984, 8986, 8988, 8990, 8992, 8994, 8996, 8998, 9000, 9002, 9004, 9006,
9008, 9010, 9012, 9014, 9016, 9018, 9020, 9022, 9024, 9026, 9028, 9030, 9032, 9034, 9036, 9038, 9040, 9042, 9044,
9046, 9048, 9050, 9052, 9054, 9056, 9058, 9060, 9062, 9064, 9066, 9068, 9070, 9072, 9074, 9076, 9078, 9080, 9082,
9084, 9086, 9088, 9090, 9092, 9094, 9096, 9098, 9100, 9102, 9104, 9106, 9108, 9110, 9112, 9114, 9116, 9118, 9120,
9122, 9124, 9126, 9128, 9130, 9132, 9134, 9136, 9138, 9140, 9142, 9144, 9146, 9148, 9150, 9152, 9154, 9156, 9158,
9160, 9162, 9164, 9166, 9168, 9170, 9172, 9174, 9176, 9178, 9180, 9182, 9184, 9186, 9188, 9190, 9192, 9194, 9196,
9198, 9200, 9202, 9204, 9206, 9208, 9210, 9212, 9214, 9216, 9218, 9220, 9222, 9224, 9226, 9228, 9230, 9232, 9234,
9236, 9238, 9240, 9242, 9244, 9246, 9248, 9250, 9252, 9254, 9256, 9258, 9260, 9262, 9264, 9266, 9268, 9270, 9272,
9274, 9276, 9278, 9280, 9282, 9284, 9286, 9288, 9290, 9292, 9294, 9296, 9298, 9300, 9302, 9304, 9306, 9308, 9310,
9312, 9314, 9316, 9318, 9320, 9322, 9324, 9326, 9328, 9330, 9332, 9334, 9336, 9338, 9340, 9342, 9344, 9346, 9348,
9350, 9352, 9354, 9356, 9358, 9360, 9362, 9364, 9366, 9368, 9370, 9372, 9374, 9376, 9378, 9380, 9382, 9384, 9386,
9388, 9390, 9392, 9394, 9396, 9398, 9400, 9402, 9404, 9406, 9408, 9410, 9412, 9414, 9416, 9418, 9420, 9422, 9424,
9426, 9428, 9430, 9432, 9434, 9436, 9438, 9440, 9442, 9444, 9446, 9448, 9450, 9452, 9454, 9456, 9458, 9460, 9462,
9464, 9466, 9468, 9470, 9472, 9474, 9476, 9478, 9480, 9482, 9484, 9486, 9488, 9490, 9492, 9494, 9496, 9498, 9500,
9502, 9504, 9506, 9508, 9510, 9512, 9514, 9516, 9518, 9520, 9522, 9524, 9526, 9528, 9530, 9532, 9534, 9536, 9538,
9540, 9542, 9544, 9546, 9548, 9550, 9552, 9554, 9556, 9558, 9560, 9562, 9564, 9566, 9568, 9570, 9572, 9574, 9576,
9578, 9580, 9582, 9584, 9586, 9588, 9590, 9592, 9594, 9596, 9598, 9600, 9602, 9604, 9606, 9608, 9610, 9612, 9614,
9616, 9618, 9620, 9622, 9624, 9626, 9628, 9630, 9632, 9634, 9636, 9638, 9640, 9642, 9644, 9646, 9648, 9650, 9652,
9654, 9656, 9658, 9660, 9662, 9664, 9666, 9668, 9670, 9672, 9674, 9676, 9678, 9680, 9682, 9684, 9686, 9688, 9690,
9692, 9694, 9696, 9698, 9700, 9702, 9704, 9706, 9708, 9710, 9712, 9714, 9716, 9718, 9720, 9722, 9724, 9726, 9728,
9730, 9732, 9734, 9736, 9738, 9740, 9742, 9744, 9746, 9748, 9750, 9752, 9754, 9756, 9758, 9760, 9762, 9764, 9766,
9768, 9770, 9772, 9774, 9776, 9778, 9780, 9782, 9784, 9786, 9788, 9790, 9792, 9794, 9796, 9798, 9800, 9802, 9804,
9806, 9808, 9810, 9812, 9814, 9816, 9818, 9820, 9822, 9824, 9826, 9828, 9830, 9832, 9834, 9836, 9838, 9840, 9842,
9844, 9846, 9848, 9850, 9852, 9854, 9856, 9858, 9860, 9862, 9864, 9866, 9868, 9870, 9872, 9874, 9876, 9878, 9880,
9882, 9884, 9886, 9888, 9890, 9892, 9894, 9896, 9898, 9900, 9902, 9904, 9906, 9908, 9910, 9912, 9914, 9916, 9918,
9920, 9922, 9924, 9926, 9928, 9930, 9932, 9934, 9936, 9938, 9940, 9942, 9944, 9946, 9948, 9950, 9952, 9954, 9956,
9958, 9960, 9962, 9964, 9966, 9968, 9970, 9972, 9974, 9976, 9978, 9980, 9982, 9984, 9986, 9988, 9990, 9992, 9994,
9996, 9998, 10000, 10002]
res = Solution().fairCandySwap(a1, b1)  # run the solver on the large generated inputs a1/b1
print(res)
# Place the code block to be timed above this line (between start and end).
end = time.time()
print('Running time: %s Seconds' % (end - start))
| 104.638158 | 120 | 0.623546 |
import time

__author__ = 'Max_Pengjb'
# Wall-clock start of the benchmark; elapsed time is printed at the bottom of the script.
start = time.time()
class Solution(object):
    def fairCandySwap(self, A, B):
        """Solve LeetCode 888 "Fair Candy Swap".

        Alice's candy bar sizes are in ``A``, Bob's in ``B``.  Exchanging
        bar ``a`` (Alice's) for bar ``b`` (Bob's) equalizes the totals when
        ``sum(A) - a + b == sum(B) - b + a``, i.e. ``a - b == (sum(A) - sum(B)) / 2``.

        :param A: list of Alice's bar sizes
        :param B: list of Bob's bar sizes
        :return: ``[a, b]`` for the first qualifying ``a`` in ``A``; ``None``
                 if no pair exists (the problem statement guarantees one).
        """
        # Required size difference between the swapped bars.
        diff = (sum(A) - sum(B)) // 2
        # A plain set gives the same O(1) membership test as the original
        # dict-of-ones (dict(zip(B, [1 for _ in B]))) without the busywork.
        sizes_b = set(B)
        for a in A:
            if a - diff in sizes_b:
                return [a, a - diff]
# Small sample input (LeetCode 888 example); sums are 8 and 6.
A = [1, 2, 5]
B = [2, 4]
# Large stress-test input: every odd number in [1, 9999] (5000 values),
# followed by a final 4982 — identical to the original hand-written literal.
a1 = list(range(1, 10000, 2)) + [4982]
b1 = [2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58,
60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112,
114, 116, 118, 120, 122, 124, 126, 128, 130, 132, 134, 136, 138, 140, 142, 144, 146, 148, 150, 152, 154, 156, 158,
160, 162, 164, 166, 168, 170, 172, 174, 176, 178, 180, 182, 184, 186, 188, 190, 192, 194, 196, 198, 200, 202, 204,
206, 208, 210, 212, 214, 216, 218, 220, 222, 224, 226, 228, 230, 232, 234, 236, 238, 240, 242, 244, 246, 248, 250,
252, 254, 256, 258, 260, 262, 264, 266, 268, 270, 272, 274, 276, 278, 280, 282, 284, 286, 288, 290, 292, 294, 296,
298, 300, 302, 304, 306, 308, 310, 312, 314, 316, 318, 320, 322, 324, 326, 328, 330, 332, 334, 336, 338, 340, 342,
344, 346, 348, 350, 352, 354, 356, 358, 360, 362, 364, 366, 368, 370, 372, 374, 376, 378, 380, 382, 384, 386, 388,
390, 392, 394, 396, 398, 400, 402, 404, 406, 408, 410, 412, 414, 416, 418, 420, 422, 424, 426, 428, 430, 432, 434,
436, 438, 440, 442, 444, 446, 448, 450, 452, 454, 456, 458, 460, 462, 464, 466, 468, 470, 472, 474, 476, 478, 480,
482, 484, 486, 488, 490, 492, 494, 496, 498, 500, 502, 504, 506, 508, 510, 512, 514, 516, 518, 520, 522, 524, 526,
528, 530, 532, 534, 536, 538, 540, 542, 544, 546, 548, 550, 552, 554, 556, 558, 560, 562, 564, 566, 568, 570, 572,
574, 576, 578, 580, 582, 584, 586, 588, 590, 592, 594, 596, 598, 600, 602, 604, 606, 608, 610, 612, 614, 616, 618,
620, 622, 624, 626, 628, 630, 632, 634, 636, 638, 640, 642, 644, 646, 648, 650, 652, 654, 656, 658, 660, 662, 664,
666, 668, 670, 672, 674, 676, 678, 680, 682, 684, 686, 688, 690, 692, 694, 696, 698, 700, 702, 704, 706, 708, 710,
712, 714, 716, 718, 720, 722, 724, 726, 728, 730, 732, 734, 736, 738, 740, 742, 744, 746, 748, 750, 752, 754, 756,
758, 760, 762, 764, 766, 768, 770, 772, 774, 776, 778, 780, 782, 784, 786, 788, 790, 792, 794, 796, 798, 800, 802,
804, 806, 808, 810, 812, 814, 816, 818, 820, 822, 824, 826, 828, 830, 832, 834, 836, 838, 840, 842, 844, 846, 848,
850, 852, 854, 856, 858, 860, 862, 864, 866, 868, 870, 872, 874, 876, 878, 880, 882, 884, 886, 888, 890, 892, 894,
896, 898, 900, 902, 904, 906, 908, 910, 912, 914, 916, 918, 920, 922, 924, 926, 928, 930, 932, 934, 936, 938, 940,
942, 944, 946, 948, 950, 952, 954, 956, 958, 960, 962, 964, 966, 968, 970, 972, 974, 976, 978, 980, 982, 984, 986,
988, 990, 992, 994, 996, 998, 1000, 1002, 1004, 1006, 1008, 1010, 1012, 1014, 1016, 1018, 1020, 1022, 1024, 1026,
1028, 1030, 1032, 1034, 1036, 1038, 1040, 1042, 1044, 1046, 1048, 1050, 1052, 1054, 1056, 1058, 1060, 1062, 1064,
1066, 1068, 1070, 1072, 1074, 1076, 1078, 1080, 1082, 1084, 1086, 1088, 1090, 1092, 1094, 1096, 1098, 1100, 1102,
1104, 1106, 1108, 1110, 1112, 1114, 1116, 1118, 1120, 1122, 1124, 1126, 1128, 1130, 1132, 1134, 1136, 1138, 1140,
1142, 1144, 1146, 1148, 1150, 1152, 1154, 1156, 1158, 1160, 1162, 1164, 1166, 1168, 1170, 1172, 1174, 1176, 1178,
1180, 1182, 1184, 1186, 1188, 1190, 1192, 1194, 1196, 1198, 1200, 1202, 1204, 1206, 1208, 1210, 1212, 1214, 1216,
1218, 1220, 1222, 1224, 1226, 1228, 1230, 1232, 1234, 1236, 1238, 1240, 1242, 1244, 1246, 1248, 1250, 1252, 1254,
1256, 1258, 1260, 1262, 1264, 1266, 1268, 1270, 1272, 1274, 1276, 1278, 1280, 1282, 1284, 1286, 1288, 1290, 1292,
1294, 1296, 1298, 1300, 1302, 1304, 1306, 1308, 1310, 1312, 1314, 1316, 1318, 1320, 1322, 1324, 1326, 1328, 1330,
1332, 1334, 1336, 1338, 1340, 1342, 1344, 1346, 1348, 1350, 1352, 1354, 1356, 1358, 1360, 1362, 1364, 1366, 1368,
1370, 1372, 1374, 1376, 1378, 1380, 1382, 1384, 1386, 1388, 1390, 1392, 1394, 1396, 1398, 1400, 1402, 1404, 1406,
1408, 1410, 1412, 1414, 1416, 1418, 1420, 1422, 1424, 1426, 1428, 1430, 1432, 1434, 1436, 1438, 1440, 1442, 1444,
1446, 1448, 1450, 1452, 1454, 1456, 1458, 1460, 1462, 1464, 1466, 1468, 1470, 1472, 1474, 1476, 1478, 1480, 1482,
1484, 1486, 1488, 1490, 1492, 1494, 1496, 1498, 1500, 1502, 1504, 1506, 1508, 1510, 1512, 1514, 1516, 1518, 1520,
1522, 1524, 1526, 1528, 1530, 1532, 1534, 1536, 1538, 1540, 1542, 1544, 1546, 1548, 1550, 1552, 1554, 1556, 1558,
1560, 1562, 1564, 1566, 1568, 1570, 1572, 1574, 1576, 1578, 1580, 1582, 1584, 1586, 1588, 1590, 1592, 1594, 1596,
1598, 1600, 1602, 1604, 1606, 1608, 1610, 1612, 1614, 1616, 1618, 1620, 1622, 1624, 1626, 1628, 1630, 1632, 1634,
1636, 1638, 1640, 1642, 1644, 1646, 1648, 1650, 1652, 1654, 1656, 1658, 1660, 1662, 1664, 1666, 1668, 1670, 1672,
1674, 1676, 1678, 1680, 1682, 1684, 1686, 1688, 1690, 1692, 1694, 1696, 1698, 1700, 1702, 1704, 1706, 1708, 1710,
1712, 1714, 1716, 1718, 1720, 1722, 1724, 1726, 1728, 1730, 1732, 1734, 1736, 1738, 1740, 1742, 1744, 1746, 1748,
1750, 1752, 1754, 1756, 1758, 1760, 1762, 1764, 1766, 1768, 1770, 1772, 1774, 1776, 1778, 1780, 1782, 1784, 1786,
1788, 1790, 1792, 1794, 1796, 1798, 1800, 1802, 1804, 1806, 1808, 1810, 1812, 1814, 1816, 1818, 1820, 1822, 1824,
1826, 1828, 1830, 1832, 1834, 1836, 1838, 1840, 1842, 1844, 1846, 1848, 1850, 1852, 1854, 1856, 1858, 1860, 1862,
1864, 1866, 1868, 1870, 1872, 1874, 1876, 1878, 1880, 1882, 1884, 1886, 1888, 1890, 1892, 1894, 1896, 1898, 1900,
1902, 1904, 1906, 1908, 1910, 1912, 1914, 1916, 1918, 1920, 1922, 1924, 1926, 1928, 1930, 1932, 1934, 1936, 1938,
1940, 1942, 1944, 1946, 1948, 1950, 1952, 1954, 1956, 1958, 1960, 1962, 1964, 1966, 1968, 1970, 1972, 1974, 1976,
1978, 1980, 1982, 1984, 1986, 1988, 1990, 1992, 1994, 1996, 1998, 2000, 2002, 2004, 2006, 2008, 2010, 2012, 2014,
2016, 2018, 2020, 2022, 2024, 2026, 2028, 2030, 2032, 2034, 2036, 2038, 2040, 2042, 2044, 2046, 2048, 2050, 2052,
2054, 2056, 2058, 2060, 2062, 2064, 2066, 2068, 2070, 2072, 2074, 2076, 2078, 2080, 2082, 2084, 2086, 2088, 2090,
2092, 2094, 2096, 2098, 2100, 2102, 2104, 2106, 2108, 2110, 2112, 2114, 2116, 2118, 2120, 2122, 2124, 2126, 2128,
2130, 2132, 2134, 2136, 2138, 2140, 2142, 2144, 2146, 2148, 2150, 2152, 2154, 2156, 2158, 2160, 2162, 2164, 2166,
2168, 2170, 2172, 2174, 2176, 2178, 2180, 2182, 2184, 2186, 2188, 2190, 2192, 2194, 2196, 2198, 2200, 2202, 2204,
2206, 2208, 2210, 2212, 2214, 2216, 2218, 2220, 2222, 2224, 2226, 2228, 2230, 2232, 2234, 2236, 2238, 2240, 2242,
2244, 2246, 2248, 2250, 2252, 2254, 2256, 2258, 2260, 2262, 2264, 2266, 2268, 2270, 2272, 2274, 2276, 2278, 2280,
2282, 2284, 2286, 2288, 2290, 2292, 2294, 2296, 2298, 2300, 2302, 2304, 2306, 2308, 2310, 2312, 2314, 2316, 2318,
2320, 2322, 2324, 2326, 2328, 2330, 2332, 2334, 2336, 2338, 2340, 2342, 2344, 2346, 2348, 2350, 2352, 2354, 2356,
2358, 2360, 2362, 2364, 2366, 2368, 2370, 2372, 2374, 2376, 2378, 2380, 2382, 2384, 2386, 2388, 2390, 2392, 2394,
2396, 2398, 2400, 2402, 2404, 2406, 2408, 2410, 2412, 2414, 2416, 2418, 2420, 2422, 2424, 2426, 2428, 2430, 2432,
2434, 2436, 2438, 2440, 2442, 2444, 2446, 2448, 2450, 2452, 2454, 2456, 2458, 2460, 2462, 2464, 2466, 2468, 2470,
2472, 2474, 2476, 2478, 2480, 2482, 2484, 2486, 2488, 2490, 2492, 2494, 2496, 2498, 2500, 2502, 2504, 2506, 2508,
2510, 2512, 2514, 2516, 2518, 2520, 2522, 2524, 2526, 2528, 2530, 2532, 2534, 2536, 2538, 2540, 2542, 2544, 2546,
2548, 2550, 2552, 2554, 2556, 2558, 2560, 2562, 2564, 2566, 2568, 2570, 2572, 2574, 2576, 2578, 2580, 2582, 2584,
2586, 2588, 2590, 2592, 2594, 2596, 2598, 2600, 2602, 2604, 2606, 2608, 2610, 2612, 2614, 2616, 2618, 2620, 2622,
2624, 2626, 2628, 2630, 2632, 2634, 2636, 2638, 2640, 2642, 2644, 2646, 2648, 2650, 2652, 2654, 2656, 2658, 2660,
2662, 2664, 2666, 2668, 2670, 2672, 2674, 2676, 2678, 2680, 2682, 2684, 2686, 2688, 2690, 2692, 2694, 2696, 2698,
2700, 2702, 2704, 2706, 2708, 2710, 2712, 2714, 2716, 2718, 2720, 2722, 2724, 2726, 2728, 2730, 2732, 2734, 2736,
2738, 2740, 2742, 2744, 2746, 2748, 2750, 2752, 2754, 2756, 2758, 2760, 2762, 2764, 2766, 2768, 2770, 2772, 2774,
2776, 2778, 2780, 2782, 2784, 2786, 2788, 2790, 2792, 2794, 2796, 2798, 2800, 2802, 2804, 2806, 2808, 2810, 2812,
2814, 2816, 2818, 2820, 2822, 2824, 2826, 2828, 2830, 2832, 2834, 2836, 2838, 2840, 2842, 2844, 2846, 2848, 2850,
2852, 2854, 2856, 2858, 2860, 2862, 2864, 2866, 2868, 2870, 2872, 2874, 2876, 2878, 2880, 2882, 2884, 2886, 2888,
2890, 2892, 2894, 2896, 2898, 2900, 2902, 2904, 2906, 2908, 2910, 2912, 2914, 2916, 2918, 2920, 2922, 2924, 2926,
2928, 2930, 2932, 2934, 2936, 2938, 2940, 2942, 2944, 2946, 2948, 2950, 2952, 2954, 2956, 2958, 2960, 2962, 2964,
2966, 2968, 2970, 2972, 2974, 2976, 2978, 2980, 2982, 2984, 2986, 2988, 2990, 2992, 2994, 2996, 2998, 3000, 3002,
3004, 3006, 3008, 3010, 3012, 3014, 3016, 3018, 3020, 3022, 3024, 3026, 3028, 3030, 3032, 3034, 3036, 3038, 3040,
3042, 3044, 3046, 3048, 3050, 3052, 3054, 3056, 3058, 3060, 3062, 3064, 3066, 3068, 3070, 3072, 3074, 3076, 3078,
3080, 3082, 3084, 3086, 3088, 3090, 3092, 3094, 3096, 3098, 3100, 3102, 3104, 3106, 3108, 3110, 3112, 3114, 3116,
3118, 3120, 3122, 3124, 3126, 3128, 3130, 3132, 3134, 3136, 3138, 3140, 3142, 3144, 3146, 3148, 3150, 3152, 3154,
3156, 3158, 3160, 3162, 3164, 3166, 3168, 3170, 3172, 3174, 3176, 3178, 3180, 3182, 3184, 3186, 3188, 3190, 3192,
3194, 3196, 3198, 3200, 3202, 3204, 3206, 3208, 3210, 3212, 3214, 3216, 3218, 3220, 3222, 3224, 3226, 3228, 3230,
3232, 3234, 3236, 3238, 3240, 3242, 3244, 3246, 3248, 3250, 3252, 3254, 3256, 3258, 3260, 3262, 3264, 3266, 3268,
3270, 3272, 3274, 3276, 3278, 3280, 3282, 3284, 3286, 3288, 3290, 3292, 3294, 3296, 3298, 3300, 3302, 3304, 3306,
3308, 3310, 3312, 3314, 3316, 3318, 3320, 3322, 3324, 3326, 3328, 3330, 3332, 3334, 3336, 3338, 3340, 3342, 3344,
3346, 3348, 3350, 3352, 3354, 3356, 3358, 3360, 3362, 3364, 3366, 3368, 3370, 3372, 3374, 3376, 3378, 3380, 3382,
3384, 3386, 3388, 3390, 3392, 3394, 3396, 3398, 3400, 3402, 3404, 3406, 3408, 3410, 3412, 3414, 3416, 3418, 3420,
3422, 3424, 3426, 3428, 3430, 3432, 3434, 3436, 3438, 3440, 3442, 3444, 3446, 3448, 3450, 3452, 3454, 3456, 3458,
3460, 3462, 3464, 3466, 3468, 3470, 3472, 3474, 3476, 3478, 3480, 3482, 3484, 3486, 3488, 3490, 3492, 3494, 3496,
3498, 3500, 3502, 3504, 3506, 3508, 3510, 3512, 3514, 3516, 3518, 3520, 3522, 3524, 3526, 3528, 3530, 3532, 3534,
3536, 3538, 3540, 3542, 3544, 3546, 3548, 3550, 3552, 3554, 3556, 3558, 3560, 3562, 3564, 3566, 3568, 3570, 3572,
3574, 3576, 3578, 3580, 3582, 3584, 3586, 3588, 3590, 3592, 3594, 3596, 3598, 3600, 3602, 3604, 3606, 3608, 3610,
3612, 3614, 3616, 3618, 3620, 3622, 3624, 3626, 3628, 3630, 3632, 3634, 3636, 3638, 3640, 3642, 3644, 3646, 3648,
3650, 3652, 3654, 3656, 3658, 3660, 3662, 3664, 3666, 3668, 3670, 3672, 3674, 3676, 3678, 3680, 3682, 3684, 3686,
3688, 3690, 3692, 3694, 3696, 3698, 3700, 3702, 3704, 3706, 3708, 3710, 3712, 3714, 3716, 3718, 3720, 3722, 3724,
3726, 3728, 3730, 3732, 3734, 3736, 3738, 3740, 3742, 3744, 3746, 3748, 3750, 3752, 3754, 3756, 3758, 3760, 3762,
3764, 3766, 3768, 3770, 3772, 3774, 3776, 3778, 3780, 3782, 3784, 3786, 3788, 3790, 3792, 3794, 3796, 3798, 3800,
3802, 3804, 3806, 3808, 3810, 3812, 3814, 3816, 3818, 3820, 3822, 3824, 3826, 3828, 3830, 3832, 3834, 3836, 3838,
3840, 3842, 3844, 3846, 3848, 3850, 3852, 3854, 3856, 3858, 3860, 3862, 3864, 3866, 3868, 3870, 3872, 3874, 3876,
3878, 3880, 3882, 3884, 3886, 3888, 3890, 3892, 3894, 3896, 3898, 3900, 3902, 3904, 3906, 3908, 3910, 3912, 3914,
3916, 3918, 3920, 3922, 3924, 3926, 3928, 3930, 3932, 3934, 3936, 3938, 3940, 3942, 3944, 3946, 3948, 3950, 3952,
3954, 3956, 3958, 3960, 3962, 3964, 3966, 3968, 3970, 3972, 3974, 3976, 3978, 3980, 3982, 3984, 3986, 3988, 3990,
3992, 3994, 3996, 3998, 4000, 4002, 4004, 4006, 4008, 4010, 4012, 4014, 4016, 4018, 4020, 4022, 4024, 4026, 4028,
4030, 4032, 4034, 4036, 4038, 4040, 4042, 4044, 4046, 4048, 4050, 4052, 4054, 4056, 4058, 4060, 4062, 4064, 4066,
4068, 4070, 4072, 4074, 4076, 4078, 4080, 4082, 4084, 4086, 4088, 4090, 4092, 4094, 4096, 4098, 4100, 4102, 4104,
4106, 4108, 4110, 4112, 4114, 4116, 4118, 4120, 4122, 4124, 4126, 4128, 4130, 4132, 4134, 4136, 4138, 4140, 4142,
4144, 4146, 4148, 4150, 4152, 4154, 4156, 4158, 4160, 4162, 4164, 4166, 4168, 4170, 4172, 4174, 4176, 4178, 4180,
4182, 4184, 4186, 4188, 4190, 4192, 4194, 4196, 4198, 4200, 4202, 4204, 4206, 4208, 4210, 4212, 4214, 4216, 4218,
4220, 4222, 4224, 4226, 4228, 4230, 4232, 4234, 4236, 4238, 4240, 4242, 4244, 4246, 4248, 4250, 4252, 4254, 4256,
4258, 4260, 4262, 4264, 4266, 4268, 4270, 4272, 4274, 4276, 4278, 4280, 4282, 4284, 4286, 4288, 4290, 4292, 4294,
4296, 4298, 4300, 4302, 4304, 4306, 4308, 4310, 4312, 4314, 4316, 4318, 4320, 4322, 4324, 4326, 4328, 4330, 4332,
4334, 4336, 4338, 4340, 4342, 4344, 4346, 4348, 4350, 4352, 4354, 4356, 4358, 4360, 4362, 4364, 4366, 4368, 4370,
4372, 4374, 4376, 4378, 4380, 4382, 4384, 4386, 4388, 4390, 4392, 4394, 4396, 4398, 4400, 4402, 4404, 4406, 4408,
4410, 4412, 4414, 4416, 4418, 4420, 4422, 4424, 4426, 4428, 4430, 4432, 4434, 4436, 4438, 4440, 4442, 4444, 4446,
4448, 4450, 4452, 4454, 4456, 4458, 4460, 4462, 4464, 4466, 4468, 4470, 4472, 4474, 4476, 4478, 4480, 4482, 4484,
4486, 4488, 4490, 4492, 4494, 4496, 4498, 4500, 4502, 4504, 4506, 4508, 4510, 4512, 4514, 4516, 4518, 4520, 4522,
4524, 4526, 4528, 4530, 4532, 4534, 4536, 4538, 4540, 4542, 4544, 4546, 4548, 4550, 4552, 4554, 4556, 4558, 4560,
4562, 4564, 4566, 4568, 4570, 4572, 4574, 4576, 4578, 4580, 4582, 4584, 4586, 4588, 4590, 4592, 4594, 4596, 4598,
4600, 4602, 4604, 4606, 4608, 4610, 4612, 4614, 4616, 4618, 4620, 4622, 4624, 4626, 4628, 4630, 4632, 4634, 4636,
4638, 4640, 4642, 4644, 4646, 4648, 4650, 4652, 4654, 4656, 4658, 4660, 4662, 4664, 4666, 4668, 4670, 4672, 4674,
4676, 4678, 4680, 4682, 4684, 4686, 4688, 4690, 4692, 4694, 4696, 4698, 4700, 4702, 4704, 4706, 4708, 4710, 4712,
4714, 4716, 4718, 4720, 4722, 4724, 4726, 4728, 4730, 4732, 4734, 4736, 4738, 4740, 4742, 4744, 4746, 4748, 4750,
4752, 4754, 4756, 4758, 4760, 4762, 4764, 4766, 4768, 4770, 4772, 4774, 4776, 4778, 4780, 4782, 4784, 4786, 4788,
4790, 4792, 4794, 4796, 4798, 4800, 4802, 4804, 4806, 4808, 4810, 4812, 4814, 4816, 4818, 4820, 4822, 4824, 4826,
4828, 4830, 4832, 4834, 4836, 4838, 4840, 4842, 4844, 4846, 4848, 4850, 4852, 4854, 4856, 4858, 4860, 4862, 4864,
4866, 4868, 4870, 4872, 4874, 4876, 4878, 4880, 4882, 4884, 4886, 4888, 4890, 4892, 4894, 4896, 4898, 4900, 4902,
4904, 4906, 4908, 4910, 4912, 4914, 4916, 4918, 4920, 4922, 4924, 4926, 4928, 4930, 4932, 4934, 4936, 4938, 4940,
4942, 4944, 4946, 4948, 4950, 4952, 4954, 4956, 4958, 4960, 4962, 4964, 4966, 4968, 4970, 4972, 4974, 4976, 4978,
4980, 4982, 4984, 4986, 4988, 4990, 4992, 4994, 4996, 4998, 5000, 5002, 5004, 5006, 5008, 5010, 5012, 5014, 5016,
5018, 5020, 5022, 5024, 5026, 5028, 5030, 5032, 5034, 5036, 5038, 5040, 5042, 5044, 5046, 5048, 5050, 5052, 5054,
5056, 5058, 5060, 5062, 5064, 5066, 5068, 5070, 5072, 5074, 5076, 5078, 5080, 5082, 5084, 5086, 5088, 5090, 5092,
5094, 5096, 5098, 5100, 5102, 5104, 5106, 5108, 5110, 5112, 5114, 5116, 5118, 5120, 5122, 5124, 5126, 5128, 5130,
5132, 5134, 5136, 5138, 5140, 5142, 5144, 5146, 5148, 5150, 5152, 5154, 5156, 5158, 5160, 5162, 5164, 5166, 5168,
5170, 5172, 5174, 5176, 5178, 5180, 5182, 5184, 5186, 5188, 5190, 5192, 5194, 5196, 5198, 5200, 5202, 5204, 5206,
5208, 5210, 5212, 5214, 5216, 5218, 5220, 5222, 5224, 5226, 5228, 5230, 5232, 5234, 5236, 5238, 5240, 5242, 5244,
5246, 5248, 5250, 5252, 5254, 5256, 5258, 5260, 5262, 5264, 5266, 5268, 5270, 5272, 5274, 5276, 5278, 5280, 5282,
5284, 5286, 5288, 5290, 5292, 5294, 5296, 5298, 5300, 5302, 5304, 5306, 5308, 5310, 5312, 5314, 5316, 5318, 5320,
5322, 5324, 5326, 5328, 5330, 5332, 5334, 5336, 5338, 5340, 5342, 5344, 5346, 5348, 5350, 5352, 5354, 5356, 5358,
5360, 5362, 5364, 5366, 5368, 5370, 5372, 5374, 5376, 5378, 5380, 5382, 5384, 5386, 5388, 5390, 5392, 5394, 5396,
5398, 5400, 5402, 5404, 5406, 5408, 5410, 5412, 5414, 5416, 5418, 5420, 5422, 5424, 5426, 5428, 5430, 5432, 5434,
5436, 5438, 5440, 5442, 5444, 5446, 5448, 5450, 5452, 5454, 5456, 5458, 5460, 5462, 5464, 5466, 5468, 5470, 5472,
5474, 5476, 5478, 5480, 5482, 5484, 5486, 5488, 5490, 5492, 5494, 5496, 5498, 5500, 5502, 5504, 5506, 5508, 5510,
5512, 5514, 5516, 5518, 5520, 5522, 5524, 5526, 5528, 5530, 5532, 5534, 5536, 5538, 5540, 5542, 5544, 5546, 5548,
5550, 5552, 5554, 5556, 5558, 5560, 5562, 5564, 5566, 5568, 5570, 5572, 5574, 5576, 5578, 5580, 5582, 5584, 5586,
5588, 5590, 5592, 5594, 5596, 5598, 5600, 5602, 5604, 5606, 5608, 5610, 5612, 5614, 5616, 5618, 5620, 5622, 5624,
5626, 5628, 5630, 5632, 5634, 5636, 5638, 5640, 5642, 5644, 5646, 5648, 5650, 5652, 5654, 5656, 5658, 5660, 5662,
5664, 5666, 5668, 5670, 5672, 5674, 5676, 5678, 5680, 5682, 5684, 5686, 5688, 5690, 5692, 5694, 5696, 5698, 5700,
5702, 5704, 5706, 5708, 5710, 5712, 5714, 5716, 5718, 5720, 5722, 5724, 5726, 5728, 5730, 5732, 5734, 5736, 5738,
5740, 5742, 5744, 5746, 5748, 5750, 5752, 5754, 5756, 5758, 5760, 5762, 5764, 5766, 5768, 5770, 5772, 5774, 5776,
5778, 5780, 5782, 5784, 5786, 5788, 5790, 5792, 5794, 5796, 5798, 5800, 5802, 5804, 5806, 5808, 5810, 5812, 5814,
5816, 5818, 5820, 5822, 5824, 5826, 5828, 5830, 5832, 5834, 5836, 5838, 5840, 5842, 5844, 5846, 5848, 5850, 5852,
5854, 5856, 5858, 5860, 5862, 5864, 5866, 5868, 5870, 5872, 5874, 5876, 5878, 5880, 5882, 5884, 5886, 5888, 5890,
5892, 5894, 5896, 5898, 5900, 5902, 5904, 5906, 5908, 5910, 5912, 5914, 5916, 5918, 5920, 5922, 5924, 5926, 5928,
5930, 5932, 5934, 5936, 5938, 5940, 5942, 5944, 5946, 5948, 5950, 5952, 5954, 5956, 5958, 5960, 5962, 5964, 5966,
5968, 5970, 5972, 5974, 5976, 5978, 5980, 5982, 5984, 5986, 5988, 5990, 5992, 5994, 5996, 5998, 6000, 6002, 6004,
6006, 6008, 6010, 6012, 6014, 6016, 6018, 6020, 6022, 6024, 6026, 6028, 6030, 6032, 6034, 6036, 6038, 6040, 6042,
6044, 6046, 6048, 6050, 6052, 6054, 6056, 6058, 6060, 6062, 6064, 6066, 6068, 6070, 6072, 6074, 6076, 6078, 6080,
6082, 6084, 6086, 6088, 6090, 6092, 6094, 6096, 6098, 6100, 6102, 6104, 6106, 6108, 6110, 6112, 6114, 6116, 6118,
6120, 6122, 6124, 6126, 6128, 6130, 6132, 6134, 6136, 6138, 6140, 6142, 6144, 6146, 6148, 6150, 6152, 6154, 6156,
6158, 6160, 6162, 6164, 6166, 6168, 6170, 6172, 6174, 6176, 6178, 6180, 6182, 6184, 6186, 6188, 6190, 6192, 6194,
6196, 6198, 6200, 6202, 6204, 6206, 6208, 6210, 6212, 6214, 6216, 6218, 6220, 6222, 6224, 6226, 6228, 6230, 6232,
6234, 6236, 6238, 6240, 6242, 6244, 6246, 6248, 6250, 6252, 6254, 6256, 6258, 6260, 6262, 6264, 6266, 6268, 6270,
6272, 6274, 6276, 6278, 6280, 6282, 6284, 6286, 6288, 6290, 6292, 6294, 6296, 6298, 6300, 6302, 6304, 6306, 6308,
6310, 6312, 6314, 6316, 6318, 6320, 6322, 6324, 6326, 6328, 6330, 6332, 6334, 6336, 6338, 6340, 6342, 6344, 6346,
6348, 6350, 6352, 6354, 6356, 6358, 6360, 6362, 6364, 6366, 6368, 6370, 6372, 6374, 6376, 6378, 6380, 6382, 6384,
6386, 6388, 6390, 6392, 6394, 6396, 6398, 6400, 6402, 6404, 6406, 6408, 6410, 6412, 6414, 6416, 6418, 6420, 6422,
6424, 6426, 6428, 6430, 6432, 6434, 6436, 6438, 6440, 6442, 6444, 6446, 6448, 6450, 6452, 6454, 6456, 6458, 6460,
6462, 6464, 6466, 6468, 6470, 6472, 6474, 6476, 6478, 6480, 6482, 6484, 6486, 6488, 6490, 6492, 6494, 6496, 6498,
6500, 6502, 6504, 6506, 6508, 6510, 6512, 6514, 6516, 6518, 6520, 6522, 6524, 6526, 6528, 6530, 6532, 6534, 6536,
6538, 6540, 6542, 6544, 6546, 6548, 6550, 6552, 6554, 6556, 6558, 6560, 6562, 6564, 6566, 6568, 6570, 6572, 6574,
6576, 6578, 6580, 6582, 6584, 6586, 6588, 6590, 6592, 6594, 6596, 6598, 6600, 6602, 6604, 6606, 6608, 6610, 6612,
6614, 6616, 6618, 6620, 6622, 6624, 6626, 6628, 6630, 6632, 6634, 6636, 6638, 6640, 6642, 6644, 6646, 6648, 6650,
6652, 6654, 6656, 6658, 6660, 6662, 6664, 6666, 6668, 6670, 6672, 6674, 6676, 6678, 6680, 6682, 6684, 6686, 6688,
6690, 6692, 6694, 6696, 6698, 6700, 6702, 6704, 6706, 6708, 6710, 6712, 6714, 6716, 6718, 6720, 6722, 6724, 6726,
6728, 6730, 6732, 6734, 6736, 6738, 6740, 6742, 6744, 6746, 6748, 6750, 6752, 6754, 6756, 6758, 6760, 6762, 6764,
6766, 6768, 6770, 6772, 6774, 6776, 6778, 6780, 6782, 6784, 6786, 6788, 6790, 6792, 6794, 6796, 6798, 6800, 6802,
6804, 6806, 6808, 6810, 6812, 6814, 6816, 6818, 6820, 6822, 6824, 6826, 6828, 6830, 6832, 6834, 6836, 6838, 6840,
6842, 6844, 6846, 6848, 6850, 6852, 6854, 6856, 6858, 6860, 6862, 6864, 6866, 6868, 6870, 6872, 6874, 6876, 6878,
6880, 6882, 6884, 6886, 6888, 6890, 6892, 6894, 6896, 6898, 6900, 6902, 6904, 6906, 6908, 6910, 6912, 6914, 6916,
6918, 6920, 6922, 6924, 6926, 6928, 6930, 6932, 6934, 6936, 6938, 6940, 6942, 6944, 6946, 6948, 6950, 6952, 6954,
6956, 6958, 6960, 6962, 6964, 6966, 6968, 6970, 6972, 6974, 6976, 6978, 6980, 6982, 6984, 6986, 6988, 6990, 6992,
6994, 6996, 6998, 7000, 7002, 7004, 7006, 7008, 7010, 7012, 7014, 7016, 7018, 7020, 7022, 7024, 7026, 7028, 7030,
7032, 7034, 7036, 7038, 7040, 7042, 7044, 7046, 7048, 7050, 7052, 7054, 7056, 7058, 7060, 7062, 7064, 7066, 7068,
7070, 7072, 7074, 7076, 7078, 7080, 7082, 7084, 7086, 7088, 7090, 7092, 7094, 7096, 7098, 7100, 7102, 7104, 7106,
7108, 7110, 7112, 7114, 7116, 7118, 7120, 7122, 7124, 7126, 7128, 7130, 7132, 7134, 7136, 7138, 7140, 7142, 7144,
7146, 7148, 7150, 7152, 7154, 7156, 7158, 7160, 7162, 7164, 7166, 7168, 7170, 7172, 7174, 7176, 7178, 7180, 7182,
7184, 7186, 7188, 7190, 7192, 7194, 7196, 7198, 7200, 7202, 7204, 7206, 7208, 7210, 7212, 7214, 7216, 7218, 7220,
7222, 7224, 7226, 7228, 7230, 7232, 7234, 7236, 7238, 7240, 7242, 7244, 7246, 7248, 7250, 7252, 7254, 7256, 7258,
7260, 7262, 7264, 7266, 7268, 7270, 7272, 7274, 7276, 7278, 7280, 7282, 7284, 7286, 7288, 7290, 7292, 7294, 7296,
7298, 7300, 7302, 7304, 7306, 7308, 7310, 7312, 7314, 7316, 7318, 7320, 7322, 7324, 7326, 7328, 7330, 7332, 7334,
7336, 7338, 7340, 7342, 7344, 7346, 7348, 7350, 7352, 7354, 7356, 7358, 7360, 7362, 7364, 7366, 7368, 7370, 7372,
7374, 7376, 7378, 7380, 7382, 7384, 7386, 7388, 7390, 7392, 7394, 7396, 7398, 7400, 7402, 7404, 7406, 7408, 7410,
7412, 7414, 7416, 7418, 7420, 7422, 7424, 7426, 7428, 7430, 7432, 7434, 7436, 7438, 7440, 7442, 7444, 7446, 7448,
7450, 7452, 7454, 7456, 7458, 7460, 7462, 7464, 7466, 7468, 7470, 7472, 7474, 7476, 7478, 7480, 7482, 7484, 7486,
7488, 7490, 7492, 7494, 7496, 7498, 7500, 7502, 7504, 7506, 7508, 7510, 7512, 7514, 7516, 7518, 7520, 7522, 7524,
7526, 7528, 7530, 7532, 7534, 7536, 7538, 7540, 7542, 7544, 7546, 7548, 7550, 7552, 7554, 7556, 7558, 7560, 7562,
7564, 7566, 7568, 7570, 7572, 7574, 7576, 7578, 7580, 7582, 7584, 7586, 7588, 7590, 7592, 7594, 7596, 7598, 7600,
7602, 7604, 7606, 7608, 7610, 7612, 7614, 7616, 7618, 7620, 7622, 7624, 7626, 7628, 7630, 7632, 7634, 7636, 7638,
7640, 7642, 7644, 7646, 7648, 7650, 7652, 7654, 7656, 7658, 7660, 7662, 7664, 7666, 7668, 7670, 7672, 7674, 7676,
7678, 7680, 7682, 7684, 7686, 7688, 7690, 7692, 7694, 7696, 7698, 7700, 7702, 7704, 7706, 7708, 7710, 7712, 7714,
7716, 7718, 7720, 7722, 7724, 7726, 7728, 7730, 7732, 7734, 7736, 7738, 7740, 7742, 7744, 7746, 7748, 7750, 7752,
7754, 7756, 7758, 7760, 7762, 7764, 7766, 7768, 7770, 7772, 7774, 7776, 7778, 7780, 7782, 7784, 7786, 7788, 7790,
7792, 7794, 7796, 7798, 7800, 7802, 7804, 7806, 7808, 7810, 7812, 7814, 7816, 7818, 7820, 7822, 7824, 7826, 7828,
7830, 7832, 7834, 7836, 7838, 7840, 7842, 7844, 7846, 7848, 7850, 7852, 7854, 7856, 7858, 7860, 7862, 7864, 7866,
7868, 7870, 7872, 7874, 7876, 7878, 7880, 7882, 7884, 7886, 7888, 7890, 7892, 7894, 7896, 7898, 7900, 7902, 7904,
7906, 7908, 7910, 7912, 7914, 7916, 7918, 7920, 7922, 7924, 7926, 7928, 7930, 7932, 7934, 7936, 7938, 7940, 7942,
7944, 7946, 7948, 7950, 7952, 7954, 7956, 7958, 7960, 7962, 7964, 7966, 7968, 7970, 7972, 7974, 7976, 7978, 7980,
7982, 7984, 7986, 7988, 7990, 7992, 7994, 7996, 7998, 8000, 8002, 8004, 8006, 8008, 8010, 8012, 8014, 8016, 8018,
8020, 8022, 8024, 8026, 8028, 8030, 8032, 8034, 8036, 8038, 8040, 8042, 8044, 8046, 8048, 8050, 8052, 8054, 8056,
8058, 8060, 8062, 8064, 8066, 8068, 8070, 8072, 8074, 8076, 8078, 8080, 8082, 8084, 8086, 8088, 8090, 8092, 8094,
8096, 8098, 8100, 8102, 8104, 8106, 8108, 8110, 8112, 8114, 8116, 8118, 8120, 8122, 8124, 8126, 8128, 8130, 8132,
8134, 8136, 8138, 8140, 8142, 8144, 8146, 8148, 8150, 8152, 8154, 8156, 8158, 8160, 8162, 8164, 8166, 8168, 8170,
8172, 8174, 8176, 8178, 8180, 8182, 8184, 8186, 8188, 8190, 8192, 8194, 8196, 8198, 8200, 8202, 8204, 8206, 8208,
8210, 8212, 8214, 8216, 8218, 8220, 8222, 8224, 8226, 8228, 8230, 8232, 8234, 8236, 8238, 8240, 8242, 8244, 8246,
8248, 8250, 8252, 8254, 8256, 8258, 8260, 8262, 8264, 8266, 8268, 8270, 8272, 8274, 8276, 8278, 8280, 8282, 8284,
8286, 8288, 8290, 8292, 8294, 8296, 8298, 8300, 8302, 8304, 8306, 8308, 8310, 8312, 8314, 8316, 8318, 8320, 8322,
8324, 8326, 8328, 8330, 8332, 8334, 8336, 8338, 8340, 8342, 8344, 8346, 8348, 8350, 8352, 8354, 8356, 8358, 8360,
8362, 8364, 8366, 8368, 8370, 8372, 8374, 8376, 8378, 8380, 8382, 8384, 8386, 8388, 8390, 8392, 8394, 8396, 8398,
8400, 8402, 8404, 8406, 8408, 8410, 8412, 8414, 8416, 8418, 8420, 8422, 8424, 8426, 8428, 8430, 8432, 8434, 8436,
8438, 8440, 8442, 8444, 8446, 8448, 8450, 8452, 8454, 8456, 8458, 8460, 8462, 8464, 8466, 8468, 8470, 8472, 8474,
8476, 8478, 8480, 8482, 8484, 8486, 8488, 8490, 8492, 8494, 8496, 8498, 8500, 8502, 8504, 8506, 8508, 8510, 8512,
8514, 8516, 8518, 8520, 8522, 8524, 8526, 8528, 8530, 8532, 8534, 8536, 8538, 8540, 8542, 8544, 8546, 8548, 8550,
8552, 8554, 8556, 8558, 8560, 8562, 8564, 8566, 8568, 8570, 8572, 8574, 8576, 8578, 8580, 8582, 8584, 8586, 8588,
8590, 8592, 8594, 8596, 8598, 8600, 8602, 8604, 8606, 8608, 8610, 8612, 8614, 8616, 8618, 8620, 8622, 8624, 8626,
8628, 8630, 8632, 8634, 8636, 8638, 8640, 8642, 8644, 8646, 8648, 8650, 8652, 8654, 8656, 8658, 8660, 8662, 8664,
8666, 8668, 8670, 8672, 8674, 8676, 8678, 8680, 8682, 8684, 8686, 8688, 8690, 8692, 8694, 8696, 8698, 8700, 8702,
8704, 8706, 8708, 8710, 8712, 8714, 8716, 8718, 8720, 8722, 8724, 8726, 8728, 8730, 8732, 8734, 8736, 8738, 8740,
8742, 8744, 8746, 8748, 8750, 8752, 8754, 8756, 8758, 8760, 8762, 8764, 8766, 8768, 8770, 8772, 8774, 8776, 8778,
8780, 8782, 8784, 8786, 8788, 8790, 8792, 8794, 8796, 8798, 8800, 8802, 8804, 8806, 8808, 8810, 8812, 8814, 8816,
8818, 8820, 8822, 8824, 8826, 8828, 8830, 8832, 8834, 8836, 8838, 8840, 8842, 8844, 8846, 8848, 8850, 8852, 8854,
8856, 8858, 8860, 8862, 8864, 8866, 8868, 8870, 8872, 8874, 8876, 8878, 8880, 8882, 8884, 8886, 8888, 8890, 8892,
8894, 8896, 8898, 8900, 8902, 8904, 8906, 8908, 8910, 8912, 8914, 8916, 8918, 8920, 8922, 8924, 8926, 8928, 8930,
8932, 8934, 8936, 8938, 8940, 8942, 8944, 8946, 8948, 8950, 8952, 8954, 8956, 8958, 8960, 8962, 8964, 8966, 8968,
8970, 8972, 8974, 8976, 8978, 8980, 8982, 8984, 8986, 8988, 8990, 8992, 8994, 8996, 8998, 9000, 9002, 9004, 9006,
9008, 9010, 9012, 9014, 9016, 9018, 9020, 9022, 9024, 9026, 9028, 9030, 9032, 9034, 9036, 9038, 9040, 9042, 9044,
9046, 9048, 9050, 9052, 9054, 9056, 9058, 9060, 9062, 9064, 9066, 9068, 9070, 9072, 9074, 9076, 9078, 9080, 9082,
9084, 9086, 9088, 9090, 9092, 9094, 9096, 9098, 9100, 9102, 9104, 9106, 9108, 9110, 9112, 9114, 9116, 9118, 9120,
9122, 9124, 9126, 9128, 9130, 9132, 9134, 9136, 9138, 9140, 9142, 9144, 9146, 9148, 9150, 9152, 9154, 9156, 9158,
9160, 9162, 9164, 9166, 9168, 9170, 9172, 9174, 9176, 9178, 9180, 9182, 9184, 9186, 9188, 9190, 9192, 9194, 9196,
9198, 9200, 9202, 9204, 9206, 9208, 9210, 9212, 9214, 9216, 9218, 9220, 9222, 9224, 9226, 9228, 9230, 9232, 9234,
9236, 9238, 9240, 9242, 9244, 9246, 9248, 9250, 9252, 9254, 9256, 9258, 9260, 9262, 9264, 9266, 9268, 9270, 9272,
9274, 9276, 9278, 9280, 9282, 9284, 9286, 9288, 9290, 9292, 9294, 9296, 9298, 9300, 9302, 9304, 9306, 9308, 9310,
9312, 9314, 9316, 9318, 9320, 9322, 9324, 9326, 9328, 9330, 9332, 9334, 9336, 9338, 9340, 9342, 9344, 9346, 9348,
9350, 9352, 9354, 9356, 9358, 9360, 9362, 9364, 9366, 9368, 9370, 9372, 9374, 9376, 9378, 9380, 9382, 9384, 9386,
9388, 9390, 9392, 9394, 9396, 9398, 9400, 9402, 9404, 9406, 9408, 9410, 9412, 9414, 9416, 9418, 9420, 9422, 9424,
9426, 9428, 9430, 9432, 9434, 9436, 9438, 9440, 9442, 9444, 9446, 9448, 9450, 9452, 9454, 9456, 9458, 9460, 9462,
9464, 9466, 9468, 9470, 9472, 9474, 9476, 9478, 9480, 9482, 9484, 9486, 9488, 9490, 9492, 9494, 9496, 9498, 9500,
9502, 9504, 9506, 9508, 9510, 9512, 9514, 9516, 9518, 9520, 9522, 9524, 9526, 9528, 9530, 9532, 9534, 9536, 9538,
9540, 9542, 9544, 9546, 9548, 9550, 9552, 9554, 9556, 9558, 9560, 9562, 9564, 9566, 9568, 9570, 9572, 9574, 9576,
9578, 9580, 9582, 9584, 9586, 9588, 9590, 9592, 9594, 9596, 9598, 9600, 9602, 9604, 9606, 9608, 9610, 9612, 9614,
9616, 9618, 9620, 9622, 9624, 9626, 9628, 9630, 9632, 9634, 9636, 9638, 9640, 9642, 9644, 9646, 9648, 9650, 9652,
9654, 9656, 9658, 9660, 9662, 9664, 9666, 9668, 9670, 9672, 9674, 9676, 9678, 9680, 9682, 9684, 9686, 9688, 9690,
9692, 9694, 9696, 9698, 9700, 9702, 9704, 9706, 9708, 9710, 9712, 9714, 9716, 9718, 9720, 9722, 9724, 9726, 9728,
9730, 9732, 9734, 9736, 9738, 9740, 9742, 9744, 9746, 9748, 9750, 9752, 9754, 9756, 9758, 9760, 9762, 9764, 9766,
9768, 9770, 9772, 9774, 9776, 9778, 9780, 9782, 9784, 9786, 9788, 9790, 9792, 9794, 9796, 9798, 9800, 9802, 9804,
9806, 9808, 9810, 9812, 9814, 9816, 9818, 9820, 9822, 9824, 9826, 9828, 9830, 9832, 9834, 9836, 9838, 9840, 9842,
9844, 9846, 9848, 9850, 9852, 9854, 9856, 9858, 9860, 9862, 9864, 9866, 9868, 9870, 9872, 9874, 9876, 9878, 9880,
9882, 9884, 9886, 9888, 9890, 9892, 9894, 9896, 9898, 9900, 9902, 9904, 9906, 9908, 9910, 9912, 9914, 9916, 9918,
9920, 9922, 9924, 9926, 9928, 9930, 9932, 9934, 9936, 9938, 9940, 9942, 9944, 9946, 9948, 9950, 9952, 9954, 9956,
9958, 9960, 9962, 9964, 9966, 9968, 9970, 9972, 9974, 9976, 9978, 9980, 9982, 9984, 9986, 9988, 9990, 9992, 9994,
9996, 9998, 10000, 10002]
# NOTE(review): `Solution`, `a1`, `b1` and `start` are defined above this
# chunk and are not visible here; presumably `start = time.time()` — confirm upstream.
res = Solution().fairCandySwap(a1, b1)
print(res)
end = time.time()
# Report elapsed wall-clock time for the fairCandySwap call.
print('Running time: %s Seconds' % (end - start))
| true | true |
f71f6977583be15f02e5a3484137a80e4aecac84 | 926 | py | Python | supervised_learning/0x03-optimization/12-learning_rate_decay.py | cbarros7/holbertonschool-machine_learning | 1edb4c253441f6319b86c9c590d1e7dd3fc32bf4 | [
"MIT"
] | 1 | 2022-03-09T19:12:22.000Z | 2022-03-09T19:12:22.000Z | supervised_learning/0x03-optimization/12-learning_rate_decay.py | cbarros7/holbertonschool-machine_learning | 1edb4c253441f6319b86c9c590d1e7dd3fc32bf4 | [
"MIT"
] | null | null | null | supervised_learning/0x03-optimization/12-learning_rate_decay.py | cbarros7/holbertonschool-machine_learning | 1edb4c253441f6319b86c9c590d1e7dd3fc32bf4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
"""Learning Rate Decay Upgraded"""
import tensorflow as tf
def learning_rate_decay(alpha, decay_rate, global_step, decay_step):
    """Build a TensorFlow inverse-time learning-rate decay operation.

    Args:
        alpha: the original (initial) learning rate
        decay_rate: weight controlling how quickly alpha decays
        global_step: number of gradient-descent passes elapsed so far
        decay_step: number of passes that must occur before alpha is
            decayed further

    Returns:
        The learning rate decay operation (stepwise, because
        staircase=True makes the decay discrete).
    """
    decayed_alpha = tf.train.inverse_time_decay(
        alpha,
        global_step,
        decay_step,
        decay_rate,
        staircase=True,
    )
    return decayed_alpha
| 37.04 | 77 | 0.671706 |
import tensorflow as tf
def learning_rate_decay(alpha, decay_rate, global_step, decay_step):
    # Inverse-time decay of `alpha`: decays once every `decay_step` passes
    # of gradient descent (staircase=True makes the schedule discrete).
    # Returns the TensorFlow learning-rate-decay operation.
    return tf.train.inverse_time_decay(
        learning_rate=alpha, global_step=global_step, decay_steps=decay_step,
        decay_rate=decay_rate, staircase=True, name=None
    )
| true | true |
f71f6a82759cb768681d3430b292afb226aeca63 | 11,152 | py | Python | analyses/weat.py | raphaottoni/youtube-hatespeech | 4467d065c2bddd86feec673ad74670c4c975f017 | [
"MIT"
] | null | null | null | analyses/weat.py | raphaottoni/youtube-hatespeech | 4467d065c2bddd86feec673ad74670c4c975f017 | [
"MIT"
] | 1 | 2018-08-09T14:13:16.000Z | 2018-08-09T15:34:00.000Z | analyses/weat.py | raphaottoni/youtube-hatespeech | 4467d065c2bddd86feec673ad74670c4c975f017 | [
"MIT"
] | null | null | null | from gensim import corpora, models, similarities
import json
import numpy
import random
import csv
import itertools
import gensim, copy
from collections import defaultdict
import glob, os
# WEAT (Word Embedding Association Test) word sets.
# target_words / attribute_words are keyed "<weat-number>_<a|b>"; the "_a"
# and "_b" entries of a WEAT are its two paired target (or attribute) sets.
# weats_name[i] holds the human-readable label of WEAT number i + 1.
target_words = {}
attribute_words= {}
weats_name = []

# 1 WEAT Immigrants
target_words["1_a"] = ["immigrant", "migrant"]
target_words["1_b"] = ["citizen", "native"]
attribute_words["1_a"] = ["bad", "burden", "pirate", "plague", "taker", "thief"]
attribute_words["1_b"] = ["good", "honest", "maker", "rightful"]
weats_name.append("Immigrants")

# 2 WEAT Muslims
target_words["2_a"] = ["islamism", "muhammed", "muslim", "quran"]
target_words["2_b"] = ["bible", "christian", "christianity", "jesus"]
attribute_words["2_a"] = ["assassin", "attack", "bomb", "death", "murder", "radical", "terrorist"]
attribute_words["2_b"] = ["compassionate", "gentle", "humane", "kind", "tolerant"]
weats_name.append("Muslims")

# 3 WEAT LGBT people
target_words["3_a"] = ["bisexual", "gay", "homosexual", "lesbian"]
target_words["3_b"] = ["het", "hetero", "heterosexual", "straight"]
attribute_words["3_a"] = ["immoral", "outrageous", "promiscuous", "revolting", "sinner"]
attribute_words["3_b"] = ["moral", "natural", "normal"]
weats_name.append("LGBT People")

# 4 WEAT math arts male_female
target_words["4_a"] = ["math","algebra","geometry","calculus","equations","computation","numbers" ]
target_words["4_b"] = ["poetry","art","dance","literature","novel","symphony","drama","sculpture" ]
attribute_words["4_a"] = ["male","man","boy","brother","he","him","his", "son"]
attribute_words["4_b"] = ["female","woman","girl","sister","she","her","hers", "daughter"]
weats_name.append("Math Arts")

# 5 WEAT Judaism and Christianity
# Targets: Christianity ("_a") vs. Judaism ("_b");
# attributes: pleasant ("_a") vs. unpleasant ("_b").
target_words["5_a"] = ["church","bible","christian","christianity"]
target_words["5_b"] = ["synagogue","torah","jew","judaism"]
attribute_words["5_a"] = ["terrific","delight","celebrate","glad","beautiful","appealing", "pleasure", "enjoy" ]
attribute_words["5_b"] = ["hate","angry","detest","despise","dirty","humiliate","pain", "horrible"]
weats_name.append("Jewish")

# 6 WEAT African American 1 (European- vs. African-American given names)
target_words["6_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
                       "matthew", "neil", "todd", "allison", "anne", "carrie",
                       "emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["6_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
                       "leroy", "rasheed", "tremayne", "tyrone", "aisha",
                       "ebony", "keisha", "kenya", "latonya", "lakisha",
                       "latoya", "tamika", "tanisha"]
attribute_words["6_a"] = [ "joy", "love", "peace", "wonderful", "pleasure", "friend", "laughter", "happy"]
attribute_words["6_b"] = ["agony", "terrible", "horrible", "nasty", "evil", "war", "awful","failure"]
weats_name.append("African - 1")

# 7 WEAT African American 2 (same names as WEAT 6, larger attribute sets)
target_words["7_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
                       "matthew", "neil", "todd", "allison", "anne", "carrie",
                       "emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["7_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
                       "leroy", "rasheed", "tremayne", "tyrone", "aisha",
                       "ebony", "keisha", "kenya", "latonya", "lakisha",
                       "latoya", "tamika", "tanisha"]
attribute_words["7_a"] = ["caress", "freedom", "health", "love", "peace",
                          "cheer", "friend", "heaven", "loyal", "pleasure",
                          "diamond", "gentle", "honest", "lucky", "rainbow",
                          "diploma", "gift", "honor", "miracle", "sunrise",
                          "family", "happy","laughter","paradise", "vacation"]
attribute_words["7_b"] = ["abuse", "crash", "filth", "murder", "sickness",
                          "accident", "death", "grief", "poison", "stink",
                          "assault", "disaster", "hatred","pollute", "tragedy",
                          "bomb", "divorce", "jail", "poverty", "ugly", "cancer",
                          "evil", "kill", "rotten","vomit"]
weats_name.append("African - 2")
def statistic_test(X,Y,A,B,M):
result = 0.0
sum_X = 0.0
sum_Y = 0.0
for word_X in X:
sum_X += sub_statistic_test(word_X, A,B,M)
for word_Y in Y:
sum_Y += sub_statistic_test(word_Y, A,B,M)
return (sum_X - sum_Y)
def sub_statistic_test(w,A,B,M):
result = 0.0
sum_cos_A = 0.0
sum_cos_B = 0.0
for word_A in A:
sum_cos_A += numpy.dot(M[w],M[word_A])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_A]))
for word_B in B:
sum_cos_B += numpy.dot(M[w],M[word_B])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_B]))
return (sum_cos_A/len(A) - sum_cos_B/len(B))
def effect_size(x_words,y_words,a_attributes,b_attributes,M):
# Effect size
test_x = 0.0
test_y = 0.0
samples = []
for word_x in target_words[x_words]:
test_x += sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M))
for word_y in target_words[y_words]:
test_y += sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M))
mean_x = test_x/len(target_words[x_words])
mean_y = test_y/len(target_words[y_words])
std_dev = numpy.std(samples)
effect_size = (mean_x - mean_y)/std_dev
return effect_size
# P-Value
def p_value(X,Y,A,B,model):
null_hipotese_evidance = 0.0
number_permitations = 0.0
# Finds the biggest possible set of the same size for the two classes
X_size = len(target_words[X])
Y_size = len(target_words[Y])
size = max(X_size, Y_size)
union = set(target_words[X] + target_words[Y])
random_test_statistic_values = []
test_statistic_value = statistic_test(target_words[X],target_words[Y],attribute_words[A],attribute_words[B],model)
if (Y_size + X_size) < 14:
# there will be less than 5000 combinations
permutations = itertools.combinations(union,size)
for i,permutation in enumerate(permutations):
x_i = permutation
y_i = union - set(permutation)
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
#print("null hipotese_evidance: " + str(null_hipotese_evidance))
#print("num_permutations: " + str(number_permitations))
#print("P-Value():")
#print(null_hipotese_evidance/number_permitations)
p_value_result = null_hipotese_evidance/number_permitations
#print("enviando " + str(p_value_result))
return(p_value_result)
else:
# There will be more than 5000, thus we should randomize
print("Generating 5k random")
classes = target_words[X] + target_words[Y]
for i in range(5000):
random.shuffle(classes)
x_i = classes[:size]
y_i = classes[size+1:]
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
# save the valus to be used for each channel
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
#if number_permitations % 100 == 0:
# print(number_permitations)
#print("null hipotese_evidance: " + str(null_hipotese_evidance))
#print("num_permutations: " + str(number_permitations))
#print("P-Value(english):")
#print(null_hipotese_evidance/number_permitations)
p_value_result = null_hipotese_evidance/number_permitations
return(p_value_result)
def main():
# Which models to load
political_biases_model = ["left", "leftcenter", "center", "right-center", "right"]
model_types = [ "captions", "comments"]
# list of WEATs to execute
weats = [1,2,3]
with open("../data/weat/weat_results.csv", "w") as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow(["channel","WEAT","political_bias", "source", "effect_size", "p_value"])
#for political_bias in political_biases_model:
# for model_type in model_types:
# for file in os.listdir("../models/biases/" + model_type + "/" + political_bias):
# if file.endswith(".model"):
# print("Loading " + political_bias + " word2vec " + model_type + " model " + "(" + file + ")")
# model = gensim.models.Word2Vec.load("../models/biases/" + model_type + "/" + political_bias+ "/" + file)
# #model = gensim.models.Word2Vec.load("../models/wiki-word2vec/wiki-en.word2vec.model")
# print("Executing WEATs on current model" )
# for weat_number in weats:
# X = str(weat_number) + "_a"
# Y = str(weat_number) + "_b"
# A = str(weat_number) + "_a"
# B = str(weat_number) + "_b"
# ## Effect size of the base model
# effect_size_result = effect_size(X,Y,A,B,model)
# print("Effect-Size("+str(weat_number)+ "):" + str(effect_size_result))
# p_value_result = p_value(X,Y,A,B,model)
# print("P-value("+str(weat_number)+ "):" + str(p_value_result))
# writer.writerow([file[:-6],weats_name[weat_number -1],political_bias , model_type, effect_size_result, p_value_result])
# Add the baseline weat results the wikipedia model
print("Loading the wiki base model")
model = gensim.models.Word2Vec.load("../models/wiki-word2vec/wiki-en.word2vec.model")
print("Executing WEATs on current model" )
for weat_number in weats:
X = str(weat_number) + "_a"
Y = str(weat_number) + "_b"
A = str(weat_number) + "_a"
B = str(weat_number) + "_b"
## Effect size of the base model
effect_size_result = effect_size(X,Y,A,B,model)
print("Effect-Size("+str(weat_number)+ "):" + str(effect_size_result))
p_value_result = p_value(X,Y,A,B,model)
print("P-value("+str(weat_number)+ "):" + str(p_value_result))
writer.writerow(["wikipedia",weats_name[weat_number -1], "wiki", "wiki", effect_size_result, p_value_result])
if __name__ == "__main__":
main()
| 43.733333 | 148 | 0.598816 | from gensim import corpora, models, similarities
import json
import numpy
import random
import csv
import itertools
import gensim, copy
from collections import defaultdict
import glob, os
target_words = {}
attribute_words= {}
weats_name = []
target_words["1_a"] = ["immigrant", "migrant"]
target_words["1_b"] = ["citizen", "native"]
attribute_words["1_a"] = ["bad", "burden", "pirate", "plague", "taker", "thief"]
attribute_words["1_b"] = ["good", "honest", "maker", "rightful"]
weats_name.append("Immigrants")
target_words["2_a"] = ["islamism", "muhammed", "muslim", "quran"]
target_words["2_b"] = ["bible", "christian", "christianity", "jesus"]
attribute_words["2_a"] = ["assassin", "attack", "bomb", "death", "murder", "radical", "terrorist"]
attribute_words["2_b"] = ["compassionate", "gentle", "humane", "kind", "tolerant"]
weats_name.append("Muslims")
target_words["3_a"] = ["bisexual", "gay", "homosexual", "lesbian"]
target_words["3_b"] = ["het", "hetero", "heterosexual", "straight"]
attribute_words["3_a"] = ["immoral", "outrageous", "promiscuous", "revolting", "sinner"]
attribute_words["3_b"] = ["moral", "natural", "normal"]
weats_name.append("LGBT People")
target_words["4_a"] = ["math","algebra","geometry","calculus","equations","computation","numbers" ]
target_words["4_b"] = ["poetry","art","dance","literature","novel","symphony","drama","sculpture" ]
attribute_words["4_a"] = ["male","man","boy","brother","he","him","his", "son"]
attribute_words["4_b"] = ["female","woman","girl","sister","she","her","hers", "daughter"]
weats_name.append("Math Arts")
target_words["5_a"] = ["church","bible","christian","christianity"]
target_words["5_b"] = ["synagogue","torah","jew","judaism"]
attribute_words["5_a"] = ["terrific","delight","celebrate","glad","beautiful","appealing", "pleasure", "enjoy" ]
attribute_words["5_b"] = ["hate","angry","detest","despise","dirty","humiliate","pain", "horrible"]
weats_name.append("Jewish")
target_words["6_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
"matthew", "neil", "todd", "allison", "anne", "carrie",
"emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["6_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
"leroy", "rasheed", "tremayne", "tyrone", "aisha",
"ebony", "keisha", "kenya", "latonya", "lakisha",
"latoya", "tamika", "tanisha"]
attribute_words["6_a"] = [ "joy", "love", "peace", "wonderful", "pleasure", "friend", "laughter", "happy"]
attribute_words["6_b"] = ["agony", "terrible", "horrible", "nasty", "evil", "war", "awful","failure"]
weats_name.append("African - 1")
target_words["7_a"] = ["brad", "brendan", "geoffrey", "greg", "brett", "jay",
"matthew", "neil", "todd", "allison", "anne", "carrie",
"emily", "jill", "laurie", "kristen", "meredith", "sarah"]
target_words["7_b"] = ["darnell", "hakim", "jermaine", "kareem", "jamal",
"leroy", "rasheed", "tremayne", "tyrone", "aisha",
"ebony", "keisha", "kenya", "latonya", "lakisha",
"latoya", "tamika", "tanisha"]
attribute_words["7_a"] = ["caress", "freedom", "health", "love", "peace",
"cheer", "friend", "heaven", "loyal", "pleasure",
"diamond", "gentle", "honest", "lucky", "rainbow",
"diploma", "gift", "honor", "miracle", "sunrise",
"family", "happy","laughter","paradise", "vacation"]
attribute_words["7_b"] = ["abuse", "crash", "filth", "murder", "sickness",
"accident", "death", "grief", "poison", "stink",
"assault", "disaster", "hatred","pollute", "tragedy",
"bomb", "divorce", "jail", "poverty", "ugly", "cancer",
"evil", "kill", "rotten","vomit"]
weats_name.append("African - 2")
def statistic_test(X,Y,A,B,M):
result = 0.0
sum_X = 0.0
sum_Y = 0.0
for word_X in X:
sum_X += sub_statistic_test(word_X, A,B,M)
for word_Y in Y:
sum_Y += sub_statistic_test(word_Y, A,B,M)
return (sum_X - sum_Y)
def sub_statistic_test(w,A,B,M):
result = 0.0
sum_cos_A = 0.0
sum_cos_B = 0.0
for word_A in A:
sum_cos_A += numpy.dot(M[w],M[word_A])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_A]))
for word_B in B:
sum_cos_B += numpy.dot(M[w],M[word_B])/(numpy.linalg.norm(M[w])*numpy.linalg.norm(M[word_B]))
return (sum_cos_A/len(A) - sum_cos_B/len(B))
def effect_size(x_words,y_words,a_attributes,b_attributes,M):
test_x = 0.0
test_y = 0.0
samples = []
for word_x in target_words[x_words]:
test_x += sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_x,attribute_words[a_attributes],attribute_words[b_attributes],M))
for word_y in target_words[y_words]:
test_y += sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M)
samples.append(sub_statistic_test(word_y,attribute_words[a_attributes],attribute_words[b_attributes],M))
mean_x = test_x/len(target_words[x_words])
mean_y = test_y/len(target_words[y_words])
std_dev = numpy.std(samples)
effect_size = (mean_x - mean_y)/std_dev
return effect_size
def p_value(X,Y,A,B,model):
null_hipotese_evidance = 0.0
number_permitations = 0.0
X_size = len(target_words[X])
Y_size = len(target_words[Y])
size = max(X_size, Y_size)
union = set(target_words[X] + target_words[Y])
random_test_statistic_values = []
test_statistic_value = statistic_test(target_words[X],target_words[Y],attribute_words[A],attribute_words[B],model)
if (Y_size + X_size) < 14:
permutations = itertools.combinations(union,size)
for i,permutation in enumerate(permutations):
x_i = permutation
y_i = union - set(permutation)
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
p_value_result = null_hipotese_evidance/number_permitations
return(p_value_result)
else:
print("Generating 5k random")
classes = target_words[X] + target_words[Y]
for i in range(5000):
random.shuffle(classes)
x_i = classes[:size]
y_i = classes[size+1:]
test_value = statistic_test(x_i,y_i,attribute_words[A],attribute_words[B],model)
random_test_statistic_values.append(test_value)
if( test_value > test_statistic_value):
null_hipotese_evidance += 1
number_permitations += 1
p_value_result = null_hipotese_evidance/number_permitations
return(p_value_result)
def main():
political_biases_model = ["left", "leftcenter", "center", "right-center", "right"]
model_types = [ "captions", "comments"]
weats = [1,2,3]
with open("../data/weat/weat_results.csv", "w") as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow(["channel","WEAT","political_bias", "source", "effect_size", "p_value"])
base model")
model = gensim.models.Word2Vec.load("../models/wiki-word2vec/wiki-en.word2vec.model")
print("Executing WEATs on current model" )
for weat_number in weats:
X = str(weat_number) + "_a"
Y = str(weat_number) + "_b"
A = str(weat_number) + "_a"
B = str(weat_number) + "_b"
= effect_size(X,Y,A,B,model)
print("Effect-Size("+str(weat_number)+ "):" + str(effect_size_result))
p_value_result = p_value(X,Y,A,B,model)
print("P-value("+str(weat_number)+ "):" + str(p_value_result))
writer.writerow(["wikipedia",weats_name[weat_number -1], "wiki", "wiki", effect_size_result, p_value_result])
if __name__ == "__main__":
main()
| true | true |
f71f6c8fd9d986ab03b10daa79ec6a243a174abe | 1,152 | py | Python | cgi-bin/utils.py | alexander1389/IMS.WebAPI | cfc8c6c899655c337973f9a32a620e9cd6af34b9 | [
"MIT"
] | null | null | null | cgi-bin/utils.py | alexander1389/IMS.WebAPI | cfc8c6c899655c337973f9a32a620e9cd6af34b9 | [
"MIT"
] | null | null | null | cgi-bin/utils.py | alexander1389/IMS.WebAPI | cfc8c6c899655c337973f9a32a620e9cd6af34b9 | [
"MIT"
] | null | null | null | from datetime import datetime
def validate_dt(date):
""" Validate datetime string
:param date: The datetime string
:type date: str
:returns: True if the date is correct datetime string,
False otherwise
:rtype: bool
"""
pattern = '000101000000'
# letters in date
if not date.isdecimal():
return False
# at least year must be specified
if len(date) < 2 or len(date) > 12:
return False
if len(date) % 2 > 0:
return False
chk = date + pattern[len(date):]
try:
datetime.strptime(chk, '%y%m%d%H%M%S')
except ValueError:
return False
return True
if __name__ == '__main__':
print('\nDate Validator Check --- START')
print('------------------------------\n')
dates = [
'99', '1312', '010212', '200229', '131024122203', '0',
'03014', '01021312121222', '201301', '200230', '310131271212'
]
for date in dates:
print('%-15s - %s' % (date,
'valid' if validate_dt(date) else 'invalid'))
print('\n----------------------------')
print('Date Validator Check --- END\n')
| 22.588235 | 69 | 0.539063 | from datetime import datetime
def validate_dt(date):
pattern = '000101000000'
if not date.isdecimal():
return False
if len(date) < 2 or len(date) > 12:
return False
if len(date) % 2 > 0:
return False
chk = date + pattern[len(date):]
try:
datetime.strptime(chk, '%y%m%d%H%M%S')
except ValueError:
return False
return True
if __name__ == '__main__':
print('\nDate Validator Check --- START')
print('------------------------------\n')
dates = [
'99', '1312', '010212', '200229', '131024122203', '0',
'03014', '01021312121222', '201301', '200230', '310131271212'
]
for date in dates:
print('%-15s - %s' % (date,
'valid' if validate_dt(date) else 'invalid'))
print('\n----------------------------')
print('Date Validator Check --- END\n')
| true | true |
f71f6cf1e351242fc9e0d3e8fd6d87cf389216c6 | 383 | py | Python | mitre_attack/data/types/group.py | check-spelling/mitre-attack | f3be1ccff235593c4277f3b9ec2696757924894b | [
"MIT"
] | 1 | 2022-01-13T06:32:10.000Z | 2022-01-13T06:32:10.000Z | mitre_attack/data/types/group.py | check-spelling/mitre-attack | f3be1ccff235593c4277f3b9ec2696757924894b | [
"MIT"
] | null | null | null | mitre_attack/data/types/group.py | check-spelling/mitre-attack | f3be1ccff235593c4277f3b9ec2696757924894b | [
"MIT"
] | 1 | 2022-01-14T00:00:24.000Z | 2022-01-14T00:00:24.000Z | from dataclasses import dataclass, field
from typing import List
from mitre_attack import INTRUSION_SET
from mitre_attack.data.types.object import Object
@dataclass(frozen=True)
class Group(Object):
type: str = field(default=INTRUSION_SET, init=False)
name: str
aliases: List[str] = field(default_factory=list)
contributors: List[str] = field(default_factory=list)
| 29.461538 | 57 | 0.775457 | from dataclasses import dataclass, field
from typing import List
from mitre_attack import INTRUSION_SET
from mitre_attack.data.types.object import Object
@dataclass(frozen=True)
class Group(Object):
type: str = field(default=INTRUSION_SET, init=False)
name: str
aliases: List[str] = field(default_factory=list)
contributors: List[str] = field(default_factory=list)
| true | true |
f71f6d3f9666a930b13bac187344c124d81e2c1e | 31,993 | py | Python | electrum/gui/kivy/uix/dialogs/lightning_channels.py | jacky4566/electrum | f1c2191392780a559ecdc374c81c82191a5d1eb5 | [
"MIT"
] | null | null | null | electrum/gui/kivy/uix/dialogs/lightning_channels.py | jacky4566/electrum | f1c2191392780a559ecdc374c81c82191a5d1eb5 | [
"MIT"
] | null | null | null | electrum/gui/kivy/uix/dialogs/lightning_channels.py | jacky4566/electrum | f1c2191392780a559ecdc374c81c82191a5d1eb5 | [
"MIT"
] | null | null | null | import asyncio
from typing import TYPE_CHECKING, Optional, Union
from kivy.lang import Builder
from kivy.factory import Factory
from kivy.uix.popup import Popup
from .fee_dialog import FeeDialog
from electrum.util import bh2u
from electrum.logging import Logger
from electrum.lnutil import LOCAL, REMOTE, format_short_channel_id
from electrum.lnchannel import AbstractChannel, Channel, ChannelState
from electrum.gui.kivy.i18n import _
from .question import Question
from electrum.transaction import PartialTxOutput, Transaction
from electrum.util import NotEnoughFunds, NoDynamicFeeEstimates, format_fee_satoshis, quantize_feerate
from electrum.lnutil import ln_dummy_address
from electrum.gui import messages
from .qr_dialog import QRDialog
from .choice_dialog import ChoiceDialog
if TYPE_CHECKING:
from ...main_window import ElectrumWindow
from electrum import SimpleConfig
Builder.load_string(r'''
<SwapDialog@Popup>
id: popup
title: _('Lightning Swap')
size_hint: 0.8, 0.8
pos_hint: {'top':0.9}
mining_fee_text: ''
fee_rate_text: ''
method: 0
BoxLayout:
orientation: 'vertical'
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Send') + ':'
size_hint: 0.4, 1
Label:
id: send_amount_label
size_hint: 0.6, 1
text: _('0')
background_color: (0,0,0,0)
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Receive') + ':'
size_hint: 0.4, 1
Label:
id: receive_amount_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Server Fee') + ':'
size_hint: 0.4, 1
Label:
id: server_fee_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
id: swap_action_label
text: _('Adds receiving capacity')
background_color: (0,0,0,0)
font_size: '14dp'
Slider:
id: swap_slider
range: 0, 4
step: 1
on_value: root.swap_slider_moved(self.value)
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Mining Fee') + ':'
size_hint: 0.4, 1
Button:
text: root.mining_fee_text + ' (' + root.fee_rate_text + ')'
background_color: (0,0,0,0)
bold: True
on_release:
root.on_fee_button()
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
TopLabel:
id: fee_estimate
text: ''
font_size: '14dp'
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Button:
text: 'Cancel'
size_hint: 0.5, None
height: '48dp'
on_release: root.dismiss()
Button:
id: ok_button
text: 'OK'
size_hint: 0.5, None
height: '48dp'
on_release:
root.on_ok()
root.dismiss()
<LightningChannelItem@CardItem>
details: {}
active: False
short_channel_id: '<channelId not set>'
status: ''
is_backup: False
balances: ''
node_alias: ''
_chan: None
BoxLayout:
size_hint: 0.7, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
color: (.5,.5,.5,1) if not root.active else (1,1,1,1)
text: root.short_channel_id
font_size: '15sp'
Widget
CardLabel:
font_size: '13sp'
shorten: True
text: root.node_alias
Widget
BoxLayout:
size_hint: 0.3, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
text: root.status
font_size: '13sp'
halign: 'right'
Widget
CardLabel:
text: root.balances if not root.is_backup else ''
font_size: '13sp'
halign: 'right'
Widget
<LightningChannelsDialog@Popup>:
name: 'lightning_channels'
title: _('Lightning Network')
has_lightning: False
has_gossip: False
can_send: ''
can_receive: ''
num_channels_text: ''
id: popup
BoxLayout:
id: box
orientation: 'vertical'
spacing: '2dp'
padding: '12dp'
BoxLabel:
text: _('You can send') + ':'
value: root.can_send
BoxLabel:
text: _('You can receive') + ':'
value: root.can_receive
TopLabel:
text: root.num_channels_text
ScrollView:
GridLayout:
cols: 1
id: lightning_channels_container
size_hint: 1, None
height: self.minimum_height
spacing: '2dp'
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Open Channel')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('lightning_open_channel_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Swap')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('swap_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Gossip')
disabled: not root.has_gossip
on_release: popup.app.popup_dialog('lightning')
<ChannelDetailsPopup@Popup>:
id: popuproot
data: []
is_closed: False
is_redeemed: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
local_ctn:0
remote_ctn:0
local_csv:0
remote_csv:0
feerate:''
can_send:''
can_receive:''
is_open:False
warning: ''
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
TopLabel:
text: root.warning
color: .905, .709, .509, 1
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
BoxLabel:
text: _('Can send')
value: root.can_send if root.is_open else 'n/a'
BoxLabel:
text: _('Can receive')
value: root.can_receive if root.is_open else 'n/a'
BoxLabel:
text: _('CSV delay')
value: 'Local: %d\nRemote: %d' % (root.local_csv, root.remote_csv)
BoxLabel:
text: _('CTN')
value: 'Local: %d\nRemote: %d' % (root.local_ctn, root.remote_ctn)
BoxLabel:
text: _('Fee rate')
value: '{} sat/byte'.format(root.feerate)
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Backup')
on_release: root.export_backup()
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Close')
on_release: root.close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Force-close')
on_release: root.force_close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_channel()
disabled: not root.is_redeemed
<ChannelBackupPopup@Popup>:
id: popuproot
data: []
is_funded: False
is_imported: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
is_open:False
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Request force-close')
on_release: root.request_force_close()
disabled: not root.is_funded
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_backup()
disabled: not root.is_imported
''')
class ChannelBackupPopup(Popup, Logger):
    """Detail popup for a channel backup.

    A backup cannot be spent from directly; the only actions offered are
    asking the remote peer to force-close (so funds return on-chain) and,
    for imported backups, deleting the backup from the wallet.
    """

    def __init__(self, chan: AbstractChannel, app, **kwargs):
        Popup.__init__(self, **kwargs)
        Logger.__init__(self)
        self.app = app
        self.chan = chan
        self.title = _('Channel Backup')
        # Kivy properties rendered by the kv rules of this popup:
        self.is_funded = chan.get_state() == ChannelState.FUNDED
        self.is_imported = chan.is_imported
        self.funding_txid = chan.funding_outpoint.txid
        self.short_id = format_short_channel_id(chan.short_channel_id)
        self.capacity = self.app.format_amount_and_units(chan.get_capacity())
        self.state = chan.get_state_for_GUI()

    def request_force_close(self):
        """Ask the user to confirm requesting a force-close from the peer."""
        Question(_('Request force close?'), self._request_force_close).open()

    def _request_force_close(self, confirmed):
        # Confirmation-dialog callback; do nothing unless the user agreed.
        if not confirmed:
            return
        lnworker = self.app.wallet.lnworker
        loop = self.app.wallet.network.asyncio_loop
        fut = asyncio.run_coroutine_threadsafe(
            lnworker.request_force_close_from_backup(self.chan.channel_id), loop)
        try:
            fut.result(5)
            self.app.show_info(_('Request sent'))
        except Exception as e:
            self.logger.exception("Could not close channel")
            # repr because str(Exception()) == ''
            self.app.show_info(_('Could not close channel: ') + repr(e))

    def remove_backup(self):
        """Ask the user to confirm deleting this backup."""
        Question(_('Delete backup?'), self._remove_backup).open()

    def _remove_backup(self, confirmed):
        # Confirmation-dialog callback; do nothing unless the user agreed.
        if not confirmed:
            return
        self.app.wallet.lnworker.remove_channel_backup(self.chan.channel_id)
        self.dismiss()
class ChannelDetailsPopup(Popup, Logger):
    """Detail view for a regular (non-backup) Lightning channel.

    Channel data is snapshotted once in __init__ into Kivy properties that
    the kv rules render.  Offered actions: cooperative close / request
    force-close, local force-close (with mandatory backup step), export a
    channel backup, and delete a fully redeemed channel.
    """

    def __init__(self, chan: Channel, app: 'ElectrumWindow', **kwargs):
        Popup.__init__(self, **kwargs)
        Logger.__init__(self)
        # Flags driving which action buttons are enabled in the kv layout.
        self.is_closed = chan.is_closed()
        self.is_redeemed = chan.is_redeemed()
        self.app = app
        self.chan = chan
        self.title = _('Channel details')
        self.node_id = bh2u(chan.node_id)
        self.channel_id = bh2u(chan.channel_id)
        self.funding_txid = chan.funding_outpoint.txid
        self.short_id = format_short_channel_id(chan.short_channel_id)
        self.capacity = self.app.format_amount_and_units(chan.get_capacity())
        self.state = chan.get_state_for_GUI()
        # Latest commitment numbers and CSV (to_self_delay) for both sides.
        self.local_ctn = chan.get_latest_ctn(LOCAL)
        self.remote_ctn = chan.get_latest_ctn(REMOTE)
        self.local_csv = chan.config[LOCAL].to_self_delay
        self.remote_csv = chan.config[REMOTE].to_self_delay
        self.initiator = 'Local' if chan.constraints.is_initiator else 'Remote'
        # Commitment feerate: converted from sat/kw to sat/byte for display.
        feerate_kw = chan.get_latest_feerate(LOCAL)
        self.feerate = str(quantize_feerate(Transaction.satperbyte_from_satperkw(feerate_kw)))
        # available_to_spend() returns msat; // 1000 converts to sat.
        self.can_send = self.app.format_amount_and_units(chan.available_to_spend(LOCAL) // 1000)
        self.can_receive = self.app.format_amount_and_units(chan.available_to_spend(REMOTE) // 1000)
        self.is_open = chan.is_open()
        closed = chan.get_closing_height()
        if closed:
            # If not closed, closing_txid keeps the kv property default ''.
            self.closing_txid, closing_height, closing_timestamp = closed
        msg = ' '.join([
            _("Trampoline routing is enabled, but this channel is with a non-trampoline node."),
            _("This channel may still be used for receiving, but it is frozen for sending."),
            _("If you want to keep using this channel, you need to disable trampoline routing in your preferences."),
        ])
        # Warn when running without gossip (no channel_db) and the peer is
        # not one of the known trampoline nodes.
        self.warning = '' if self.app.wallet.lnworker.channel_db or self.app.wallet.lnworker.is_trampoline_peer(chan.node_id) else _('Warning') + ': ' + msg

    def close(self):
        """Let the user pick between cooperative close and requesting a force-close."""
        dialog = ChoiceDialog(
            title=_('Close channel'),
            choices={0:_('Cooperative close'), 1:_('Request force-close')}, key=0,
            callback=self._close,
            description=_(messages.MSG_REQUEST_FORCE_CLOSE),
            keep_choice_order=True)
        dialog.open()

    def _close(self, choice):
        # ChoiceDialog callback: choice 0 = cooperative close,
        # choice 1 = ask the remote peer to force-close.
        loop = self.app.wallet.network.asyncio_loop
        if choice == 1:
            coro = self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id)
            msg = _('Request sent')
        else:
            coro = self.app.wallet.lnworker.close_channel(self.chan.channel_id)
            msg = _('Channel closed')
        f = asyncio.run_coroutine_threadsafe(coro, loop)
        try:
            f.result(5)
            self.app.show_info(msg)
        except Exception as e:
            self.logger.exception("Could not close channel")
            self.app.show_info(_('Could not close channel: ') + repr(e))  # repr because str(Exception()) == ''

    def remove_channel(self):
        """Ask for confirmation before purging the channel from the wallet."""
        msg = _('Are you sure you want to delete this channel? This will purge associated transactions from your wallet history.')
        Question(msg, self._remove_channel).open()

    def _remove_channel(self, b):
        # Confirmation-dialog callback; b is True only if the user agreed.
        if not b:
            return
        self.app.wallet.lnworker.remove_channel(self.chan.channel_id)
        self.app._trigger_update_history()
        self.dismiss()

    def export_backup(self):
        """Show the channel backup as a QR code with an explanatory help text."""
        text = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
        # TODO: some messages are duplicated between Kivy and Qt.
        help_text = ' '.join([
            _("Channel backups can be imported in another instance of the same wallet, by scanning this QR code."),
            _("Please note that channel backups cannot be used to restore your channels."),
            _("If you lose your wallet file, the only thing you can do with a backup is to request your channel to be closed, so that your funds will be sent on-chain."),
        ])
        self.app.qr_dialog(_("Channel Backup " + self.chan.short_id_for_GUI()), text, help_text=help_text)

    def force_close(self):
        """Start the force-close flow: show a backup QR first, then confirm."""
        if self.chan.is_closed():
            self.app.show_error(_('Channel already closed'))
            return
        to_self_delay = self.chan.config[REMOTE].to_self_delay
        help_text = ' '.join([
            _('If you force-close this channel, the funds you have in it will not be available for {} blocks.').format(to_self_delay),
            _('During that time, funds will not be recoverable from your seed, and may be lost if you lose your device.'),
            _('To prevent that, please save this channel backup.'),
            _('It may be imported in another wallet with the same seed.')
        ])
        title = _('Save backup and force-close')
        data = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
        # The backup QR is shown first; its close button chains into the
        # actual confirmation dialog.
        popup = QRDialog(
            title, data,
            show_text=False,
            text_for_clipboard=data,
            help_text=help_text,
            close_button_text=_('Next'),
            on_close=self._confirm_force_close)
        popup.open()

    def _confirm_force_close(self):
        # Second step of the force-close flow: final yes/no question.
        Question(
            _('Confirm force close?'),
            self._do_force_close,
            title=_('Force-close channel'),
            no_str=_('Cancel'),
            yes_str=_('Proceed')).open()

    def _do_force_close(self, b):
        # Confirmation-dialog callback; b is True only if the user agreed.
        if not b:
            return
        loop = self.app.wallet.network.asyncio_loop
        coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.force_close_channel(self.chan.channel_id), loop)
        try:
            coro.result(1)
            self.app.show_info(_('Channel closed, you may need to wait at least {} blocks, because of CSV delays'.format(self.chan.config[REMOTE].to_self_delay)))
        except Exception as e:
            self.logger.exception("Could not force close channel")
            self.app.show_info(_('Could not force close channel: ') + repr(e))  # repr because str(Exception()) == ''
class LightningChannelsDialog(Factory.Popup):
    """Overview popup listing all Lightning channels and channel backups."""

    def __init__(self, app: 'ElectrumWindow'):
        super().__init__()
        self.clocks = []
        self.app = app
        self.has_lightning = app.wallet.has_lightning()
        self.has_gossip = self.app.network.channel_db is not None
        self.update()

    def show_item(self, obj):
        """Open the detail popup for the tapped channel card."""
        chan = obj._chan
        popup_cls = ChannelBackupPopup if chan.is_backup() else ChannelDetailsPopup
        popup_cls(chan, self.app).open()

    def _balance_label(self, chan, subject):
        # Spendable balance of `subject`; if the counterparty has outgoing
        # HTLCs in flight, show the pending delta as " (+...)".
        spendable = chan.balance_minus_outgoing_htlcs(subject)//1000
        label = self.app.format_amount(spendable)
        other = subject.inverted()
        bal_other = chan.balance(other)//1000
        spendable_other = chan.balance_minus_outgoing_htlcs(other)//1000
        if bal_other != spendable_other:
            label += ' (+' + self.app.format_amount(bal_other - spendable_other) + ')'
        return label

    def format_fields(self, chan):
        """Return [local, remote] balance strings; 'n/a' for closed channels."""
        local_label = self._balance_label(chan, LOCAL)
        remote_label = self._balance_label(chan, REMOTE)
        if chan.is_closed():
            return ['n/a', 'n/a']
        return [local_label, remote_label]

    def update_item(self, item):
        """Refresh the Kivy properties of one channel card."""
        chan = item._chan
        item.status = chan.get_state_for_GUI()
        item.short_channel_id = chan.short_id_for_GUI()
        local_bal, remote_bal = self.format_fields(chan)
        item.balances = local_bal + '/' + remote_bal
        self.update_can_send()

    def update(self):
        """Rebuild the list of channel cards from the wallet's lnworker."""
        container = self.ids.lightning_channels_container
        container.clear_widgets()
        if not self.app.wallet:
            return
        lnworker = self.app.wallet.lnworker
        chans = []
        if lnworker:
            chans = list(lnworker.channels.values()) + list(lnworker.channel_backups.values())
        for chan in chans:
            card = Factory.LightningChannelItem()
            card.screen = self
            card.active = not chan.is_closed()
            card.is_backup = chan.is_backup()
            card._chan = chan
            card.node_alias = lnworker.get_node_alias(chan.node_id) or chan.node_id.hex()
            self.update_item(card)
            container.add_widget(card)
        self.update_can_send()

    def update_can_send(self):
        """Refresh the aggregate can-send / can-receive header labels."""
        lnworker = self.app.wallet.lnworker
        if not lnworker:
            self.can_send = 'n/a'
            self.can_receive = 'n/a'
            return
        self.num_channels_text = _(f'You have {len(lnworker.channels)} channels.')
        self.can_send = self.app.format_amount_and_units(lnworker.num_sats_can_send())
        self.can_receive = self.app.format_amount_and_units(lnworker.num_sats_can_receive())
# Swaps should be done in due time which is why we recommend a certain fee.
RECOMMEND_BLOCKS_SWAP = 25
class SwapDialog(Factory.Popup):
    """Popup driving a submarine swap (on-chain <-> Lightning).

    The slider selects the swap direction and amount: negative positions are
    reverse swaps (pay offchain, receive onchain), positive positions are
    forward/normal swaps (pay onchain, receive offchain). Displayed amounts
    are net of server and mining fees.
    """
    def __init__(self, app: 'ElectrumWindow', config: 'SimpleConfig'):
        super(SwapDialog, self).__init__()
        self.app = app
        self.config = config
        self.fmt_amt = self.app.format_amount_and_units
        self.lnworker = self.app.wallet.lnworker
        # swap related
        self.swap_manager = self.lnworker.swap_manager
        self.send_amount: Optional[int] = None
        self.receive_amount: Optional[int] = None
        self.tx = None  # only for forward swap
        self.is_reverse = None
        # init swaps and sliders
        # NOTE(review): asyncio.run blocks the UI thread until the server's
        # pairs are fetched — confirm this is acceptable at popup creation.
        asyncio.run(self.swap_manager.get_pairs())
        self.update_and_init()
    def update_and_init(self):
        """Refresh fee text and slider limits, then reset the slider to zero."""
        self.update_fee_text()
        self.update_swap_slider()
        self.swap_slider_moved(0)
    def on_fee_button(self):
        """Open the fee dialog; limits/amounts are recomputed on change."""
        fee_dialog = FeeDialog(self, self.config, self.after_fee_changed)
        fee_dialog.open()
    def after_fee_changed(self):
        """Callback after a new fee was chosen: recompute text, limits, amounts."""
        self.update_fee_text()
        self.update_swap_slider()
        self.swap_slider_moved(self.ids.swap_slider.value)
    def update_fee_text(self):
        """Show the current fee rate and warn if it risks missing the swap timeout."""
        fee_per_kb = self.config.fee_per_kb()
        # eta is -1 when block inclusion cannot be estimated for low fees
        eta = self.config.fee_to_eta(fee_per_kb)
        fee_per_b = format_fee_satoshis(fee_per_kb / 1000)
        suggest_fee = self.config.eta_target_to_fee(RECOMMEND_BLOCKS_SWAP)
        suggest_fee_per_b = format_fee_satoshis(suggest_fee / 1000)
        s = 's' if eta > 1 else ''
        if eta > RECOMMEND_BLOCKS_SWAP or eta == -1:
            msg = f'Warning: Your fee rate of {fee_per_b} sat/B may be too ' \
                  f'low for the swap to succeed before its timeout. ' \
                  f'The recommended fee rate is at least {suggest_fee_per_b} ' \
                  f'sat/B.'
        else:
            msg = f'Info: Your swap is estimated to be processed in {eta} ' \
                  f'block{s} with an onchain fee rate of {fee_per_b} sat/B.'
        self.fee_rate_text = f'{fee_per_b} sat/B'
        self.ids.fee_estimate.text = msg
    def update_tx(self, onchain_amount: Union[int, str]):
        """Updates the transaction associated with a forward swap.

        onchain_amount may be an int (sats) or '!' to spend the maximum.
        On failure (no funds / no fee estimate) self.tx is cleared and the
        OK button is disabled.
        """
        if onchain_amount is None:
            self.tx = None
            self.ids.ok_button.disabled = True
            return
        outputs = [PartialTxOutput.from_address_and_value(ln_dummy_address(), onchain_amount)]
        coins = self.app.wallet.get_spendable_coins(None)
        try:
            self.tx = self.app.wallet.make_unsigned_transaction(
                coins=coins,
                outputs=outputs)
        except (NotEnoughFunds, NoDynamicFeeEstimates):
            self.tx = None
            self.ids.ok_button.disabled = True
    def update_swap_slider(self):
        """Sets the minimal and maximal amount that can be swapped for the swap
        slider."""
        # tx is updated again afterwards with send_amount in case of normal swap
        # this is just to estimate the maximal spendable onchain amount for HTLC
        self.update_tx('!')
        try:
            max_onchain_spend = self.tx.output_value_for_address(ln_dummy_address())
        except AttributeError:  # happens if there are no utxos
            max_onchain_spend = 0
        reverse = int(min(self.lnworker.num_sats_can_send(),
                          self.swap_manager.get_max_amount()))
        forward = int(min(self.lnworker.num_sats_can_receive(),
                          # maximally supported swap amount by provider
                          self.swap_manager.get_max_amount(),
                          max_onchain_spend))
        # we expect range to adjust the value of the swap slider to be in the
        # correct range, i.e., to correct an overflow when reducing the limits
        self.ids.swap_slider.range = (-reverse, forward)
    def swap_slider_moved(self, position: float):
        """Recompute send/receive amounts and fee labels for a slider position.

        Negative positions select a reverse swap, positive ones a forward swap.
        """
        position = int(position)
        # pay_amount and receive_amounts are always with fees already included
        # so they reflect the net balance change after the swap
        if position < 0:  # reverse swap
            self.ids.swap_action_label.text = "Adds Lightning receiving capacity."
            self.is_reverse = True
            pay_amount = abs(position)
            self.send_amount = pay_amount
            self.ids.send_amount_label.text = \
                f"{self.fmt_amt(pay_amount)} (offchain)" if pay_amount else ""
            receive_amount = self.swap_manager.get_recv_amount(
                send_amount=pay_amount, is_reverse=True)
            self.receive_amount = receive_amount
            self.ids.receive_amount_label.text = \
                f"{self.fmt_amt(receive_amount)} (onchain)" if receive_amount else ""
            # fee breakdown
            self.ids.server_fee_label.text = \
                f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.lockup_fee)}"
            self.mining_fee_text = \
                f"{self.fmt_amt(self.swap_manager.get_claim_fee())}"
        else:  # forward (normal) swap
            self.ids.swap_action_label.text = f"Adds Lightning sending capacity."
            self.is_reverse = False
            self.send_amount = position
            self.update_tx(self.send_amount)
            # add lockup fees, but the swap amount is position
            pay_amount = position + self.tx.get_fee() if self.tx else 0
            self.ids.send_amount_label.text = \
                f"{self.fmt_amt(pay_amount)} (onchain)" if self.fmt_amt(pay_amount) else ""
            receive_amount = self.swap_manager.get_recv_amount(
                send_amount=position, is_reverse=False)
            self.receive_amount = receive_amount
            self.ids.receive_amount_label.text = \
                f"{self.fmt_amt(receive_amount)} (offchain)" if receive_amount else ""
            # fee breakdown
            self.ids.server_fee_label.text = \
                f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.normal_fee)}"
            self.mining_fee_text = \
                f"{self.fmt_amt(self.tx.get_fee())}" if self.tx else ""
        if pay_amount and receive_amount:
            self.ids.ok_button.disabled = False
        else:
            # add more nuanced error reporting?
            self.ids.swap_action_label.text = "Swap below minimal swap size, change the slider."
            self.ids.ok_button.disabled = True
    def do_normal_swap(self, lightning_amount, onchain_amount, password):
        """Schedule a forward swap (pay onchain via self.tx, receive offchain)."""
        tx = self.tx
        assert tx
        if lightning_amount is None or onchain_amount is None:
            return
        loop = self.app.network.asyncio_loop
        coro = self.swap_manager.normal_swap(
            lightning_amount_sat=lightning_amount,
            expected_onchain_amount_sat=onchain_amount,
            password=password,
            tx=tx,
        )
        asyncio.run_coroutine_threadsafe(coro, loop)
    def do_reverse_swap(self, lightning_amount, onchain_amount, password):
        """Schedule a reverse swap (pay offchain, receive onchain).

        The displayed onchain amount is net of the claim fee, so it is added
        back here to obtain the amount expected from the server.
        """
        if lightning_amount is None or onchain_amount is None:
            return
        loop = self.app.network.asyncio_loop
        coro = self.swap_manager.reverse_swap(
            lightning_amount_sat=lightning_amount,
            expected_onchain_amount_sat=onchain_amount + self.swap_manager.get_claim_fee(),
        )
        asyncio.run_coroutine_threadsafe(coro, loop)
    def on_ok(self):
        """OK button handler: ask for password confirmation and start the swap."""
        if not self.app.network:
            # NOTE(review): no 'window' attribute is defined on this popup in
            # the visible code — should this be self.app.show_error? Confirm.
            self.window.show_error(_("You are offline."))
            return
        if self.is_reverse:
            lightning_amount = self.send_amount
            onchain_amount = self.receive_amount
            self.app.protected(
                'Do you want to do a reverse submarine swap?',
                self.do_reverse_swap, (lightning_amount, onchain_amount))
        else:
            lightning_amount = self.receive_amount
            onchain_amount = self.send_amount
            self.app.protected(
                'Do you want to do a submarine swap? '
                'You will need to wait for the swap transaction to confirm.',
                self.do_normal_swap, (lightning_amount, onchain_amount))
| 37.772137 | 170 | 0.568218 | import asyncio
from typing import TYPE_CHECKING, Optional, Union
from kivy.lang import Builder
from kivy.factory import Factory
from kivy.uix.popup import Popup
from .fee_dialog import FeeDialog
from electrum.util import bh2u
from electrum.logging import Logger
from electrum.lnutil import LOCAL, REMOTE, format_short_channel_id
from electrum.lnchannel import AbstractChannel, Channel, ChannelState
from electrum.gui.kivy.i18n import _
from .question import Question
from electrum.transaction import PartialTxOutput, Transaction
from electrum.util import NotEnoughFunds, NoDynamicFeeEstimates, format_fee_satoshis, quantize_feerate
from electrum.lnutil import ln_dummy_address
from electrum.gui import messages
from .qr_dialog import QRDialog
from .choice_dialog import ChoiceDialog
if TYPE_CHECKING:
from ...main_window import ElectrumWindow
from electrum import SimpleConfig
Builder.load_string(r'''
<SwapDialog@Popup>
id: popup
title: _('Lightning Swap')
size_hint: 0.8, 0.8
pos_hint: {'top':0.9}
mining_fee_text: ''
fee_rate_text: ''
method: 0
BoxLayout:
orientation: 'vertical'
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Send') + ':'
size_hint: 0.4, 1
Label:
id: send_amount_label
size_hint: 0.6, 1
text: _('0')
background_color: (0,0,0,0)
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('You Receive') + ':'
size_hint: 0.4, 1
Label:
id: receive_amount_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Server Fee') + ':'
size_hint: 0.4, 1
Label:
id: server_fee_label
text: _('0')
background_color: (0,0,0,0)
size_hint: 0.6, 1
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
id: swap_action_label
text: _('Adds receiving capacity')
background_color: (0,0,0,0)
font_size: '14dp'
Slider:
id: swap_slider
range: 0, 4
step: 1
on_value: root.swap_slider_moved(self.value)
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Label:
text: _('Mining Fee') + ':'
size_hint: 0.4, 1
Button:
text: root.mining_fee_text + ' (' + root.fee_rate_text + ')'
background_color: (0,0,0,0)
bold: True
on_release:
root.on_fee_button()
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
TopLabel:
id: fee_estimate
text: ''
font_size: '14dp'
Widget:
size_hint: 1, 0.5
BoxLayout:
orientation: 'horizontal'
size_hint: 1, 0.5
Button:
text: 'Cancel'
size_hint: 0.5, None
height: '48dp'
on_release: root.dismiss()
Button:
id: ok_button
text: 'OK'
size_hint: 0.5, None
height: '48dp'
on_release:
root.on_ok()
root.dismiss()
<LightningChannelItem@CardItem>
details: {}
active: False
short_channel_id: '<channelId not set>'
status: ''
is_backup: False
balances: ''
node_alias: ''
_chan: None
BoxLayout:
size_hint: 0.7, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
color: (.5,.5,.5,1) if not root.active else (1,1,1,1)
text: root.short_channel_id
font_size: '15sp'
Widget
CardLabel:
font_size: '13sp'
shorten: True
text: root.node_alias
Widget
BoxLayout:
size_hint: 0.3, None
spacing: '8dp'
height: '32dp'
orientation: 'vertical'
Widget
CardLabel:
text: root.status
font_size: '13sp'
halign: 'right'
Widget
CardLabel:
text: root.balances if not root.is_backup else ''
font_size: '13sp'
halign: 'right'
Widget
<LightningChannelsDialog@Popup>:
name: 'lightning_channels'
title: _('Lightning Network')
has_lightning: False
has_gossip: False
can_send: ''
can_receive: ''
num_channels_text: ''
id: popup
BoxLayout:
id: box
orientation: 'vertical'
spacing: '2dp'
padding: '12dp'
BoxLabel:
text: _('You can send') + ':'
value: root.can_send
BoxLabel:
text: _('You can receive') + ':'
value: root.can_receive
TopLabel:
text: root.num_channels_text
ScrollView:
GridLayout:
cols: 1
id: lightning_channels_container
size_hint: 1, None
height: self.minimum_height
spacing: '2dp'
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Open Channel')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('lightning_open_channel_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Swap')
disabled: not root.has_lightning
on_release: popup.app.popup_dialog('swap_dialog')
Button:
size_hint: 0.3, None
height: '48dp'
text: _('Gossip')
disabled: not root.has_gossip
on_release: popup.app.popup_dialog('lightning')
<ChannelDetailsPopup@Popup>:
id: popuproot
data: []
is_closed: False
is_redeemed: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
local_ctn:0
remote_ctn:0
local_csv:0
remote_csv:0
feerate:''
can_send:''
can_receive:''
is_open:False
warning: ''
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
TopLabel:
text: root.warning
color: .905, .709, .509, 1
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
BoxLabel:
text: _('Can send')
value: root.can_send if root.is_open else 'n/a'
BoxLabel:
text: _('Can receive')
value: root.can_receive if root.is_open else 'n/a'
BoxLabel:
text: _('CSV delay')
value: 'Local: %d\nRemote: %d' % (root.local_csv, root.remote_csv)
BoxLabel:
text: _('CTN')
value: 'Local: %d\nRemote: %d' % (root.local_ctn, root.remote_ctn)
BoxLabel:
text: _('Fee rate')
value: '{} sat/byte'.format(root.feerate)
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Backup')
on_release: root.export_backup()
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Close')
on_release: root.close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Force-close')
on_release: root.force_close()
disabled: root.is_closed
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_channel()
disabled: not root.is_redeemed
<ChannelBackupPopup@Popup>:
id: popuproot
data: []
is_funded: False
is_imported: False
node_id:''
short_id:''
initiator:''
capacity:''
funding_txid:''
closing_txid:''
state:''
is_open:False
BoxLayout:
padding: '12dp', '12dp', '12dp', '12dp'
spacing: '12dp'
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
scroll_wheel_distance: dp(114)
BoxLayout:
orientation: 'vertical'
height: self.minimum_height
size_hint_y: None
spacing: '5dp'
BoxLabel:
text: _('Channel ID')
value: root.short_id
BoxLabel:
text: _('State')
value: root.state
BoxLabel:
text: _('Initiator')
value: root.initiator
BoxLabel:
text: _('Capacity')
value: root.capacity
Widget:
size_hint: 1, 0.1
TopLabel:
text: _('Remote Node ID')
TxHashLabel:
data: root.node_id
name: _('Remote Node ID')
TopLabel:
text: _('Funding Transaction')
TxHashLabel:
data: root.funding_txid
name: _('Funding Transaction')
touch_callback: lambda: app.show_transaction(root.funding_txid)
TopLabel:
text: _('Closing Transaction')
opacity: int(bool(root.closing_txid))
TxHashLabel:
opacity: int(bool(root.closing_txid))
data: root.closing_txid
name: _('Closing Transaction')
touch_callback: lambda: app.show_transaction(root.closing_txid)
Widget:
size_hint: 1, 0.1
Widget:
size_hint: 1, 0.05
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Request force-close')
on_release: root.request_force_close()
disabled: not root.is_funded
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Delete')
on_release: root.remove_backup()
disabled: not root.is_imported
''')
class ChannelBackupPopup(Popup, Logger):
def __init__(self, chan: AbstractChannel, app, **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.chan = chan
self.is_funded = chan.get_state() == ChannelState.FUNDED
self.is_imported = chan.is_imported
self.funding_txid = chan.funding_outpoint.txid
self.app = app
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.title = _('Channel Backup')
def request_force_close(self):
msg = _('Request force close?')
Question(msg, self._request_force_close).open()
def _request_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id), loop)
try:
coro.result(5)
self.app.show_info(_('Request sent'))
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e))
def remove_backup(self):
msg = _('Delete backup?')
Question(msg, self._remove_backup).open()
def _remove_backup(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel_backup(self.chan.channel_id)
self.dismiss()
class ChannelDetailsPopup(Popup, Logger):
def __init__(self, chan: Channel, app: 'ElectrumWindow', **kwargs):
Popup.__init__(self, **kwargs)
Logger.__init__(self)
self.is_closed = chan.is_closed()
self.is_redeemed = chan.is_redeemed()
self.app = app
self.chan = chan
self.title = _('Channel details')
self.node_id = bh2u(chan.node_id)
self.channel_id = bh2u(chan.channel_id)
self.funding_txid = chan.funding_outpoint.txid
self.short_id = format_short_channel_id(chan.short_channel_id)
self.capacity = self.app.format_amount_and_units(chan.get_capacity())
self.state = chan.get_state_for_GUI()
self.local_ctn = chan.get_latest_ctn(LOCAL)
self.remote_ctn = chan.get_latest_ctn(REMOTE)
self.local_csv = chan.config[LOCAL].to_self_delay
self.remote_csv = chan.config[REMOTE].to_self_delay
self.initiator = 'Local' if chan.constraints.is_initiator else 'Remote'
feerate_kw = chan.get_latest_feerate(LOCAL)
self.feerate = str(quantize_feerate(Transaction.satperbyte_from_satperkw(feerate_kw)))
self.can_send = self.app.format_amount_and_units(chan.available_to_spend(LOCAL) // 1000)
self.can_receive = self.app.format_amount_and_units(chan.available_to_spend(REMOTE) // 1000)
self.is_open = chan.is_open()
closed = chan.get_closing_height()
if closed:
self.closing_txid, closing_height, closing_timestamp = closed
msg = ' '.join([
_("Trampoline routing is enabled, but this channel is with a non-trampoline node."),
_("This channel may still be used for receiving, but it is frozen for sending."),
_("If you want to keep using this channel, you need to disable trampoline routing in your preferences."),
])
self.warning = '' if self.app.wallet.lnworker.channel_db or self.app.wallet.lnworker.is_trampoline_peer(chan.node_id) else _('Warning') + ': ' + msg
def close(self):
dialog = ChoiceDialog(
title=_('Close channel'),
choices={0:_('Cooperative close'), 1:_('Request force-close')}, key=0,
callback=self._close,
description=_(messages.MSG_REQUEST_FORCE_CLOSE),
keep_choice_order=True)
dialog.open()
def _close(self, choice):
loop = self.app.wallet.network.asyncio_loop
if choice == 1:
coro = self.app.wallet.lnworker.request_force_close_from_backup(self.chan.channel_id)
msg = _('Request sent')
else:
coro = self.app.wallet.lnworker.close_channel(self.chan.channel_id)
msg = _('Channel closed')
f = asyncio.run_coroutine_threadsafe(coro, loop)
try:
f.result(5)
self.app.show_info(msg)
except Exception as e:
self.logger.exception("Could not close channel")
self.app.show_info(_('Could not close channel: ') + repr(e))
def remove_channel(self):
msg = _('Are you sure you want to delete this channel? This will purge associated transactions from your wallet history.')
Question(msg, self._remove_channel).open()
def _remove_channel(self, b):
if not b:
return
self.app.wallet.lnworker.remove_channel(self.chan.channel_id)
self.app._trigger_update_history()
self.dismiss()
def export_backup(self):
text = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
help_text = ' '.join([
_("Channel backups can be imported in another instance of the same wallet, by scanning this QR code."),
_("Please note that channel backups cannot be used to restore your channels."),
_("If you lose your wallet file, the only thing you can do with a backup is to request your channel to be closed, so that your funds will be sent on-chain."),
])
self.app.qr_dialog(_("Channel Backup " + self.chan.short_id_for_GUI()), text, help_text=help_text)
def force_close(self):
if self.chan.is_closed():
self.app.show_error(_('Channel already closed'))
return
to_self_delay = self.chan.config[REMOTE].to_self_delay
help_text = ' '.join([
_('If you force-close this channel, the funds you have in it will not be available for {} blocks.').format(to_self_delay),
_('During that time, funds will not be recoverable from your seed, and may be lost if you lose your device.'),
_('To prevent that, please save this channel backup.'),
_('It may be imported in another wallet with the same seed.')
])
title = _('Save backup and force-close')
data = self.app.wallet.lnworker.export_channel_backup(self.chan.channel_id)
popup = QRDialog(
title, data,
show_text=False,
text_for_clipboard=data,
help_text=help_text,
close_button_text=_('Next'),
on_close=self._confirm_force_close)
popup.open()
def _confirm_force_close(self):
Question(
_('Confirm force close?'),
self._do_force_close,
title=_('Force-close channel'),
no_str=_('Cancel'),
yes_str=_('Proceed')).open()
def _do_force_close(self, b):
if not b:
return
loop = self.app.wallet.network.asyncio_loop
coro = asyncio.run_coroutine_threadsafe(self.app.wallet.lnworker.force_close_channel(self.chan.channel_id), loop)
try:
coro.result(1)
self.app.show_info(_('Channel closed, you may need to wait at least {} blocks, because of CSV delays'.format(self.chan.config[REMOTE].to_self_delay)))
except Exception as e:
self.logger.exception("Could not force close channel")
self.app.show_info(_('Could not force close channel: ') + repr(e))
class LightningChannelsDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow'):
super(LightningChannelsDialog, self).__init__()
self.clocks = []
self.app = app
self.has_lightning = app.wallet.has_lightning()
self.has_gossip = self.app.network.channel_db is not None
self.update()
def show_item(self, obj):
chan = obj._chan
if chan.is_backup():
p = ChannelBackupPopup(chan, self.app)
else:
p = ChannelDetailsPopup(chan, self.app)
p.open()
def format_fields(self, chan):
labels = {}
for subject in (REMOTE, LOCAL):
bal_minus_htlcs = chan.balance_minus_outgoing_htlcs(subject)//1000
label = self.app.format_amount(bal_minus_htlcs)
other = subject.inverted()
bal_other = chan.balance(other)//1000
bal_minus_htlcs_other = chan.balance_minus_outgoing_htlcs(other)//1000
if bal_other != bal_minus_htlcs_other:
label += ' (+' + self.app.format_amount(bal_other - bal_minus_htlcs_other) + ')'
labels[subject] = label
closed = chan.is_closed()
return [
'n/a' if closed else labels[LOCAL],
'n/a' if closed else labels[REMOTE],
]
def update_item(self, item):
chan = item._chan
item.status = chan.get_state_for_GUI()
item.short_channel_id = chan.short_id_for_GUI()
l, r = self.format_fields(chan)
item.balances = l + '/' + r
self.update_can_send()
def update(self):
channel_cards = self.ids.lightning_channels_container
channel_cards.clear_widgets()
if not self.app.wallet:
return
lnworker = self.app.wallet.lnworker
channels = list(lnworker.channels.values()) if lnworker else []
backups = list(lnworker.channel_backups.values()) if lnworker else []
for i in channels + backups:
item = Factory.LightningChannelItem()
item.screen = self
item.active = not i.is_closed()
item.is_backup = i.is_backup()
item._chan = i
item.node_alias = lnworker.get_node_alias(i.node_id) or i.node_id.hex()
self.update_item(item)
channel_cards.add_widget(item)
self.update_can_send()
def update_can_send(self):
lnworker = self.app.wallet.lnworker
if not lnworker:
self.can_send = 'n/a'
self.can_receive = 'n/a'
return
self.num_channels_text = _(f'You have {len(lnworker.channels)} channels.')
self.can_send = self.app.format_amount_and_units(lnworker.num_sats_can_send())
self.can_receive = self.app.format_amount_and_units(lnworker.num_sats_can_receive())
RECOMMEND_BLOCKS_SWAP = 25
class SwapDialog(Factory.Popup):
def __init__(self, app: 'ElectrumWindow', config: 'SimpleConfig'):
super(SwapDialog, self).__init__()
self.app = app
self.config = config
self.fmt_amt = self.app.format_amount_and_units
self.lnworker = self.app.wallet.lnworker
self.swap_manager = self.lnworker.swap_manager
self.send_amount: Optional[int] = None
self.receive_amount: Optional[int] = None
self.tx = None
self.is_reverse = None
asyncio.run(self.swap_manager.get_pairs())
self.update_and_init()
def update_and_init(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(0)
def on_fee_button(self):
fee_dialog = FeeDialog(self, self.config, self.after_fee_changed)
fee_dialog.open()
def after_fee_changed(self):
self.update_fee_text()
self.update_swap_slider()
self.swap_slider_moved(self.ids.swap_slider.value)
def update_fee_text(self):
fee_per_kb = self.config.fee_per_kb()
eta = self.config.fee_to_eta(fee_per_kb)
fee_per_b = format_fee_satoshis(fee_per_kb / 1000)
suggest_fee = self.config.eta_target_to_fee(RECOMMEND_BLOCKS_SWAP)
suggest_fee_per_b = format_fee_satoshis(suggest_fee / 1000)
s = 's' if eta > 1 else ''
if eta > RECOMMEND_BLOCKS_SWAP or eta == -1:
msg = f'Warning: Your fee rate of {fee_per_b} sat/B may be too ' \
f'low for the swap to succeed before its timeout. ' \
f'The recommended fee rate is at least {suggest_fee_per_b} ' \
f'sat/B.'
else:
msg = f'Info: Your swap is estimated to be processed in {eta} ' \
f'block{s} with an onchain fee rate of {fee_per_b} sat/B.'
self.fee_rate_text = f'{fee_per_b} sat/B'
self.ids.fee_estimate.text = msg
def update_tx(self, onchain_amount: Union[int, str]):
if onchain_amount is None:
self.tx = None
self.ids.ok_button.disabled = True
return
outputs = [PartialTxOutput.from_address_and_value(ln_dummy_address(), onchain_amount)]
coins = self.app.wallet.get_spendable_coins(None)
try:
self.tx = self.app.wallet.make_unsigned_transaction(
coins=coins,
outputs=outputs)
except (NotEnoughFunds, NoDynamicFeeEstimates):
self.tx = None
self.ids.ok_button.disabled = True
def update_swap_slider(self):
self.update_tx('!')
try:
max_onchain_spend = self.tx.output_value_for_address(ln_dummy_address())
except AttributeError:
max_onchain_spend = 0
reverse = int(min(self.lnworker.num_sats_can_send(),
self.swap_manager.get_max_amount()))
forward = int(min(self.lnworker.num_sats_can_receive(),
self.swap_manager.get_max_amount(),
max_onchain_spend))
self.ids.swap_slider.range = (-reverse, forward)
def swap_slider_moved(self, position: float):
position = int(position)
if position < 0:
self.ids.swap_action_label.text = "Adds Lightning receiving capacity."
self.is_reverse = True
pay_amount = abs(position)
self.send_amount = pay_amount
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (offchain)" if pay_amount else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=pay_amount, is_reverse=True)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (onchain)" if receive_amount else ""
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.lockup_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.swap_manager.get_claim_fee())}"
else:
self.ids.swap_action_label.text = f"Adds Lightning sending capacity."
self.is_reverse = False
self.send_amount = position
self.update_tx(self.send_amount)
pay_amount = position + self.tx.get_fee() if self.tx else 0
self.ids.send_amount_label.text = \
f"{self.fmt_amt(pay_amount)} (onchain)" if self.fmt_amt(pay_amount) else ""
receive_amount = self.swap_manager.get_recv_amount(
send_amount=position, is_reverse=False)
self.receive_amount = receive_amount
self.ids.receive_amount_label.text = \
f"{self.fmt_amt(receive_amount)} (offchain)" if receive_amount else ""
self.ids.server_fee_label.text = \
f"{self.swap_manager.percentage:0.1f}% + {self.fmt_amt(self.swap_manager.normal_fee)}"
self.mining_fee_text = \
f"{self.fmt_amt(self.tx.get_fee())}" if self.tx else ""
if pay_amount and receive_amount:
self.ids.ok_button.disabled = False
else:
self.ids.swap_action_label.text = "Swap below minimal swap size, change the slider."
self.ids.ok_button.disabled = True
def do_normal_swap(self, lightning_amount, onchain_amount, password):
tx = self.tx
assert tx
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.normal_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount,
password=password,
tx=tx,
)
asyncio.run_coroutine_threadsafe(coro, loop)
def do_reverse_swap(self, lightning_amount, onchain_amount, password):
if lightning_amount is None or onchain_amount is None:
return
loop = self.app.network.asyncio_loop
coro = self.swap_manager.reverse_swap(
lightning_amount_sat=lightning_amount,
expected_onchain_amount_sat=onchain_amount + self.swap_manager.get_claim_fee(),
)
asyncio.run_coroutine_threadsafe(coro, loop)
def on_ok(self):
if not self.app.network:
self.window.show_error(_("You are offline."))
return
if self.is_reverse:
lightning_amount = self.send_amount
onchain_amount = self.receive_amount
self.app.protected(
'Do you want to do a reverse submarine swap?',
self.do_reverse_swap, (lightning_amount, onchain_amount))
else:
lightning_amount = self.receive_amount
onchain_amount = self.send_amount
self.app.protected(
'Do you want to do a submarine swap? '
'You will need to wait for the swap transaction to confirm.',
self.do_normal_swap, (lightning_amount, onchain_amount))
| true | true |
f71f6d9f3398355ffe923f131ddebd4aceaed71f | 8,876 | py | Python | tests/conftest.py | forestriveral/floris | 02c31e121283ad6ccae987cfa3aa1bf1e4b43014 | [
"Apache-2.0"
] | null | null | null | tests/conftest.py | forestriveral/floris | 02c31e121283ad6ccae987cfa3aa1bf1e4b43014 | [
"Apache-2.0"
] | null | null | null | tests/conftest.py | forestriveral/floris | 02c31e121283ad6ccae987cfa3aa1bf1e4b43014 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 NREL
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# See https://floris.readthedocs.io for documentation
import pytest
def turbines_to_array(turbine_list: list):
    """Collect [Ct, power, aI, average_velocity] for each turbine.

    Returns a list with one 4-element list per turbine, in input order.
    """
    rows = []
    for turbine in turbine_list:
        rows.append([turbine.Ct, turbine.power, turbine.aI, turbine.average_velocity])
    return rows
def print_test_values(turbine_list: list):
    """Print one '(Ct, power, aI, average_velocity),' line per turbine.

    Each value is rendered with 7 decimal places, ready to be pasted into
    a test's expected-values tuple list.
    """
    template = "({:.7f}, {:.7f}, {:.7f}, {:.7f}),"
    for turbine in turbine_list:
        print(template.format(turbine.Ct, turbine.power, turbine.aI, turbine.average_velocity))
@pytest.fixture
def sample_inputs_fixture():
    """Pytest fixture: a fresh SampleInputs instance per test (default scope)."""
    return SampleInputs()
class SampleInputs:
    """
    Container of canned input dictionaries used by the test fixtures.

    Builds ``turbine``, ``farm``, ``wake`` and ``floris`` dicts mirroring
    the shape of the project's input files; ``floris`` aggregates the
    other three plus a logging section.
    """
    def __init__(self):
        # Turbine definition: NREL 5MW reference turbine. The three lists
        # in power_thrust_table are parallel, 48 entries each, indexed by
        # the wind_speed list (2.0 .. 25.5 m/s in 0.5 m/s steps).
        self.turbine = {
            "type": "turbine",
            "name": "nrel_5mw",
            "description": "NREL 5MW",
            "properties": {
                "rotor_diameter": 126.0,
                "hub_height": 90.0,
                "blade_count": 3,
                "pP": 1.88,
                "pT": 1.88,
                "generator_efficiency": 1.0,
                "power_thrust_table": {
                    "power": [
                        0.0,
                        0.0,
                        0.1780851,
                        0.28907459,
                        0.34902166,
                        0.3847278,
                        0.40605878,
                        0.4202279,
                        0.42882274,
                        0.43387274,
                        0.43622267,
                        0.43684468,
                        0.43657497,
                        0.43651053,
                        0.4365612,
                        0.43651728,
                        0.43590309,
                        0.43467276,
                        0.43322955,
                        0.43003137,
                        0.37655587,
                        0.33328466,
                        0.29700574,
                        0.26420779,
                        0.23839379,
                        0.21459275,
                        0.19382354,
                        0.1756635,
                        0.15970926,
                        0.14561785,
                        0.13287856,
                        0.12130194,
                        0.11219941,
                        0.10311631,
                        0.09545392,
                        0.08813781,
                        0.08186763,
                        0.07585005,
                        0.07071926,
                        0.06557558,
                        0.06148104,
                        0.05755207,
                        0.05413366,
                        0.05097969,
                        0.04806545,
                        0.04536883,
                        0.04287006,
                        0.04055141
                    ],
                    "thrust": [
                        1.19187945,
                        1.17284634,
                        1.09860817,
                        1.02889592,
                        0.97373036,
                        0.92826162,
                        0.89210543,
                        0.86100905,
                        0.835423,
                        0.81237673,
                        0.79225789,
                        0.77584769,
                        0.7629228,
                        0.76156073,
                        0.76261984,
                        0.76169723,
                        0.75232027,
                        0.74026851,
                        0.72987175,
                        0.70701647,
                        0.54054532,
                        0.45509459,
                        0.39343381,
                        0.34250785,
                        0.30487242,
                        0.27164979,
                        0.24361964,
                        0.21973831,
                        0.19918151,
                        0.18131868,
                        0.16537679,
                        0.15103727,
                        0.13998636,
                        0.1289037,
                        0.11970413,
                        0.11087113,
                        0.10339901,
                        0.09617888,
                        0.09009926,
                        0.08395078,
                        0.0791188,
                        0.07448356,
                        0.07050731,
                        0.06684119,
                        0.06345518,
                        0.06032267,
                        0.05741999,
                        0.05472609
                    ],
                    "wind_speed": [
                        2.0,
                        2.5,
                        3.0,
                        3.5,
                        4.0,
                        4.5,
                        5.0,
                        5.5,
                        6.0,
                        6.5,
                        7.0,
                        7.5,
                        8.0,
                        8.5,
                        9.0,
                        9.5,
                        10.0,
                        10.5,
                        11.0,
                        11.5,
                        12.0,
                        12.5,
                        13.0,
                        13.5,
                        14.0,
                        14.5,
                        15.0,
                        15.5,
                        16.0,
                        16.5,
                        17.0,
                        17.5,
                        18.0,
                        18.5,
                        19.0,
                        19.5,
                        20.0,
                        20.5,
                        21.0,
                        21.5,
                        22.0,
                        22.5,
                        23.0,
                        23.5,
                        24.0,
                        24.5,
                        25.0,
                        25.5
                    ],
                },
                "yaw_angle": 0.0,
                "tilt_angle": 0.0,
                "TSR": 8.0,
            },
        }
        # Farm layout: three turbines in a row along x, spaced 5 and 10
        # rotor diameters from the origin (diameter taken from self.turbine).
        self.farm = {
            "type": "farm",
            "name": "farm_example_2x2",
            "properties": {
                "wind_speed": [8.0],
                "wind_direction": [270.0],
                "turbulence_intensity": [0.1],
                "wind_shear": 0.12,
                "wind_veer": 0.0,
                "air_density": 1.225,
                "wake_combination": "sosfs",
                "layout_x": [
                    0.0,
                    5 * self.turbine["properties"]["rotor_diameter"],
                    10 * self.turbine["properties"]["rotor_diameter"],
                ],
                "layout_y": [0.0, 0.0, 0.0],
                "wind_x": [0],
                "wind_y": [0],
                "specified_wind_height": self.turbine["properties"]["hub_height"],
            },
        }
        # Wake model selection plus the tuning parameters for the chosen
        # deflection ("gauss") and velocity ("gauss_legacy") models.
        self.wake = {
            "type": "wake",
            "name": "wake_default",
            "properties": {
                "velocity_model": "gauss_legacy",
                "deflection_model": "gauss",
                "combination_model": "sosfs",
                "turbulence_model": "crespo_hernandez",
                "parameters": {
                    "wake_deflection_parameters": {
                        "gauss": {
                            "dm": 1.0,
                            "eps_gain": 0.2,
                            "use_secondary_steering": False,
                        }
                    },
                    "wake_velocity_parameters": {
                        "gauss_legacy": {
                            "calculate_VW_velocities": False,
                            "eps_gain": 0.2,
                            "ka": 0.38,
                            "kb": 0.004,
                            "use_yaw_added_recovery": False,
                        }
                    },
                },
            },
        }
        # Top-level input dict: references (not copies of) the dicts above,
        # so mutating self.farm etc. is reflected in self.floris.
        self.floris = {
            "farm": self.farm,
            "turbine": self.turbine,
            "wake": self.wake,
            "logging": {
                "console": {"enable": True, "level": 1},
                "file": {"enable": False, "level": 1},
            },
        }
import pytest
def turbines_to_array(turbine_list: list):
return [[t.Ct, t.power, t.aI, t.average_velocity] for t in turbine_list]
def print_test_values(turbine_list: list):
for t in turbine_list:
print(
"({:.7f}, {:.7f}, {:.7f}, {:.7f}),".format(
t.Ct, t.power, t.aI, t.average_velocity
)
)
@pytest.fixture
def sample_inputs_fixture():
return SampleInputs()
class SampleInputs:
def __init__(self):
self.turbine = {
"type": "turbine",
"name": "nrel_5mw",
"description": "NREL 5MW",
"properties": {
"rotor_diameter": 126.0,
"hub_height": 90.0,
"blade_count": 3,
"pP": 1.88,
"pT": 1.88,
"generator_efficiency": 1.0,
"power_thrust_table": {
"power": [
0.0,
0.0,
0.1780851,
0.28907459,
0.34902166,
0.3847278,
0.40605878,
0.4202279,
0.42882274,
0.43387274,
0.43622267,
0.43684468,
0.43657497,
0.43651053,
0.4365612,
0.43651728,
0.43590309,
0.43467276,
0.43322955,
0.43003137,
0.37655587,
0.33328466,
0.29700574,
0.26420779,
0.23839379,
0.21459275,
0.19382354,
0.1756635,
0.15970926,
0.14561785,
0.13287856,
0.12130194,
0.11219941,
0.10311631,
0.09545392,
0.08813781,
0.08186763,
0.07585005,
0.07071926,
0.06557558,
0.06148104,
0.05755207,
0.05413366,
0.05097969,
0.04806545,
0.04536883,
0.04287006,
0.04055141
],
"thrust": [
1.19187945,
1.17284634,
1.09860817,
1.02889592,
0.97373036,
0.92826162,
0.89210543,
0.86100905,
0.835423,
0.81237673,
0.79225789,
0.77584769,
0.7629228,
0.76156073,
0.76261984,
0.76169723,
0.75232027,
0.74026851,
0.72987175,
0.70701647,
0.54054532,
0.45509459,
0.39343381,
0.34250785,
0.30487242,
0.27164979,
0.24361964,
0.21973831,
0.19918151,
0.18131868,
0.16537679,
0.15103727,
0.13998636,
0.1289037,
0.11970413,
0.11087113,
0.10339901,
0.09617888,
0.09009926,
0.08395078,
0.0791188,
0.07448356,
0.07050731,
0.06684119,
0.06345518,
0.06032267,
0.05741999,
0.05472609
],
"wind_speed": [
2.0,
2.5,
3.0,
3.5,
4.0,
4.5,
5.0,
5.5,
6.0,
6.5,
7.0,
7.5,
8.0,
8.5,
9.0,
9.5,
10.0,
10.5,
11.0,
11.5,
12.0,
12.5,
13.0,
13.5,
14.0,
14.5,
15.0,
15.5,
16.0,
16.5,
17.0,
17.5,
18.0,
18.5,
19.0,
19.5,
20.0,
20.5,
21.0,
21.5,
22.0,
22.5,
23.0,
23.5,
24.0,
24.5,
25.0,
25.5
],
},
"yaw_angle": 0.0,
"tilt_angle": 0.0,
"TSR": 8.0,
},
}
self.farm = {
"type": "farm",
"name": "farm_example_2x2",
"properties": {
"wind_speed": [8.0],
"wind_direction": [270.0],
"turbulence_intensity": [0.1],
"wind_shear": 0.12,
"wind_veer": 0.0,
"air_density": 1.225,
"wake_combination": "sosfs",
"layout_x": [
0.0,
5 * self.turbine["properties"]["rotor_diameter"],
10 * self.turbine["properties"]["rotor_diameter"],
],
"layout_y": [0.0, 0.0, 0.0],
"wind_x": [0],
"wind_y": [0],
"specified_wind_height": self.turbine["properties"]["hub_height"],
},
}
self.wake = {
"type": "wake",
"name": "wake_default",
"properties": {
"velocity_model": "gauss_legacy",
"deflection_model": "gauss",
"combination_model": "sosfs",
"turbulence_model": "crespo_hernandez",
"parameters": {
"wake_deflection_parameters": {
"gauss": {
"dm": 1.0,
"eps_gain": 0.2,
"use_secondary_steering": False,
}
},
"wake_velocity_parameters": {
"gauss_legacy": {
"calculate_VW_velocities": False,
"eps_gain": 0.2,
"ka": 0.38,
"kb": 0.004,
"use_yaw_added_recovery": False,
}
},
},
},
}
self.floris = {
"farm": self.farm,
"turbine": self.turbine,
"wake": self.wake,
"logging": {
"console": {"enable": True, "level": 1},
"file": {"enable": False, "level": 1},
},
}
| true | true |
f71f6e1acacc2c48f4a28b2d425b5fac6cb232dd | 113,230 | py | Python | tests/test_class.py | michelp/cxxheaderparser | 83bb2903790cf448bf838cdb8a93ca96e758bd1a | [
"BSD-3-Clause"
] | 12 | 2020-12-28T09:40:53.000Z | 2022-03-13T15:36:21.000Z | tests/test_class.py | michelp/cxxheaderparser | 83bb2903790cf448bf838cdb8a93ca96e758bd1a | [
"BSD-3-Clause"
] | 28 | 2021-01-04T14:58:59.000Z | 2022-01-03T03:00:16.000Z | tests/test_class.py | michelp/cxxheaderparser | 83bb2903790cf448bf838cdb8a93ca96e758bd1a | [
"BSD-3-Clause"
] | 1 | 2021-11-06T03:44:53.000Z | 2021-11-06T03:44:53.000Z | # Note: testcases generated via `python -m cxxheaderparser.gentest`
from cxxheaderparser.types import (
AnonymousName,
Array,
BaseClass,
ClassDecl,
EnumDecl,
Enumerator,
Field,
ForwardDecl,
Function,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
Operator,
PQName,
Parameter,
Pointer,
Reference,
TemplateArgument,
TemplateDecl,
TemplateSpecialization,
TemplateTypeParam,
Token,
Type,
Typedef,
UsingDecl,
Value,
Variable,
)
from cxxheaderparser.simple import (
ClassScope,
NamespaceScope,
parse_string,
ParsedData,
)
def test_class_member_spec_1():
content = """
class S {
int d1; // non-static data member
int a[10] = {1, 2}; // non-static data member with initializer (C++11)
static const int d2 = 1; // static data member with initializer
virtual void f1(int) = 0; // pure virtual member function
std::string d3, *d4, f2(int); // two data members and a member function
enum { NORTH, SOUTH, EAST, WEST };
struct NestedS {
std::string s;
} d5, *d6;
typedef NestedS value_type, *pointer_type;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="class"
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
),
access="private",
),
fields=[
Field(
name="s",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
),
access="public",
)
],
)
],
enums=[
EnumDecl(
typename=PQName(
segments=[AnonymousName(id=1)], classkey="enum"
),
values=[
Enumerator(name="NORTH"),
Enumerator(name="SOUTH"),
Enumerator(name="EAST"),
Enumerator(name="WEST"),
],
access="private",
)
],
fields=[
Field(
name="d1",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="private",
),
Field(
name="a",
type=Array(
array_of=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
size=Value(tokens=[Token(value="10")]),
),
access="private",
value=Value(
tokens=[
Token(value="{"),
Token(value="1"),
Token(value=","),
Token(value="2"),
Token(value="}"),
]
),
),
Field(
name="d2",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
),
const=True,
),
access="private",
value=Value(tokens=[Token(value="1")]),
static=True,
),
Field(
name="d3",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
),
access="private",
),
Field(
name="d4",
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
)
),
access="private",
),
Field(
name="d5",
type=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
)
),
access="private",
),
Field(
name="d6",
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
)
)
),
access="private",
),
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="f1")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
)
],
access="private",
pure_virtual=True,
virtual=True,
),
Method(
return_type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="string"),
]
)
),
name=PQName(segments=[NameSpecifier(name="f2")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
)
],
access="private",
),
],
typedefs=[
Typedef(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")]
)
),
name="value_type",
access="private",
),
Typedef(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[NameSpecifier(name="NestedS")]
)
)
),
name="pointer_type",
access="private",
),
],
)
]
)
)
def test_class_member_spec_2():
content = """
class M {
std::size_t C;
std::vector<int> data;
public:
M(std::size_t R, std::size_t C)
: C(C), data(R * C) {} // constructor definition
int operator()(size_t r, size_t c) const { // member function definition
return data[r * C + c];
}
int &operator()(size_t r, size_t c) { // another member function definition
return data[r * C + c];
}
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="M")], classkey="class"
)
),
fields=[
Field(
access="private",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="size_t"),
]
)
),
name="C",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="vector",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="int"
)
]
)
)
)
]
),
),
]
)
),
name="data",
),
],
methods=[
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="M")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="size_t"),
]
)
),
name="R",
),
Parameter(
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(name="size_t"),
]
)
),
name="C",
),
],
has_body=True,
access="public",
constructor=True,
),
Operator(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name=PQName(segments=[NameSpecifier(name="operator()")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="r",
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="c",
),
],
has_body=True,
access="public",
const=True,
operator="()",
),
Operator(
return_type=Reference(
ref_to=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
),
name=PQName(segments=[NameSpecifier(name="operator()")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="r",
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="size_t")]
)
),
name="c",
),
],
has_body=True,
access="public",
operator="()",
),
],
)
]
)
)
def test_class_member_spec_3():
content = """
class S {
public:
S(); // public constructor
S(const S &); // public copy constructor
virtual ~S(); // public virtual destructor
private:
int *ptr; // private data member
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="class"
)
),
fields=[
Field(
name="ptr",
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
)
),
access="private",
)
],
methods=[
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="S")]),
parameters=[],
access="public",
constructor=True,
),
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="S")]),
parameters=[
Parameter(
type=Reference(
ref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="S")]
),
const=True,
)
)
)
],
access="public",
constructor=True,
),
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="~S")]),
parameters=[],
access="public",
destructor=True,
virtual=True,
),
],
)
]
)
)
def test_class_using():
content = """
class Base {
protected:
int d;
};
class Derived : public Base {
public:
using Base::Base; // inherit all parent's constructors (C++11)
using Base::d; // make Base's protected member d a public member of Derived
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Base")], classkey="class"
)
),
fields=[
Field(
name="d",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="protected",
)
],
),
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Derived")], classkey="class"
),
bases=[
BaseClass(
access="public",
typename=PQName(segments=[NameSpecifier(name="Base")]),
)
],
),
using=[
UsingDecl(
typename=PQName(
segments=[
NameSpecifier(name="Base"),
NameSpecifier(name="Base"),
]
),
access="public",
),
UsingDecl(
typename=PQName(
segments=[
NameSpecifier(name="Base"),
NameSpecifier(name="d"),
]
),
access="public",
),
],
),
]
)
)
def test_class_member_spec_6():
content = """
struct S {
template<typename T>
void f(T&& n);
template<class CharT>
struct NestedS {
std::basic_string<CharT> s;
};
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="struct"
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="NestedS")],
classkey="struct",
),
template=TemplateDecl(
params=[
TemplateTypeParam(typekey="class", name="CharT")
]
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="basic_string",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
NameSpecifier(
name="CharT"
)
]
)
)
)
]
),
),
]
)
),
name="s",
)
],
)
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="f")]),
parameters=[
Parameter(
type=MoveReference(
moveref_to=Type(
typename=PQName(
segments=[NameSpecifier(name="T")]
)
)
),
name="n",
)
],
template=TemplateDecl(
params=[TemplateTypeParam(typekey="typename", name="T")]
),
access="public",
)
],
)
]
)
)
def test_class_fn_default_params():
content = """
// clang-format off
class Hen
{
public:
void add(int a=100, b=0xfd, float c=1.7e-3, float d=3.14);
void join(string s1="", string s2="nothing");
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Hen")], classkey="class"
)
),
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="add")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="a",
default=Value(tokens=[Token(value="100")]),
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="b")]
)
),
default=Value(tokens=[Token(value="0xfd")]),
),
Parameter(
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="float")
]
)
),
name="c",
default=Value(tokens=[Token(value="1.7e-3")]),
),
Parameter(
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="float")
]
)
),
name="d",
default=Value(tokens=[Token(value="3.14")]),
),
],
access="public",
),
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="join")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="string")]
)
),
name="s1",
default=Value(tokens=[Token(value='""')]),
),
Parameter(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="string")]
)
),
name="s2",
default=Value(tokens=[Token(value='"nothing"')]),
),
],
access="public",
),
],
)
]
)
)
def test_class_fn_inline_virtual():
    """A method declared both `inline` and `virtual` records both flags."""
    content = """
      class B {
      public:
        virtual inline int aMethod();
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="B")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="aMethod")]),
                            parameters=[],
                            inline=True,
                            access="public",
                            virtual=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_fn_pure_virtual_const():
content = """
class StoneClass {
virtual int getNum2() const = 0;
int getNum3();
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="StoneClass")],
classkey="class",
)
),
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name=PQName(segments=[NameSpecifier(name="getNum2")]),
parameters=[],
access="private",
const=True,
pure_virtual=True,
virtual=True,
),
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name=PQName(segments=[NameSpecifier(name="getNum3")]),
parameters=[],
access="private",
),
],
)
]
)
)
def test_class_fn_return_global_ns():
content = """
struct Avacado {
uint8_t foo() { return 4; }
::uint8_t bar() { return 0; }
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Avacado")], classkey="struct"
)
),
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[NameSpecifier(name="uint8_t")]
)
),
name=PQName(segments=[NameSpecifier(name="foo")]),
parameters=[],
has_body=True,
access="public",
),
Method(
return_type=Type(
typename=PQName(
segments=[
NameSpecifier(name=""),
NameSpecifier(name="uint8_t"),
]
)
),
name=PQName(segments=[NameSpecifier(name="bar")]),
parameters=[],
has_body=True,
access="public",
),
],
)
]
)
)
def test_class_ns_class():
    """A class forward-declared in a namespace and defined outside it keeps
    the namespace-qualified name on the definition."""
    content = """
      namespace ns {
      class N;
      };
      class ns::N {};
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="ns"),
                                NameSpecifier(name="N"),
                            ],
                            classkey="class",
                        )
                    )
                )
            ],
            namespaces={
                "ns": NamespaceScope(
                    name="ns",
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="N")], classkey="class"
                            )
                        )
                    ],
                )
            },
        )
    )
def test_class_ns_w_base():
    """A qualified class definition with a base class keeps both the
    multi-segment name and the base-class list."""
    content = """
      class Herb::Cilantro : public Plant {};
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="Herb"),
                                NameSpecifier(name="Cilantro"),
                            ],
                            classkey="class",
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(segments=[NameSpecifier(name="Plant")]),
                            )
                        ],
                    )
                )
            ]
        )
    )
def test_class_inner_class():
    """A nested class inside a `class` defaults to private access."""
    content = """
      class C {
        class Inner {};
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="class"
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="Inner")],
                                    classkey="class",
                                ),
                                access="private",
                            )
                        )
                    ],
                )
            ]
        )
    )
def test_class_inner_fwd_class():
    """A class forward-declared inside another class and defined outside
    yields a private forward-decl plus a top-level qualified definition."""
    content = """
      class C {
        class N;
      };
      class C::N {};
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="class"
                        )
                    ),
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="N")], classkey="class"
                            ),
                            access="private",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C"), NameSpecifier(name="N")],
                            classkey="class",
                        )
                    )
                ),
            ]
        )
    )
def test_class_inner_var_access():
content = """
class Bug_3488053 {
public:
class Bug_3488053_Nested {
public:
int x;
};
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Bug_3488053")],
classkey="class",
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="Bug_3488053_Nested")],
classkey="class",
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="x",
)
],
)
],
)
]
)
)
def test_class_ns_and_inner():
content = """
namespace RoosterNamespace {
class RoosterOuterClass {
public:
int member1;
class RoosterSubClass1 {
public:
int publicMember1;
private:
int privateMember1;
};
private:
int member2;
class RoosterSubClass2 {
public:
int publicMember2;
private:
int privateMember2;
};
};
} // namespace RoosterNamespace
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
namespaces={
"RoosterNamespace": NamespaceScope(
name="RoosterNamespace",
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="RoosterOuterClass")],
classkey="class",
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[
NameSpecifier(name="RoosterSubClass1")
],
classkey="class",
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="publicMember1",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="privateMember1",
),
],
),
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[
NameSpecifier(name="RoosterSubClass2")
],
classkey="class",
),
access="private",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="publicMember2",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="int")
]
)
),
name="privateMember2",
),
],
),
],
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="member1",
),
Field(
access="private",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="member2",
),
],
)
],
)
}
)
)
def test_class_struct_access():
content = """
struct SampleStruct {
unsigned int meth();
private:
int prop;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="SampleStruct")],
classkey="struct",
)
),
fields=[
Field(
access="private",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="prop",
)
],
methods=[
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="unsigned int")]
)
),
name=PQName(segments=[NameSpecifier(name="meth")]),
parameters=[],
access="public",
)
],
)
]
)
)
def test_class_volatile_move_deleted_fn():
    """A deleted member function with `volatile` and `&&` qualifiers records
    all three flags (volatile, ref_qualifier, deleted)."""
    content = """
      struct C {
        void foo() volatile && = delete;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            access="public",
                            volatile=True,
                            ref_qualifier="&&",
                            deleted=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_bitfield_1():
content = """
struct S {
// will usually occupy 2 bytes:
// 3 bits: value of b1
// 2 bits: unused
// 6 bits: value of b2
// 2 bits: value of b3
// 3 bits: unused
unsigned char b1 : 3, : 2, b2 : 6, b3 : 2;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="S")], classkey="struct"
)
),
fields=[
Field(
name="b1",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=3,
),
Field(
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=2,
),
Field(
name="b2",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=6,
),
Field(
name="b3",
type=Type(
typename=PQName(
segments=[
FundamentalSpecifier(name="unsigned char")
]
)
),
access="public",
bits=2,
),
],
)
]
)
)
def test_class_bitfield_2():
content = """
struct HAL_ControlWord {
int x : 1;
int y : 1;
};
typedef struct HAL_ControlWord HAL_ControlWord;
int HAL_GetControlWord(HAL_ControlWord *controlWord);
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="HAL_ControlWord")],
classkey="struct",
)
),
fields=[
Field(
name="x",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="public",
bits=1,
),
Field(
name="y",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
access="public",
bits=1,
),
],
)
],
functions=[
Function(
return_type=Type(
typename=PQName(segments=[FundamentalSpecifier(name="int")])
),
name=PQName(segments=[NameSpecifier(name="HAL_GetControlWord")]),
parameters=[
Parameter(
type=Pointer(
ptr_to=Type(
typename=PQName(
segments=[NameSpecifier(name="HAL_ControlWord")]
)
)
),
name="controlWord",
)
],
)
],
typedefs=[
Typedef(
type=Type(
typename=PQName(
segments=[NameSpecifier(name="HAL_ControlWord")],
classkey="struct",
)
),
name="HAL_ControlWord",
)
],
)
)
def test_class_anon_struct_as_globalvar():
content = """
struct {
int m;
} unnamed, *p_unnamed;
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
classkey="struct", segments=[AnonymousName(id=1)]
)
),
fields=[
Field(
name="m",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")],
)
),
access="public",
)
],
)
],
variables=[
Variable(
name=PQName(segments=[NameSpecifier(name="unnamed")]),
type=Type(
typename=PQName(
classkey="struct", segments=[AnonymousName(id=1)]
)
),
),
Variable(
name=PQName(segments=[NameSpecifier(name="p_unnamed")]),
type=Pointer(
ptr_to=Type(
typename=PQName(
classkey="struct", segments=[AnonymousName(id=1)]
)
)
),
),
],
)
)
def test_class_anon_struct_as_classvar():
content = """
struct AnonHolderClass {
struct {
int x;
} a;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="AnonHolderClass")],
classkey="struct",
)
),
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[AnonymousName(id=1)], classkey="struct"
),
access="public",
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="x",
)
],
)
],
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[AnonymousName(id=1)], classkey="struct"
)
),
name="a",
)
],
)
]
)
)
def test_initializer_with_initializer_list_1():
content = """
struct ComplexInit : SomeBase {
ComplexInit(int i) : m_stuff{i, 2} { auto i = something(); }
void fn();
std::vector<int> m_stuff;
};
"""
data = parse_string(content, cleandoc=True)
assert data == ParsedData(
namespace=NamespaceScope(
classes=[
ClassScope(
class_decl=ClassDecl(
typename=PQName(
segments=[NameSpecifier(name="ComplexInit")],
classkey="struct",
),
bases=[
BaseClass(
access="public",
typename=PQName(
segments=[NameSpecifier(name="SomeBase")]
),
)
],
),
fields=[
Field(
access="public",
type=Type(
typename=PQName(
segments=[
NameSpecifier(name="std"),
NameSpecifier(
name="vector",
specialization=TemplateSpecialization(
args=[
TemplateArgument(
arg=Type(
typename=PQName(
segments=[
FundamentalSpecifier(
name="int"
)
]
)
)
)
]
),
),
]
)
),
name="m_stuff",
)
],
methods=[
Method(
return_type=None,
name=PQName(segments=[NameSpecifier(name="ComplexInit")]),
parameters=[
Parameter(
type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="int")]
)
),
name="i",
)
],
has_body=True,
access="public",
constructor=True,
),
Method(
return_type=Type(
typename=PQName(
segments=[FundamentalSpecifier(name="void")]
)
),
name=PQName(segments=[NameSpecifier(name="fn")]),
parameters=[],
access="public",
),
],
)
]
)
)
def test_initializer_with_initializer_list_2():
    """A converting-constructor template whose mem-init list contains a lambda."""
    content = """
    template <typename T> class future final {
    public:
      template <typename R>
      future(future<R> &&oth) noexcept
          : future(oth.then([](R &&val) -> T { return val; })) {}
    };
    """
    # future<R> — the parameter's move-reference target
    future_of_r = Type(
        typename=PQName(
            segments=[
                NameSpecifier(
                    name="future",
                    specialization=TemplateSpecialization(
                        args=[
                            TemplateArgument(
                                arg=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="R")]
                                    )
                                )
                            )
                        ]
                    ),
                )
            ]
        )
    )
    ctor = Method(
        return_type=None,
        name=PQName(segments=[NameSpecifier(name="future")]),
        parameters=[
            Parameter(type=MoveReference(moveref_to=future_of_r), name="oth")
        ],
        has_body=True,
        template=TemplateDecl(
            params=[TemplateTypeParam(typekey="typename", name="R")]
        ),
        noexcept=Value(tokens=[]),
        access="public",
        constructor=True,
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="future")], classkey="class"
                        ),
                        template=TemplateDecl(
                            params=[TemplateTypeParam(typekey="typename", name="T")]
                        ),
                        final=True,
                    ),
                    methods=[ctor],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_with_arrays():
    """Array fields sized by a named constant and by a literal, plus a scalar."""
    content = """
    const int MAX_ITEM = 7;
    class Bird {
      int items[MAX_ITEM];
      int otherItems[7];
      int oneItem;
    };
    """
    int_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="int")]))

    def int_array(size_tok):
        # int[<size_tok>] — the size is kept as a raw token
        return Array(array_of=int_t, size=Value(tokens=[Token(value=size_tok)]))

    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Bird")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private", type=int_array("MAX_ITEM"), name="items"
                        ),
                        Field(
                            access="private", type=int_array("7"), name="otherItems"
                        ),
                        Field(access="private", type=int_t, name="oneItem"),
                    ],
                )
            ],
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="MAX_ITEM")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")]),
                        const=True,
                    ),
                    value=Value(tokens=[Token(value="7")]),
                )
            ],
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_inline_impl():
    """A static method declared in-class with an out-of-line inline definition."""
    content = """
    class Monkey {
    private:
        static void Create();
    };
    inline void Monkey::Create() {}
    """
    void_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="void")]))
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Monkey")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=void_t,
                            name=PQName(segments=[NameSpecifier(name="Create")]),
                            parameters=[],
                            static=True,
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                # the definition outside the class is reported as a free function
                Function(
                    return_type=void_t,
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Monkey"),
                            NameSpecifier(name="Create"),
                        ]
                    ),
                    parameters=[],
                    inline=True,
                    has_body=True,
                )
            ],
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_virtual_final_override():
    """virtual/final on base methods, override in a final derived struct."""
    content = """
    struct Lemon {
      virtual void foo() final;
      virtual void foo2();
    };
    struct Lime final : Lemon {
      void abc();
      void foo2() override;
    };
    """
    void_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="void")]))

    def method(mname, **flags):
        # all methods here are public, parameterless, and return void
        return Method(
            return_type=void_t,
            name=PQName(segments=[NameSpecifier(name=mname)]),
            parameters=[],
            access="public",
            **flags,
        )

    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lemon")], classkey="struct"
                        )
                    ),
                    methods=[
                        method("foo", virtual=True, final=True),
                        method("foo2", virtual=True),
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lime")], classkey="struct"
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(
                                    segments=[NameSpecifier(name="Lemon")]
                                ),
                            )
                        ],
                        final=True,
                    ),
                    methods=[
                        method("abc"),
                        method("foo2", override=True),
                    ],
                ),
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_return_class():
    """Elaborated type 'class Peach *' as return, parameter, and member types."""
    content = """
    class Peach {
      int abc;
    };
    class Plumb {
      class Peach *doSomethingGreat(class Peach *pInCurPtr);
      class Peach *var;
    };
    class Peach *Plumb::myMethod(class Peach *pInPtr) {
      return pInPtr;
    }
    """
    # pointer to the elaborated type "class Peach" — shared by every use below
    peach_ptr = Pointer(
        ptr_to=Type(
            typename=PQName(
                segments=[NameSpecifier(name="Peach")], classkey="class"
            )
        )
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Peach")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="abc",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Plumb")], classkey="class"
                        )
                    ),
                    fields=[Field(access="private", type=peach_ptr, name="var")],
                    methods=[
                        Method(
                            return_type=peach_ptr,
                            name=PQName(
                                segments=[NameSpecifier(name="doSomethingGreat")]
                            ),
                            parameters=[
                                Parameter(type=peach_ptr, name="pInCurPtr")
                            ],
                            access="private",
                        )
                    ],
                ),
            ],
            functions=[
                Function(
                    return_type=peach_ptr,
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Plumb"),
                            NameSpecifier(name="myMethod"),
                        ]
                    ),
                    parameters=[Parameter(type=peach_ptr, name="pInPtr")],
                    has_body=True,
                )
            ],
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_template_impl():
    """Member function template declared in-class and defined out-of-line."""
    content = """
    class Owl {
    private:
        template <typename T> int *tFunc(int count);
    };
    template <typename T> int *Owl::tFunc(int count) {
      if (count == 0) {
        return NULL;
      }
      return NULL;
    }
    """
    int_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="int")]))
    int_ptr = Pointer(ptr_to=int_t)
    tmpl = TemplateDecl(params=[TemplateTypeParam(typekey="typename", name="T")])
    count_param = Parameter(type=int_t, name="count")
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Owl")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=int_ptr,
                            name=PQName(segments=[NameSpecifier(name="tFunc")]),
                            parameters=[count_param],
                            template=tmpl,
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=int_ptr,
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Owl"),
                            NameSpecifier(name="tFunc"),
                        ]
                    ),
                    parameters=[count_param],
                    has_body=True,
                    template=tmpl,
                )
            ],
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_inline_template_impl():
    """Static member function template with an out-of-line definition."""
    content = """
    class Chicken {
      template <typename T> static T Get();
    };
    template <typename T> T Chicken::Get() { return T(); }
    """
    t_type = Type(typename=PQName(segments=[NameSpecifier(name="T")]))
    tmpl = TemplateDecl(params=[TemplateTypeParam(typekey="typename", name="T")])
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Chicken")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=t_type,
                            name=PQName(segments=[NameSpecifier(name="Get")]),
                            parameters=[],
                            static=True,
                            template=tmpl,
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=t_type,
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Chicken"),
                            NameSpecifier(name="Get"),
                        ]
                    ),
                    parameters=[],
                    has_body=True,
                    template=tmpl,
                )
            ],
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_explicit_constructors():
    """Default constructor plus an explicit single-argument constructor."""
    content = """
    class Lizzard {
      Lizzard();
      explicit Lizzard(int a);
    };
    """
    ctor_name = PQName(segments=[NameSpecifier(name="Lizzard")])
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lizzard")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=ctor_name,
                            parameters=[],
                            access="private",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=ctor_name,
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(name="int")
                                            ]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                            access="private",
                            constructor=True,
                            explicit=True,
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_default_constructor():
    """`= default` on a constructor sets the `default` flag."""
    content = """
    class DefaultConstDest {
    public:
        DefaultConstDest() = default;
    };
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="DefaultConstDest")],
                            classkey="class",
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(
                                segments=[NameSpecifier(name="DefaultConstDest")]
                            ),
                            parameters=[],
                            access="public",
                            constructor=True,
                            default=True,
                        )
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_delete_constructor():
    """`= delete` on a constructor sets the `deleted` flag."""
    content = """
    class A {
    public:
        A() = delete;
    };
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="A")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="A")]),
                            parameters=[],
                            access="public",
                            constructor=True,
                            deleted=True,
                        )
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_multi_vars():
    """Comma-separated declarators each produce their own Field."""
    content = """
    class Grape {
    public:
        int a, b, c;
        map<string, int> d;
        map<string, int> e, f;
    };
    """
    int_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="int")]))
    # map<string, int> — repeated for each of d, e, f
    map_string_int = Type(
        typename=PQName(
            segments=[
                NameSpecifier(
                    name="map",
                    specialization=TemplateSpecialization(
                        args=[
                            TemplateArgument(
                                arg=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="string")]
                                    )
                                )
                            ),
                            TemplateArgument(
                                arg=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            ),
                        ]
                    ),
                )
            ]
        )
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grape")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(access="public", type=int_t, name="a"),
                        Field(access="public", type=int_t, name="b"),
                        Field(access="public", type=int_t, name="c"),
                        Field(access="public", type=map_string_int, name="d"),
                        Field(access="public", type=map_string_int, name="e"),
                        Field(access="public", type=map_string_int, name="f"),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_static_const_var_expr():
    """Static const members keep their initializer expressions as raw tokens."""
    content = """
    class PandaClass {
        static const int CONST_A = (1 << 7) - 1;
        static const int CONST_B = sizeof(int);
    };
    """
    const_int = Type(
        typename=PQName(segments=[FundamentalSpecifier(name="int")]),
        const=True,
    )

    def tokens(*values):
        # initializer expression represented as a flat token stream
        return Value(tokens=[Token(value=v) for v in values])

    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PandaClass")],
                            classkey="class",
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=const_int,
                            name="CONST_A",
                            value=tokens("(", "1", "<<", "7", ")", "-", "1"),
                            static=True,
                        ),
                        Field(
                            access="private",
                            type=const_int,
                            name="CONST_B",
                            value=tokens("sizeof", "(", "int", ")"),
                            static=True,
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fwd_struct():
    """A nested struct forward-declared, used via pointer, then defined."""
    content = """
    class PotatoClass {
        struct FwdStruct;
        FwdStruct *ptr;
        struct FwdStruct {
            int a;
        };
    };
    """
    # the same qualified name is used by the forward decl and the definition
    fwd_typename = PQName(
        segments=[NameSpecifier(name="FwdStruct")], classkey="struct"
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PotatoClass")],
                            classkey="class",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=fwd_typename,
                                access="private",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(name="int")
                                            ]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                        )
                    ],
                    fields=[
                        Field(
                            access="private",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="FwdStruct")]
                                    )
                                )
                            ),
                            name="ptr",
                        )
                    ],
                    forward_decls=[
                        ForwardDecl(typename=fwd_typename, access="private")
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_multi_array():
    """A 1-D char array and a 2-D array (outer Array wraps the inner one)."""
    content = """
    struct Picture {
      char name[25];
      unsigned int pdata[128][256];
    };
    """
    char_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="char")]))
    uint_t = Type(
        typename=PQName(segments=[FundamentalSpecifier(name="unsigned int")])
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Picture")],
                            classkey="struct",
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Array(
                                array_of=char_t,
                                size=Value(tokens=[Token(value="25")]),
                            ),
                            name="name",
                        ),
                        Field(
                            access="public",
                            # [128][256]: outer size first, inner array nested
                            type=Array(
                                array_of=Array(
                                    array_of=uint_t,
                                    size=Value(tokens=[Token(value="256")]),
                                ),
                                size=Value(tokens=[Token(value="128")]),
                            ),
                            name="pdata",
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_noexcept():
    """Every flavor of noexcept specifier, with and without const."""
    content = """
    struct Grackle {
      void no_noexcept();
      void just_noexcept() noexcept;
      void const_noexcept() const noexcept;
      void noexcept_bool() noexcept(true);
      void const_noexcept_bool() const noexcept(true);
      void noexcept_noexceptOperator() noexcept(noexcept(Grackle()));
      void const_noexcept_noexceptOperator() const noexcept(noexcept(Grackle()));
    };
    """
    void_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="void")]))
    # token stream for noexcept(Grackle()) inside the specifier
    operator_expr = Value(
        tokens=[
            Token(value="noexcept"),
            Token(value="("),
            Token(value="Grackle"),
            Token(value="("),
            Token(value=")"),
            Token(value=")"),
        ]
    )

    def method(mname, **flags):
        # all methods are public, parameterless, and return void
        return Method(
            return_type=void_t,
            name=PQName(segments=[NameSpecifier(name=mname)]),
            parameters=[],
            access="public",
            **flags,
        )

    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grackle")],
                            classkey="struct",
                        )
                    ),
                    methods=[
                        method("no_noexcept"),
                        method("just_noexcept", noexcept=Value(tokens=[])),
                        method(
                            "const_noexcept",
                            noexcept=Value(tokens=[]),
                            const=True,
                        ),
                        method(
                            "noexcept_bool",
                            noexcept=Value(tokens=[Token(value="true")]),
                        ),
                        method(
                            "const_noexcept_bool",
                            noexcept=Value(tokens=[Token(value="true")]),
                            const=True,
                        ),
                        method(
                            "noexcept_noexceptOperator", noexcept=operator_expr
                        ),
                        method(
                            "const_noexcept_noexceptOperator",
                            noexcept=operator_expr,
                            const=True,
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_volatile():
    """The volatile qualifier on a field type."""
    content = """
    class Foo
    {
        public:
        private:
            volatile bool myToShutDown;
    };
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Foo")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="bool")]
                                ),
                                volatile=True,
                            ),
                            name="myToShutDown",
                        )
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
| 38.240459 | 94 | 0.2641 |
from cxxheaderparser.types import (
AnonymousName,
Array,
BaseClass,
ClassDecl,
EnumDecl,
Enumerator,
Field,
ForwardDecl,
Function,
FundamentalSpecifier,
Method,
MoveReference,
NameSpecifier,
Operator,
PQName,
Parameter,
Pointer,
Reference,
TemplateArgument,
TemplateDecl,
TemplateSpecialization,
TemplateTypeParam,
Token,
Type,
Typedef,
UsingDecl,
Value,
Variable,
)
from cxxheaderparser.simple import (
ClassScope,
NamespaceScope,
parse_string,
ParsedData,
)
def test_class_member_spec_1():
    """A grab bag of member declarations: fields with initializers, a pure
    virtual method, a multi-declarator line, an anonymous enum, a nested
    struct with trailing declarators, and a multi-declarator typedef.
    """
    content = """
    class S {
      int d1; // non-static data member
      int a[10] = {1, 2}; // non-static data member with initializer (C++11)
      static const int d2 = 1; // static data member with initializer
      virtual void f1(int) = 0; // pure virtual member function
      std::string d3, *d4, f2(int); // two data members and a member function
      enum { NORTH, SOUTH, EAST, WEST };
      struct NestedS {
        std::string s;
      } d5, *d6;
      typedef NestedS value_type, *pointer_type;
    };
    """
    int_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="int")]))
    std_string = Type(
        typename=PQName(
            segments=[NameSpecifier(name="std"), NameSpecifier(name="string")]
        )
    )
    # NestedS referenced without the struct keyword (typedef targets)
    nested_plain = Type(typename=PQName(segments=[NameSpecifier(name="NestedS")]))
    # NestedS as the elaborated struct type (d5/d6 declarators)
    nested_struct = Type(
        typename=PQName(
            segments=[NameSpecifier(name="NestedS")], classkey="struct"
        )
    )
    unnamed_int = Parameter(type=int_t)
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="class"
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="NestedS")],
                                    classkey="struct",
                                ),
                                access="private",
                            ),
                            fields=[
                                Field(name="s", type=std_string, access="public")
                            ],
                        )
                    ],
                    enums=[
                        EnumDecl(
                            typename=PQName(
                                segments=[AnonymousName(id=1)], classkey="enum"
                            ),
                            values=[
                                Enumerator(name="NORTH"),
                                Enumerator(name="SOUTH"),
                                Enumerator(name="EAST"),
                                Enumerator(name="WEST"),
                            ],
                            access="private",
                        )
                    ],
                    fields=[
                        Field(name="d1", type=int_t, access="private"),
                        Field(
                            name="a",
                            type=Array(
                                array_of=int_t,
                                size=Value(tokens=[Token(value="10")]),
                            ),
                            access="private",
                            value=Value(
                                tokens=[
                                    Token(value="{"),
                                    Token(value="1"),
                                    Token(value=","),
                                    Token(value="2"),
                                    Token(value="}"),
                                ]
                            ),
                        ),
                        Field(
                            name="d2",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            access="private",
                            value=Value(tokens=[Token(value="1")]),
                            static=True,
                        ),
                        Field(name="d3", type=std_string, access="private"),
                        Field(
                            name="d4",
                            type=Pointer(ptr_to=std_string),
                            access="private",
                        ),
                        Field(name="d5", type=nested_struct, access="private"),
                        Field(
                            name="d6",
                            type=Pointer(ptr_to=nested_struct),
                            access="private",
                        ),
                    ],
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="f1")]),
                            parameters=[unnamed_int],
                            access="private",
                            pure_virtual=True,
                            virtual=True,
                        ),
                        Method(
                            return_type=std_string,
                            name=PQName(segments=[NameSpecifier(name="f2")]),
                            parameters=[unnamed_int],
                            access="private",
                        ),
                    ],
                    typedefs=[
                        Typedef(
                            type=nested_plain,
                            name="value_type",
                            access="private",
                        ),
                        Typedef(
                            type=Pointer(ptr_to=nested_plain),
                            name="pointer_type",
                            access="private",
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_member_spec_2():
    """Constructor with a member-init list plus const/non-const operator()."""
    content = """
    class M {
      std::size_t C;
      std::vector<int> data;
    public:
      M(std::size_t R, std::size_t C)
          : C(C), data(R * C) {} // constructor definition
      int operator()(size_t r, size_t c) const { // member function definition
        return data[r * C + c];
      }
      int &operator()(size_t r, size_t c) { // another member function definition
        return data[r * C + c];
      }
    };
    """
    std_size_t = Type(
        typename=PQName(
            segments=[NameSpecifier(name="std"), NameSpecifier(name="size_t")]
        )
    )
    # unqualified size_t, as written in the operator() parameter lists
    size_t_t = Type(typename=PQName(segments=[NameSpecifier(name="size_t")]))
    int_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="int")]))
    call_params = [
        Parameter(type=size_t_t, name="r"),
        Parameter(type=size_t_t, name="c"),
    ]
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="M")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(access="private", type=std_size_t, name="C"),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="std"),
                                        NameSpecifier(
                                            name="vector",
                                            specialization=TemplateSpecialization(
                                                args=[TemplateArgument(arg=int_t)]
                                            ),
                                        ),
                                    ]
                                )
                            ),
                            name="data",
                        ),
                    ],
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="M")]),
                            parameters=[
                                Parameter(type=std_size_t, name="R"),
                                Parameter(type=std_size_t, name="C"),
                            ],
                            has_body=True,
                            access="public",
                            constructor=True,
                        ),
                        Operator(
                            return_type=int_t,
                            name=PQName(
                                segments=[NameSpecifier(name="operator()")]
                            ),
                            parameters=call_params,
                            has_body=True,
                            access="public",
                            const=True,
                            operator="()",
                        ),
                        Operator(
                            return_type=Reference(ref_to=int_t),
                            name=PQName(
                                segments=[NameSpecifier(name="operator()")]
                            ),
                            parameters=call_params,
                            has_body=True,
                            access="public",
                            operator="()",
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_member_spec_3():
    """Default ctor, copy ctor, virtual destructor, and a private pointer field."""
    content = """
    class S {
    public:
        S(); // public constructor
        S(const S &); // public copy constructor
        virtual ~S(); // public virtual destructor
    private:
        int *ptr; // private data member
    };
    """
    ctor_name = PQName(segments=[NameSpecifier(name="S")])
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            name="ptr",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            ),
                            access="private",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=None,
                            name=ctor_name,
                            parameters=[],
                            access="public",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=ctor_name,
                            parameters=[
                                Parameter(
                                    # const S & — unnamed copy-ctor parameter
                                    type=Reference(
                                        ref_to=Type(
                                            typename=PQName(
                                                segments=[NameSpecifier(name="S")]
                                            ),
                                            const=True,
                                        )
                                    )
                                )
                            ],
                            access="public",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="~S")]),
                            parameters=[],
                            access="public",
                            destructor=True,
                            virtual=True,
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_using():
    """using-declarations pulling a base ctor and member into the derived class."""
    content = """
    class Base {
    protected:
      int d;
    };
    class Derived : public Base {
    public:
      using Base::Base; // inherit all parent's constructors (C++11)
      using Base::d; // make Base's protected member d a public member of Derived
    };
    """

    def base_scoped(member):
        # Base::<member>
        return PQName(
            segments=[NameSpecifier(name="Base"), NameSpecifier(name=member)]
        )

    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Base")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            name="d",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            access="protected",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Derived")],
                            classkey="class",
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(
                                    segments=[NameSpecifier(name="Base")]
                                ),
                            )
                        ],
                    ),
                    using=[
                        UsingDecl(typename=base_scoped("Base"), access="public"),
                        UsingDecl(typename=base_scoped("d"), access="public"),
                    ],
                ),
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_member_spec_6():
    """A member function template and a nested class template."""
    content = """
    struct S {
      template<typename T>
      void f(T&& n);
      template<class CharT>
      struct NestedS {
        std::basic_string<CharT> s;
      };
    };
    """
    # std::basic_string<CharT> — the nested struct's field type
    basic_string_chart = Type(
        typename=PQName(
            segments=[
                NameSpecifier(name="std"),
                NameSpecifier(
                    name="basic_string",
                    specialization=TemplateSpecialization(
                        args=[
                            TemplateArgument(
                                arg=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="CharT")]
                                    )
                                )
                            )
                        ]
                    ),
                ),
            ]
        )
    )
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="struct"
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="NestedS")],
                                    classkey="struct",
                                ),
                                template=TemplateDecl(
                                    params=[
                                        TemplateTypeParam(
                                            typekey="class", name="CharT"
                                        )
                                    ]
                                ),
                                access="public",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=basic_string_chart,
                                    name="s",
                                )
                            ],
                        )
                    ],
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="f")]),
                            parameters=[
                                Parameter(
                                    type=MoveReference(
                                        moveref_to=Type(
                                            typename=PQName(
                                                segments=[NameSpecifier(name="T")]
                                            )
                                        )
                                    ),
                                    name="n",
                                )
                            ],
                            template=TemplateDecl(
                                params=[
                                    TemplateTypeParam(typekey="typename", name="T")
                                ]
                            ),
                            access="public",
                        )
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_default_params():
    """Default parameter values captured as raw token Values. Note that the
    untyped ``b=0xfd`` parameter parses as an unnamed parameter of type ``b``.
    """
    content = """
    // clang-format off
    class Hen
    {
    public:
      void add(int a=100, b=0xfd, float c=1.7e-3, float d=3.14);
      void join(string s1="", string s2="nothing");
    };
    """
    void_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="void")]))
    int_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="int")]))
    float_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="float")]))
    string_t = Type(typename=PQName(segments=[NameSpecifier(name="string")]))

    def param(ptype, pname, default_tok):
        # named parameter with a single-token default value
        return Parameter(
            type=ptype,
            name=pname,
            default=Value(tokens=[Token(value=default_tok)]),
        )

    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Hen")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=void_t,
                            name=PQName(segments=[NameSpecifier(name="add")]),
                            parameters=[
                                param(int_t, "a", "100"),
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[NameSpecifier(name="b")]
                                        )
                                    ),
                                    default=Value(tokens=[Token(value="0xfd")]),
                                ),
                                param(float_t, "c", "1.7e-3"),
                                param(float_t, "d", "3.14"),
                            ],
                            access="public",
                        ),
                        Method(
                            return_type=void_t,
                            name=PQName(segments=[NameSpecifier(name="join")]),
                            parameters=[
                                param(string_t, "s1", '""'),
                                param(string_t, "s2", '"nothing"'),
                            ],
                            access="public",
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_inline_virtual():
    """Both virtual and inline flags set on one method declaration."""
    content = """
    class B {
    public:
      virtual inline int aMethod();
    };
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="B")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="aMethod")]),
                            parameters=[],
                            inline=True,
                            access="public",
                            virtual=True,
                        )
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_pure_virtual_const():
    """A const pure-virtual method alongside a plain method."""
    content = """
    class StoneClass {
      virtual int getNum2() const = 0;
      int getNum3();
    };
    """
    int_t = Type(typename=PQName(segments=[FundamentalSpecifier(name="int")]))
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="StoneClass")],
                            classkey="class",
                        )
                    ),
                    methods=[
                        Method(
                            return_type=int_t,
                            name=PQName(segments=[NameSpecifier(name="getNum2")]),
                            parameters=[],
                            access="private",
                            const=True,
                            pure_virtual=True,
                            virtual=True,
                        ),
                        Method(
                            return_type=int_t,
                            name=PQName(segments=[NameSpecifier(name="getNum3")]),
                            parameters=[],
                            access="private",
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_fn_return_global_ns():
    """`::uint8_t` keeps an empty leading NameSpecifier for the global namespace."""
    content = """
    struct Avacado {
      uint8_t foo() { return 4; }
      ::uint8_t bar() { return 0; }
    };
    """

    def method(mname, rtype_segments):
        return Method(
            return_type=Type(typename=PQName(segments=rtype_segments)),
            name=PQName(segments=[NameSpecifier(name=mname)]),
            parameters=[],
            has_body=True,
            access="public",
        )

    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Avacado")],
                            classkey="struct",
                        )
                    ),
                    methods=[
                        method("foo", [NameSpecifier(name="uint8_t")]),
                        # leading "" segment marks the global-namespace qualifier
                        method(
                            "bar",
                            [
                                NameSpecifier(name=""),
                                NameSpecifier(name="uint8_t"),
                            ],
                        ),
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_ns_class():
    """Class forward-declared in a namespace, then defined via a qualified name."""
    content = """
    namespace ns {
    class N;
    };
    class ns::N {};
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                # the qualified definition lands in the global scope's class list
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="ns"),
                                NameSpecifier(name="N"),
                            ],
                            classkey="class",
                        )
                    )
                )
            ],
            namespaces={
                "ns": NamespaceScope(
                    name="ns",
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="N")],
                                classkey="class",
                            )
                        )
                    ],
                )
            },
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_ns_w_base():
    """Qualified class name combined with a base-class list."""
    content = """
    class Herb::Cilantro : public Plant {};
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="Herb"),
                                NameSpecifier(name="Cilantro"),
                            ],
                            classkey="class",
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(
                                    segments=[NameSpecifier(name="Plant")]
                                ),
                            )
                        ],
                    )
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_inner_class():
    """A nested class definition gets the enclosing class's default access."""
    content = """
    class C {
      class Inner {};
    };
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="class"
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="Inner")],
                                    classkey="class",
                                ),
                                access="private",
                            )
                        )
                    ],
                )
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_inner_fwd_class():
    """A nested forward declaration plus its out-of-line definition."""
    content = """
    class C {
      class N;
    };
    class C::N {};
    """
    expected = ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="class"
                        )
                    ),
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="N")],
                                classkey="class",
                            ),
                            access="private",
                        )
                    ],
                ),
                # the out-of-line definition appears as its own top-level class
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[
                                NameSpecifier(name="C"),
                                NameSpecifier(name="N"),
                            ],
                            classkey="class",
                        )
                    )
                ),
            ]
        )
    )
    assert parse_string(content, cleandoc=True) == expected
def test_class_inner_var_access():
    """A public nested class keeps its own access sections: the nested class is
    access="public" on the outer class, and its field x is public too."""
    content = """
      class Bug_3488053 {
      public:
        class Bug_3488053_Nested {
        public:
          int x;
        };
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Bug_3488053")],
                            classkey="class",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="Bug_3488053_Nested")],
                                    classkey="class",
                                ),
                                access="public",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="x",
                                )
                            ],
                        )
                    ],
                )
            ]
        )
    )
def test_class_ns_and_inner():
    """A namespaced outer class with two nested classes under different access
    sections: each nested class records its own access on the outer class, and
    fields inside each nested class track their own public/private sections."""
    content = """
      namespace RoosterNamespace {
      class RoosterOuterClass {
      public:
        int member1;
        class RoosterSubClass1 {
        public:
          int publicMember1;
        private:
          int privateMember1;
        };
      private:
        int member2;
        class RoosterSubClass2 {
        public:
          int publicMember2;
        private:
          int privateMember2;
        };
      };
      } // namespace RoosterNamespace
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            namespaces={
                "RoosterNamespace": NamespaceScope(
                    name="RoosterNamespace",
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="RoosterOuterClass")],
                                    classkey="class",
                                )
                            ),
                            classes=[
                                ClassScope(
                                    class_decl=ClassDecl(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="RoosterSubClass1")
                                            ],
                                            classkey="class",
                                        ),
                                        access="public",
                                    ),
                                    fields=[
                                        Field(
                                            access="public",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="publicMember1",
                                        ),
                                        Field(
                                            access="private",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="privateMember1",
                                        ),
                                    ],
                                ),
                                ClassScope(
                                    class_decl=ClassDecl(
                                        typename=PQName(
                                            segments=[
                                                NameSpecifier(name="RoosterSubClass2")
                                            ],
                                            classkey="class",
                                        ),
                                        access="private",
                                    ),
                                    fields=[
                                        Field(
                                            access="public",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="publicMember2",
                                        ),
                                        Field(
                                            access="private",
                                            type=Type(
                                                typename=PQName(
                                                    segments=[
                                                        FundamentalSpecifier(name="int")
                                                    ]
                                                )
                                            ),
                                            name="privateMember2",
                                        ),
                                    ],
                                ),
                            ],
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="member1",
                                ),
                                Field(
                                    access="private",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="member2",
                                ),
                            ],
                        )
                    ],
                )
            }
        )
    )
def test_class_struct_access():
    """A struct's members default to public; an explicit `private:` section
    switches subsequent members to private."""
    content = """
      struct SampleStruct {
        unsigned int meth();
      private:
        int prop;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="SampleStruct")],
                            classkey="struct",
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="prop",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="unsigned int")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="meth")]),
                            parameters=[],
                            access="public",
                        )
                    ],
                )
            ]
        )
    )
def test_class_volatile_move_deleted_fn():
    """A deleted member function with `volatile` and `&&` ref-qualifiers sets
    volatile, ref_qualifier and deleted on the Method."""
    content = """
      struct C {
        void foo() volatile && = delete;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="C")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            access="public",
                            volatile=True,
                            ref_qualifier="&&",
                            deleted=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_bitfield_1():
    """Bit-field declarations in a comma-separated list each carry their bit
    width; an unnamed bit-field (`: 2`) becomes a Field with no name."""
    content = """
      struct S {
        // will usually occupy 2 bytes:
        // 3 bits: value of b1
        // 2 bits: unused
        // 6 bits: value of b2
        // 2 bits: value of b3
        // 3 bits: unused
        unsigned char b1 : 3, : 2, b2 : 6, b3 : 2;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="S")], classkey="struct"
                        )
                    ),
                    fields=[
                        Field(
                            name="b1",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=3,
                        ),
                        Field(
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=2,
                        ),
                        Field(
                            name="b2",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=6,
                        ),
                        Field(
                            name="b3",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        FundamentalSpecifier(name="unsigned char")
                                    ]
                                )
                            ),
                            access="public",
                            bits=2,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_bitfield_2():
    """C-style struct with bit-fields plus a same-name typedef and a function
    taking a pointer to it — all three land in their respective collections."""
    content = """
      struct HAL_ControlWord {
        int x : 1;
        int y : 1;
      };
      typedef struct HAL_ControlWord HAL_ControlWord;
      int HAL_GetControlWord(HAL_ControlWord *controlWord);
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="HAL_ControlWord")],
                            classkey="struct",
                        )
                    ),
                    fields=[
                        Field(
                            name="x",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            access="public",
                            bits=1,
                        ),
                        Field(
                            name="y",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            access="public",
                            bits=1,
                        ),
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")])
                    ),
                    name=PQName(segments=[NameSpecifier(name="HAL_GetControlWord")]),
                    parameters=[
                        Parameter(
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="HAL_ControlWord")]
                                    )
                                )
                            ),
                            name="controlWord",
                        )
                    ],
                )
            ],
            typedefs=[
                Typedef(
                    type=Type(
                        typename=PQName(
                            segments=[NameSpecifier(name="HAL_ControlWord")],
                            classkey="struct",
                        )
                    ),
                    name="HAL_ControlWord",
                )
            ],
        )
    )
def test_class_anon_struct_as_globalvar():
    """An anonymous struct declaring global variables: both variables reference
    the same AnonymousName id, one by value and one through a pointer."""
    content = """
      struct {
        int m;
      } unnamed, *p_unnamed;
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            classkey="struct", segments=[AnonymousName(id=1)]
                        )
                    ),
                    fields=[
                        Field(
                            name="m",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")],
                                )
                            ),
                            access="public",
                        )
                    ],
                )
            ],
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="unnamed")]),
                    type=Type(
                        typename=PQName(
                            classkey="struct", segments=[AnonymousName(id=1)]
                        )
                    ),
                ),
                Variable(
                    name=PQName(segments=[NameSpecifier(name="p_unnamed")]),
                    type=Pointer(
                        ptr_to=Type(
                            typename=PQName(
                                classkey="struct", segments=[AnonymousName(id=1)]
                            )
                        )
                    ),
                ),
            ],
        )
    )
def test_class_anon_struct_as_classvar():
    """An anonymous struct used as a member's type: the nested anonymous class
    and the field `a` share the same AnonymousName id."""
    content = """
      struct AnonHolderClass {
        struct {
          int x;
        } a;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="AnonHolderClass")],
                            classkey="struct",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[AnonymousName(id=1)], classkey="struct"
                                ),
                                access="public",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="x",
                                )
                            ],
                        )
                    ],
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[AnonymousName(id=1)], classkey="struct"
                                )
                            ),
                            name="a",
                        )
                    ],
                )
            ]
        )
    )
def test_initializer_with_initializer_list_1():
    """A constructor whose member-initializer list uses braces (m_stuff{i, 2})
    parses cleanly: the initializer and body are consumed (has_body=True) and
    the remaining members are still picked up."""
    content = """
      struct ComplexInit : SomeBase {
        ComplexInit(int i) : m_stuff{i, 2} { auto i = something(); }
        void fn();
        std::vector<int> m_stuff;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="ComplexInit")],
                            classkey="struct",
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(
                                    segments=[NameSpecifier(name="SomeBase")]
                                ),
                            )
                        ],
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(name="std"),
                                        NameSpecifier(
                                            name="vector",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    )
                                                ]
                                            ),
                                        ),
                                    ]
                                )
                            ),
                            name="m_stuff",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="ComplexInit")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="i",
                                )
                            ],
                            has_body=True,
                            access="public",
                            constructor=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="fn")]),
                            parameters=[],
                            access="public",
                        ),
                    ],
                )
            ]
        )
    )
def test_initializer_with_initializer_list_2():
    """A templated converting constructor whose initializer list contains a
    lambda with a trailing return type still parses: the move-reference
    parameter, noexcept and both template declarations are preserved."""
    content = """
      template <typename T> class future final {
      public:
        template <typename R>
        future(future<R> &&oth) noexcept
            : future(oth.then([](R &&val) -> T { return val; })) {}
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="future")], classkey="class"
                        ),
                        template=TemplateDecl(
                            params=[TemplateTypeParam(typekey="typename", name="T")]
                        ),
                        final=True,
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="future")]),
                            parameters=[
                                Parameter(
                                    type=MoveReference(
                                        moveref_to=Type(
                                            typename=PQName(
                                                segments=[
                                                    NameSpecifier(
                                                        name="future",
                                                        specialization=TemplateSpecialization(
                                                            args=[
                                                                TemplateArgument(
                                                                    arg=Type(
                                                                        typename=PQName(
                                                                            segments=[
                                                                                NameSpecifier(
                                                                                    name="R"
                                                                                )
                                                                            ]
                                                                        )
                                                                    )
                                                                )
                                                            ]
                                                        ),
                                                    )
                                                ]
                                            )
                                        )
                                    ),
                                    name="oth",
                                )
                            ],
                            has_body=True,
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="R")]
                            ),
                            noexcept=Value(tokens=[]),
                            access="public",
                            constructor=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_with_arrays():
    """Array members keep their size expressions as raw token Values — whether
    a named constant (MAX_ITEM) or a literal (7)."""
    content = """
      const int MAX_ITEM = 7;
      class Bird {
        int items[MAX_ITEM];
        int otherItems[7];
        int oneItem;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Bird")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="MAX_ITEM")]),
                            ),
                            name="items",
                        ),
                        Field(
                            access="private",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="7")]),
                            ),
                            name="otherItems",
                        ),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="oneItem",
                        ),
                    ],
                )
            ],
            variables=[
                Variable(
                    name=PQName(segments=[NameSpecifier(name="MAX_ITEM")]),
                    type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="int")]),
                        const=True,
                    ),
                    value=Value(tokens=[Token(value="7")]),
                )
            ],
        )
    )
def test_class_fn_inline_impl():
    """An out-of-class inline definition (Monkey::Create) is reported as a
    free Function with a qualified name, separate from the in-class Method
    declaration."""
    content = """
      class Monkey {
      private:
        static void Create();
      };
      inline void Monkey::Create() {}
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Monkey")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="Create")]),
                            parameters=[],
                            static=True,
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[FundamentalSpecifier(name="void")])
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Monkey"),
                            NameSpecifier(name="Create"),
                        ]
                    ),
                    parameters=[],
                    inline=True,
                    has_body=True,
                )
            ],
        )
    )
def test_class_fn_virtual_final_override():
    """virtual/final on methods, `final` on a derived class, and `override`
    are each reflected as flags on Method/ClassDecl."""
    content = """
      struct Lemon {
        virtual void foo() final;
        virtual void foo2();
      };
      struct Lime final : Lemon {
        void abc();
        void foo2() override;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lemon")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo")]),
                            parameters=[],
                            access="public",
                            virtual=True,
                            final=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo2")]),
                            parameters=[],
                            access="public",
                            virtual=True,
                        ),
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lime")], classkey="struct"
                        ),
                        bases=[
                            BaseClass(
                                access="public",
                                typename=PQName(segments=[NameSpecifier(name="Lemon")]),
                            )
                        ],
                        final=True,
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="abc")]),
                            parameters=[],
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="foo2")]),
                            parameters=[],
                            access="public",
                            override=True,
                        ),
                    ],
                ),
            ]
        )
    )
def test_class_fn_return_class():
    """Elaborated type specifiers (`class Peach *`) in return types and
    parameters keep classkey="class" on the PQName, for both an in-class
    method and an out-of-class function definition."""
    content = """
      class Peach {
        int abc;
      };
      class Plumb {
        class Peach *doSomethingGreat(class Peach *pInCurPtr);
        class Peach *var;
      };
      class Peach *Plumb::myMethod(class Peach *pInPtr) {
        return pInPtr;
      }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Peach")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="abc",
                        )
                    ],
                ),
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Plumb")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name="var",
                        )
                    ],
                    methods=[
                        Method(
                            return_type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="doSomethingGreat")]
                            ),
                            parameters=[
                                Parameter(
                                    type=Pointer(
                                        ptr_to=Type(
                                            typename=PQName(
                                                segments=[NameSpecifier(name="Peach")],
                                                classkey="class",
                                            )
                                        )
                                    ),
                                    name="pInCurPtr",
                                )
                            ],
                            access="private",
                        )
                    ],
                ),
            ],
            functions=[
                Function(
                    return_type=Pointer(
                        ptr_to=Type(
                            typename=PQName(
                                segments=[NameSpecifier(name="Peach")], classkey="class"
                            )
                        )
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Plumb"),
                            NameSpecifier(name="myMethod"),
                        ]
                    ),
                    parameters=[
                        Parameter(
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="Peach")],
                                        classkey="class",
                                    )
                                )
                            ),
                            name="pInPtr",
                        )
                    ],
                    has_body=True,
                )
            ],
        )
    )
def test_class_fn_template_impl():
    """A template method declared in-class and defined out-of-class: the
    TemplateDecl appears on both the Method and the qualified Function."""
    content = """
      class Owl {
      private:
        template <typename T> int *tFunc(int count);
      };
      template <typename T> int *Owl::tFunc(int count) {
        if (count == 0) {
          return NULL;
        }
        return NULL;
      }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Owl")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="int")]
                                    )
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="tFunc")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="count",
                                )
                            ],
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Pointer(
                        ptr_to=Type(
                            typename=PQName(segments=[FundamentalSpecifier(name="int")])
                        )
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Owl"),
                            NameSpecifier(name="tFunc"),
                        ]
                    ),
                    parameters=[
                        Parameter(
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="count",
                        )
                    ],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                )
            ],
        )
    )
def test_class_fn_inline_template_impl():
    """A static template method with a one-line out-of-class definition whose
    return type is the template parameter T."""
    content = """
      class Chicken {
        template <typename T> static T Get();
      };
      template <typename T> T Chicken::Get() { return T(); }
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Chicken")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(segments=[NameSpecifier(name="T")])
                            ),
                            name=PQName(segments=[NameSpecifier(name="Get")]),
                            parameters=[],
                            static=True,
                            template=TemplateDecl(
                                params=[TemplateTypeParam(typekey="typename", name="T")]
                            ),
                            access="private",
                        )
                    ],
                )
            ],
            functions=[
                Function(
                    return_type=Type(
                        typename=PQName(segments=[NameSpecifier(name="T")])
                    ),
                    name=PQName(
                        segments=[
                            NameSpecifier(name="Chicken"),
                            NameSpecifier(name="Get"),
                        ]
                    ),
                    parameters=[],
                    has_body=True,
                    template=TemplateDecl(
                        params=[TemplateTypeParam(typekey="typename", name="T")]
                    ),
                )
            ],
        )
    )
def test_class_fn_explicit_constructors():
    """Constructors are detected (constructor=True, return_type=None) and the
    `explicit` keyword sets explicit=True."""
    content = """
      class Lizzard {
        Lizzard();
        explicit Lizzard(int a);
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Lizzard")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="Lizzard")]),
                            parameters=[],
                            access="private",
                            constructor=True,
                        ),
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="Lizzard")]),
                            parameters=[
                                Parameter(
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                            access="private",
                            constructor=True,
                            explicit=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fn_default_constructor():
    """`= default` on a constructor sets default=True."""
    content = """
      class DefaultConstDest {
      public:
        DefaultConstDest() = default;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="DefaultConstDest")],
                            classkey="class",
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(
                                segments=[NameSpecifier(name="DefaultConstDest")]
                            ),
                            parameters=[],
                            access="public",
                            constructor=True,
                            default=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_fn_delete_constructor():
    """`= delete` on a constructor sets deleted=True."""
    content = """
      class A {
      public:
        A() = delete;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="A")], classkey="class"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=None,
                            name=PQName(segments=[NameSpecifier(name="A")]),
                            parameters=[],
                            access="public",
                            constructor=True,
                            deleted=True,
                        )
                    ],
                )
            ]
        )
    )
def test_class_multi_vars():
    """Comma-separated member declarations each become a separate Field with a
    full copy of the declared type — including templated types (map<string, int>)."""
    content = """
      class Grape {
      public:
        int a, b, c;
        map<string, int> d;
        map<string, int> e, f;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grape")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="a",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="b",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                )
                            ),
                            name="c",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="d",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="e",
                        ),
                        Field(
                            access="public",
                            type=Type(
                                typename=PQName(
                                    segments=[
                                        NameSpecifier(
                                            name="map",
                                            specialization=TemplateSpecialization(
                                                args=[
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    NameSpecifier(
                                                                        name="string"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                    TemplateArgument(
                                                        arg=Type(
                                                            typename=PQName(
                                                                segments=[
                                                                    FundamentalSpecifier(
                                                                        name="int"
                                                                    )
                                                                ]
                                                            )
                                                        )
                                                    ),
                                                ]
                                            ),
                                        )
                                    ]
                                )
                            ),
                            name="f",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_static_const_var_expr():
    """Static-const member initializers are captured as raw token streams,
    including parenthesized arithmetic and sizeof expressions."""
    content = """
      class PandaClass {
        static const int CONST_A = (1 << 7) - 1;
        static const int CONST_B = sizeof(int);
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PandaClass")],
                            classkey="class",
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            name="CONST_A",
                            value=Value(
                                tokens=[
                                    Token(value="("),
                                    Token(value="1"),
                                    Token(value="<<"),
                                    Token(value="7"),
                                    Token(value=")"),
                                    Token(value="-"),
                                    Token(value="1"),
                                ]
                            ),
                            static=True,
                        ),
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="int")]
                                ),
                                const=True,
                            ),
                            name="CONST_B",
                            value=Value(
                                tokens=[
                                    Token(value="sizeof"),
                                    Token(value="("),
                                    Token(value="int"),
                                    Token(value=")"),
                                ]
                            ),
                            static=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_fwd_struct():
    """A struct forward-declared, used as a pointer member, then defined — all
    within the same enclosing class: forward_decls, fields and classes each get
    their entry."""
    content = """
      class PotatoClass {
        struct FwdStruct;
        FwdStruct *ptr;
        struct FwdStruct {
          int a;
        };
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="PotatoClass")],
                            classkey="class",
                        )
                    ),
                    classes=[
                        ClassScope(
                            class_decl=ClassDecl(
                                typename=PQName(
                                    segments=[NameSpecifier(name="FwdStruct")],
                                    classkey="struct",
                                ),
                                access="private",
                            ),
                            fields=[
                                Field(
                                    access="public",
                                    type=Type(
                                        typename=PQName(
                                            segments=[FundamentalSpecifier(name="int")]
                                        )
                                    ),
                                    name="a",
                                )
                            ],
                        )
                    ],
                    fields=[
                        Field(
                            access="private",
                            type=Pointer(
                                ptr_to=Type(
                                    typename=PQName(
                                        segments=[NameSpecifier(name="FwdStruct")]
                                    )
                                )
                            ),
                            name="ptr",
                        )
                    ],
                    forward_decls=[
                        ForwardDecl(
                            typename=PQName(
                                segments=[NameSpecifier(name="FwdStruct")],
                                classkey="struct",
                            ),
                            access="private",
                        )
                    ],
                )
            ]
        )
    )
def test_class_multi_array():
    """A two-dimensional array member nests Array types: the outer Array holds
    the first extent (128) and its array_of holds the second (256)."""
    content = """
      struct Picture {
        char name[25];
        unsigned int pdata[128][256];
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Picture")], classkey="struct"
                        )
                    ),
                    fields=[
                        Field(
                            access="public",
                            type=Array(
                                array_of=Type(
                                    typename=PQName(
                                        segments=[FundamentalSpecifier(name="char")]
                                    )
                                ),
                                size=Value(tokens=[Token(value="25")]),
                            ),
                            name="name",
                        ),
                        Field(
                            access="public",
                            type=Array(
                                array_of=Array(
                                    array_of=Type(
                                        typename=PQName(
                                            segments=[
                                                FundamentalSpecifier(
                                                    name="unsigned int"
                                                )
                                            ]
                                        )
                                    ),
                                    size=Value(tokens=[Token(value="256")]),
                                ),
                                size=Value(tokens=[Token(value="128")]),
                            ),
                            name="pdata",
                        ),
                    ],
                )
            ]
        )
    )
def test_class_noexcept():
    """noexcept variants on methods: absent, bare (empty token Value), with a
    boolean argument, and with a nested noexcept-operator expression — each
    also combined with const."""
    content = """
      struct Grackle {
        void no_noexcept();
        void just_noexcept() noexcept;
        void const_noexcept() const noexcept;
        void noexcept_bool() noexcept(true);
        void const_noexcept_bool() const noexcept(true);
        void noexcept_noexceptOperator() noexcept(noexcept(Grackle()));
        void const_noexcept_noexceptOperator() const noexcept(noexcept(Grackle()));
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Grackle")], classkey="struct"
                        )
                    ),
                    methods=[
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="no_noexcept")]),
                            parameters=[],
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="just_noexcept")]),
                            parameters=[],
                            noexcept=Value(tokens=[]),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="const_noexcept")]
                            ),
                            parameters=[],
                            noexcept=Value(tokens=[]),
                            access="public",
                            const=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(segments=[NameSpecifier(name="noexcept_bool")]),
                            parameters=[],
                            noexcept=Value(tokens=[Token(value="true")]),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[NameSpecifier(name="const_noexcept_bool")]
                            ),
                            parameters=[],
                            noexcept=Value(tokens=[Token(value="true")]),
                            access="public",
                            const=True,
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[
                                    NameSpecifier(name="noexcept_noexceptOperator")
                                ]
                            ),
                            parameters=[],
                            noexcept=Value(
                                tokens=[
                                    Token(value="noexcept"),
                                    Token(value="("),
                                    Token(value="Grackle"),
                                    Token(value="("),
                                    Token(value=")"),
                                    Token(value=")"),
                                ]
                            ),
                            access="public",
                        ),
                        Method(
                            return_type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="void")]
                                )
                            ),
                            name=PQName(
                                segments=[
                                    NameSpecifier(
                                        name="const_noexcept_noexceptOperator"
                                    )
                                ]
                            ),
                            parameters=[],
                            noexcept=Value(
                                tokens=[
                                    Token(value="noexcept"),
                                    Token(value="("),
                                    Token(value="Grackle"),
                                    Token(value="("),
                                    Token(value=")"),
                                    Token(value=")"),
                                ]
                            ),
                            access="public",
                            const=True,
                        ),
                    ],
                )
            ]
        )
    )
def test_class_volatile():
    """A volatile-qualified member sets volatile=True on the field's Type; an
    empty `public:` section before `private:` adds nothing."""
    content = """
      class Foo
      {
      public:
      private:
        volatile bool myToShutDown;
      };
    """
    data = parse_string(content, cleandoc=True)
    assert data == ParsedData(
        namespace=NamespaceScope(
            classes=[
                ClassScope(
                    class_decl=ClassDecl(
                        typename=PQName(
                            segments=[NameSpecifier(name="Foo")], classkey="class"
                        )
                    ),
                    fields=[
                        Field(
                            access="private",
                            type=Type(
                                typename=PQName(
                                    segments=[FundamentalSpecifier(name="bool")]
                                ),
                                volatile=True,
                            ),
                            name="myToShutDown",
                        )
                    ],
                )
            ]
        )
    )
f71f6e862b3a393d8f1a1757bbce7092bfb70ae4 | 33,635 | py | Python | demisto_sdk/commands/common/tests/pack_unique_files_test.py | guiguitodelperuu/demisto-sdk | 3eb0206593bc955a64c6594d717c04e52e254e1d | [
"MIT"
] | null | null | null | demisto_sdk/commands/common/tests/pack_unique_files_test.py | guiguitodelperuu/demisto-sdk | 3eb0206593bc955a64c6594d717c04e52e254e1d | [
"MIT"
] | null | null | null | demisto_sdk/commands/common/tests/pack_unique_files_test.py | guiguitodelperuu/demisto-sdk | 3eb0206593bc955a64c6594d717c04e52e254e1d | [
"MIT"
] | null | null | null | import json
import os
import click
import pytest
import requests_mock
from click.testing import CliRunner
from git import GitCommandError
from demisto_sdk.__main__ import main
from demisto_sdk.commands.common import tools
from demisto_sdk.commands.common.constants import (PACK_METADATA_DESC,
PACK_METADATA_SUPPORT,
PACK_METADATA_TAGS,
PACK_METADATA_USE_CASES,
PACKS_README_FILE_NAME,
XSOAR_SUPPORT)
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.hook_validations.base_validator import \
BaseValidator
from demisto_sdk.commands.common.hook_validations.pack_unique_files import \
PackUniqueFilesValidator
from demisto_sdk.commands.common.legacy_git_tools import git_path
from TestSuite.test_tools import ChangeCWD
# demisto-sdk CLI subcommand invoked through the click test runner below.
VALIDATE_CMD = "validate"
# Baseline metadata for a partner-supported pack; tests copy this dict and
# mutate single fields (email/url/price) to trigger specific validations.
PACK_METADATA_PARTNER = {
    "name": "test",
    "description": "test",
    "support": "partner",
    "currentVersion": "1.0.1",
    "author": "bar",
    "categories": [
        "Data Enrichment & Threat Intelligence"
    ],
    "tags": [],
    "useCases": [],
    "keywords": [],
    "price": 2,
    "email": "some@mail.com",
    "url": "https://www.paloaltonetworks.com/cortex"
}
# (readme_content, expected_result) pairs — presumably False means the README
# is considered empty/invalid at the use site (not visible here); confirm.
README_INPUT_RESULTS_LIST = [
    ('', False),
    (' ', False),
    ('\t\t\n ', False),
    ('Text', True),
]
class TestPackUniqueFilesValidator:
FILES_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files', 'Packs'))
FAKE_PACK_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files',
'fake_pack'))
FAKE_PATH_NAME = 'fake_pack'
validator = PackUniqueFilesValidator(FAKE_PATH_NAME)
validator.pack_path = FAKE_PACK_PATH
    def restart_validator(self):
        """Replace the shared validator with a fresh instance aimed at the fake
        pack. Note: assigning via `self` shadows the class attribute for this
        instance only."""
        self.validator.pack_path = ''
        self.validator = PackUniqueFilesValidator(self.FAKE_PATH_NAME)
        self.validator.pack_path = self.FAKE_PACK_PATH
    def test_is_error_added_name_only(self):
        """_add_error with a bare file name prefixes it with the pack path in
        both raw and formatted error output."""
        self.validator._add_error(('boop', '101'), 'file_name')
        assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors(True)
        assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors()
        self.validator._errors = []  # reset shared validator state for subsequent tests
    def test_is_error_added_full_path(self):
        """_add_error with a path already rooted at the pack does not get the
        prefix duplicated."""
        self.validator._add_error(('boop', '101'), f'{self.validator.pack_path}/file/name')
        assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors(True)
        assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors()
        self.validator._errors = []  # reset shared validator state for subsequent tests
    def test_is_file_exist(self):
        """The fake pack contains a README; an arbitrary file name does not exist."""
        assert self.validator._is_pack_file_exists(PACKS_README_FILE_NAME)
        assert not self.validator._is_pack_file_exists('boop')
        self.validator._errors = []  # reset shared validator state for subsequent tests
    def test_parse_file_into_list(self):
        """The fake pack's README splits into its lines (including the trailing
        empty string); parsing a missing file returns a falsy result."""
        assert ['boop', 'sade', ''] == self.validator._parse_file_into_list(PACKS_README_FILE_NAME)
        assert not self.validator._parse_file_into_list('boop')
        self.validator._errors = []  # reset shared validator state for subsequent tests
    def test_validate_pack_unique_files(self, mocker):
        """With readme/description checks mocked out, the fake pack validates
        cleanly while a validator whose metadata is an empty dict does not.
        NOTE(review): are_valid_files appears to return the collected error
        text (falsy == no errors) — confirm in PackUniqueFilesValidator."""
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        assert not self.validator.are_valid_files(id_set_validations=False)
        fake_validator = PackUniqueFilesValidator('fake')
        mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
        assert fake_validator.are_valid_files(id_set_validations=False)
    def test_validate_pack_metadata(self, mocker):
        """NOTE(review): this body is byte-identical to
        test_validate_pack_unique_files above — presumably it was meant to
        exercise metadata-specific validation; confirm intent and de-duplicate."""
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        assert not self.validator.are_valid_files(id_set_validations=False)
        fake_validator = PackUniqueFilesValidator('fake')
        mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
        assert fake_validator.are_valid_files(id_set_validations=False)
    def test_validate_partner_contribute_pack_metadata_no_mail_and_url(self, mocker, repo):
        """
        Given
        - Partner contributed pack without email and url.
        When
        - Running validate on it.
        Then
        - Ensure validate found errors.
        """
        # Strip both contact fields so the "email or url required" check fires.
        pack_metadata_no_email_and_url = PACK_METADATA_PARTNER.copy()
        pack_metadata_no_email_and_url['email'] = ''
        pack_metadata_no_email_and_url['url'] = ''
        # Simulate an external (contribution) repo with no private master metadata.
        mocker.patch.object(tools, 'is_external_repository', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
        mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
        mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
                            return_value=json.dumps(pack_metadata_no_email_and_url))
        mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
        mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
        pack = repo.create_pack('PackName')
        pack.pack_metadata.write_json(pack_metadata_no_email_and_url)
        with ChangeCWD(repo.path):
            runner = CliRunner(mix_stderr=False)
            result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
        assert 'Contributed packs must include email or url' in result.stdout
@pytest.mark.parametrize('url, is_valid', [
    ('https://github.com/pont_to_repo', False),
    ('some_support_url', True),
    ('https://github.com/pont_to_repo/issues', True),
])
def test_validate_partner_pack_metadata_url(self, mocker, repo, url, is_valid):
    """
    Given
    - Partner contributed pack with an is_valid url.
    When
    - Running validate on it.
    Then
    - Ensure validate finds errors accordingly.
    """
    pack_metadata_changed_url = PACK_METADATA_PARTNER.copy()
    pack_metadata_changed_url['url'] = url
    # Simulate a contribution flow: external repo, pack files present, and no
    # master-branch metadata to diff against (skips the price-change check).
    mocker.patch.object(tools, 'is_external_repository', return_value=True)
    mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
    mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
    mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
                        return_value=json.dumps(pack_metadata_changed_url))
    mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
    mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
    pack = repo.create_pack('PackName')
    pack.pack_metadata.write_json(pack_metadata_changed_url)
    with ChangeCWD(repo.path):
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
    # A bare GitHub repo URL is rejected; an /issues URL or a non-GitHub URL passes.
    error_text = 'The metadata URL leads to a GitHub repo instead of a support page.'
    if is_valid:
        assert error_text not in result.stdout
    else:
        assert error_text in result.stdout
def test_validate_partner_contribute_pack_metadata_price_change(self, mocker, repo):
    """
    Given
    - Partner contributed pack where price has changed.
    When
    - Running validate on it.
    Then
    - Ensure validate found errors.
    """
    # PACK_METADATA_PARTNER carries price 2; bump it to 3 in the "contributed" copy.
    pack_metadata_price_changed = PACK_METADATA_PARTNER.copy()
    pack_metadata_price_changed['price'] = 3
    mocker.patch.object(tools, 'is_external_repository', return_value=True)
    mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
    # Unlike the other partner tests, the master-branch metadata IS available here,
    # so the validator can diff the price (2 on master vs 3 in the contribution).
    mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file',
                        return_value=PACK_METADATA_PARTNER)
    mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
                        return_value=json.dumps(pack_metadata_price_changed))
    mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
    mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
    pack = repo.create_pack('PackName')
    pack.pack_metadata.write_json(pack_metadata_price_changed)
    with ChangeCWD(repo.path):
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
    assert 'The pack price was changed from 2 to 3 - revert the change' in result.stdout
def test_check_timestamp_format(self):
    """
    Given
    - timestamps in various formats.
    When
    - Running check_timestamp_format on them.
    Then
    - Ensure True for the ISO `YYYY-MM-DDTHH:MM:SSZ` format and False for any other format.
    """
    validator = PackUniqueFilesValidator('fake')
    assert validator.check_timestamp_format('2020-04-14T00:00:00Z')
    invalid_timestamps = (
        '2020-04-14 00:00:00Z',   # missing 'T' separator
        '2020-04-14T00:00:00',    # missing trailing 'Z'
        '2020-04-14',             # date only
        '2020-04-14T00-00-00Z',   # hyphens instead of colons in the time part
    )
    for bad_timestamp in invalid_timestamps:
        assert not validator.check_timestamp_format(bad_timestamp)
def test_validate_pack_dependencies_invalid_id_set(self, mocker, repo):
    """
    Given
    - An invalid id set error being raised
    When
    - Running validate_pack_dependencies.
    Then
    - Ensure that the validation fails and that the invalid id set error is printed.
    """
    self.restart_validator()

    def raise_invalid_id_set(*_args, **_kwargs):
        # Mimics the error raised when the id_set has no entries for the pack.
        raise ValueError("Couldn't find any items for pack 'PackID'. make sure your spelling is correct.")

    mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.get_core_pack_list',
                 side_effect=raise_invalid_id_set)
    assert not self.validator.validate_pack_dependencies()
    assert Errors.invalid_id_set()[0] in self.validator.get_errors()
def test_validate_core_pack_dependencies(self):
    """
    Given
    - A list of non-core packs
    When
    - Running validate_core_pack_dependencies.
    Then
    - Ensure that the validation fails and that the invalid core pack dependencies error is printed.
    """
    self.restart_validator()
    # Packs 1 and 3 are mandatory dependencies and should be reported; pack 2 is optional.
    dependencies_packs = {
        f'dependency_pack_{index}': {'mandatory': mandatory, 'display_name': f'dependency pack {index}'}
        for index, mandatory in ((1, True), (2, False), (3, True))
    }
    assert not self.validator.validate_core_pack_dependencies(dependencies_packs)
    expected_error = Errors.invalid_core_pack_dependencies(
        'fake_pack', ['dependency_pack_1', 'dependency_pack_3'])[0]
    assert expected_error in self.validator.get_errors()
def test_validate_pack_dependencies_skip_id_set_creation(self, capsys):
    """
    Given
    - skip_id_set_creation flag set to true.
    - No id_set file exists
    When
    - Running validate_pack_dependencies.
    Then
    - Ensure that the validation passes and that the skipping message is printed.
    """
    self.restart_validator()
    self.validator.skip_id_set_creation = True
    try:
        res = self.validator.validate_pack_dependencies()
    finally:
        # Revert in a finally block so the flag does not leak into later tests
        # even if validate_pack_dependencies raises (the class-level validator is shared).
        self.validator.skip_id_set_creation = False
    assert res
    assert "No first level dependencies found" in capsys.readouterr().out
@pytest.mark.parametrize('usecases, is_valid, branch_usecases', [
    ([], True, []),
    (['Phishing', 'Malware'], True, []),
    (['NonApprovedUsecase', 'Case Management'], False, []),
    (['NewUseCase'], True, ['NewUseCase']),
    # NOTE(review): a single string containing a comma — presumably deliberate so the
    # combined value never matches the approved list; confirm it is not a missing-quote typo.
    (['NewUseCase1, NewUseCase2'], False, ['NewUseCase1'])
])
def test_is_approved_usecases(self, repo, usecases, is_valid, branch_usecases, mocker):
    """
    Given:
    - Case A: Pack without usecases
    - Case B: Pack with approved usecases (Phishing and Malware)
    - Case C: Pack with non-approved usecase (NonApprovedUsecase) and approved usecase (Case Management)
    - Case D: Pack with approved usecase (NewUseCase) located in my branch only
    - Case E: Pack with non-approved usecase (NewUseCase2) and approved usecase (NewUseCase1)
        located in my branch only
    When:
    - Validating approved usecases
    Then:
    - Case A: Ensure validation passes as there are no usecases to verify
    - Case B: Ensure validation passes as both usecases are approved
    - Case C: Ensure validation fails as it contains a non-approved usecase (NonApprovedUsecase)
        Verify expected error is printed
    - Case D: Ensure validation passes as usecase is approved on the same branch
    - Case E: Ensure validation fails as it contains a non-approved usecase (NewUseCase2)
        Verify expected error is printed
    """
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json({
        PACK_METADATA_USE_CASES: usecases,
        PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
        PACK_METADATA_TAGS: []
    })
    mocker.patch.object(tools, 'is_external_repository', return_value=False)
    # The branch-level approved usecase list is served through get_dict_from_file.
    mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_usecases}, 'json'))
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator._is_approved_usecases() == is_valid
        if not is_valid:
            assert 'The pack metadata contains non approved usecases:' in self.validator.get_errors()
@pytest.mark.parametrize('tags, is_valid, branch_tags', [
    ([], True, []),
    (['Machine Learning', 'Spam'], True, []),
    (['NonApprovedTag', 'GDPR'], False, []),
    (['NewTag'], True, ['NewTag']),
    # NOTE(review): a single string containing a comma — presumably deliberate so the
    # combined value never matches the approved list; confirm it is not a missing-quote typo.
    (['NewTag1, NewTag2'], False, ['NewTag1'])
])
def test_is_approved_tags(self, repo, tags, is_valid, branch_tags, mocker):
    """
    Given:
    - Case A: Pack without tags
    - Case B: Pack with approved tags (Machine Learning and Spam)
    - Case C: Pack with non-approved tag (NonApprovedTag) and approved tag (GDPR)
    - Case D: Pack with approved tag (NewTag) located in my branch only
    - Case E: Pack with non-approved tag (NewTag2) and approved tag (NewTag1)
        located in my branch only
    When:
    - Validating approved tags
    Then:
    - Case A: Ensure validation passes as there are no tags to verify
    - Case B: Ensure validation passes as both tags are approved
    - Case C: Ensure validation fails as it contains a non-approved tag (NonApprovedTag)
        Verify expected error is printed
    - Case D: Ensure validation passes as tag is approved on the same branch
    - Case E: Ensure validation fails as it contains a non-approved tag (NewTag2)
        Verify expected error is printed
    """
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json({
        PACK_METADATA_USE_CASES: [],
        PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
        PACK_METADATA_TAGS: tags
    })
    mocker.patch.object(tools, 'is_external_repository', return_value=False)
    # The branch-level approved tag list is served through get_dict_from_file.
    mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_tags}, 'json'))
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator._is_approved_tags() == is_valid
        if not is_valid:
            assert 'The pack metadata contains non approved tags:' in self.validator.get_errors()
@pytest.mark.parametrize('pack_content, tags, is_valid', [
    ("none", [], True),
    ("none", ["Use Case"], False),
    ("playbook", ["Use Case"], True),
    ("incident", ["Use Case"], True),
    ("layout", ["Use Case"], True),
    ("playbook", [], True),
])
def test_is_right_usage_of_usecase_tag(self, repo, pack_content, tags, is_valid):
    """
    The "Use Case" tag is only allowed when the pack actually ships use-case
    content (a playbook, an incident type, or a layout). A pack with the tag
    but no such content must fail; everything else passes.
    """
    self.restart_validator()
    pack = repo.create_pack('PackName')
    pack.pack_metadata.write_json({
        PACK_METADATA_USE_CASES: [],
        PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
        PACK_METADATA_TAGS: tags,
    })
    # Map each parametrized content kind to the fixture call that creates it;
    # "none" intentionally creates nothing.
    content_creators = {
        "playbook": lambda: pack.create_playbook(name="PlaybookName"),
        "incident": lambda: pack.create_incident_type(name="IncidentTypeName"),
        "layout": lambda: pack.create_layout(name="Layout"),
    }
    if pack_content in content_creators:
        content_creators[pack_content]()
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator.is_right_usage_of_usecase_tag() == is_valid
@pytest.mark.parametrize('support_type, is_valid', [
    ('community', True),
    ('partner', True),
    ('xsoar', True),
    ('someName', False),
    ('test', False),
    ('developer', True)
])
def test_is_valid_support_type(self, repo, support_type, is_valid):
    """
    Given:
    - Pack with support type in the metadata file.
    When:
    - Running _is_valid_support_type.
    Then:
    - Ensure True when the support types are valid, else False with the right error message.
    """
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json({
        PACK_METADATA_USE_CASES: [],
        # `support_type` (renamed from `type`) avoids shadowing the `type` builtin.
        PACK_METADATA_SUPPORT: support_type
    })
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator._is_valid_support_type() == is_valid
        if not is_valid:
            assert 'Support field should be one of the following: xsoar, partner, developer or community.' in \
                self.validator.get_errors()
def test_get_master_private_repo_meta_file_running_on_master(self, mocker, repo, capsys):
    """
    Given:
    - A repo which runs on master branch
    When:
    - Running get_master_private_repo_meta_file.
    Then:
    - Ensure result is None and the appropriate skipping message is printed.
    """
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json(PACK_METADATA_PARTNER)

    class MyRepo:
        # Minimal git.Repo stand-in: only the attribute the validator inspects.
        active_branch = 'master'

    mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
    res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
    assert not res
    assert "Running on master branch - skipping price change validation" in capsys.readouterr().out
def test_get_master_private_repo_meta_file_getting_git_error(self, repo, capsys, mocker):
    """
    Given:
    - A repo which runs on non-master branch.
    - git.show command raises GitCommandError.
    When:
    - Running get_master_private_repo_meta_file.
    Then:
    - Ensure result is None and the appropriate skipping message is printed.
    """
    self.restart_validator()
    pack = repo.create_pack('PackName')
    pack.pack_metadata.write_json(PACK_METADATA_PARTNER)

    class FailingGit:
        # Stand-in for git.Repo().git whose `show` always fails.
        def show(self, var):
            raise GitCommandError("A", "B")

    class FakeRepo:
        active_branch = 'not-master'
        git = FailingGit()

    mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=FakeRepo)
    res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
    assert not res
    assert "Got an error while trying to connect to git" in capsys.readouterr().out
def test_get_master_private_repo_meta_file_file_not_found(self, mocker, repo, capsys):
    """
    Given:
    - A repo which runs on non-master branch.
    - git.show command returns None.
    When:
    - Running get_master_private_repo_meta_file.
    Then:
    - Ensure result is None and the appropriate skipping message is printed.
    """
    self.restart_validator()
    pack = repo.create_pack('PackName')
    pack.pack_metadata.write_json(PACK_METADATA_PARTNER)

    class EmptyGit:
        # Stand-in for git.Repo().git whose `show` finds nothing.
        def show(self, var):
            return None

    class FakeRepo:
        active_branch = 'not-master'
        git = EmptyGit()

    mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=FakeRepo)
    res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
    assert not res
    assert "Unable to find previous pack_metadata.json file - skipping price change validation" in \
        capsys.readouterr().out
@pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
def test_validate_pack_readme_file_is_not_empty_partner(self, mocker, text, result):
    """
    For a partner pack, validate_pack_readme_file_is_not_empty must return
    False when the README.md content is empty or whitespace-only and True otherwise.
    """
    mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
    partner_validator = PackUniqueFilesValidator(self.FAKE_PACK_PATH)
    partner_validator.support = 'partner'
    self.validator = partner_validator
    assert partner_validator.validate_pack_readme_file_is_not_empty() == result
@pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
def test_validate_pack_readme_file_is_not_empty_use_case(self, mocker, text, result):
    """
    For a pack that ships use-case content (CortexXDR fixture),
    validate_pack_readme_file_is_not_empty must return False when the README.md
    content is empty or whitespace-only and True otherwise.
    """
    mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'CortexXDR'))
    assert self.validator.validate_pack_readme_file_is_not_empty() == result
def test_validate_pack_readme_file_is_not_empty_missing_file(self):
    """Ensure _is_pack_file_exists reports False for a pack (DummyPack fixture) without a README.md."""
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack'))
    assert self.validator._is_pack_file_exists(self.validator.readme_file) is False
def test_validate_pack_readme_valid_images(self, mocker):
    """
    Given
    - A pack README file with valid absolute image paths in it.
    When
    - Run validate on pack README file
    Then
    - Ensure:
      - Validation succeeds
      - Valid absolute image paths were not caught
    """
    from demisto_sdk.commands.common.hook_validations.readme import \
        ReadMeValidator
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
    mocker.patch.object(ReadMeValidator, 'check_readme_relative_image_paths', return_value=[])  # Test only absolute paths
    with requests_mock.Mocker() as m:
        # Mock get requests: every absolute image URL in the README resolves successfully.
        m.get('https://github.com/demisto/content/raw/test1.png',
              status_code=200, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
              status_code=200, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
              status_code=200, text="Test1")
        result = self.validator.validate_pack_readme_images()
    errors = self.validator.get_errors()
    assert result
    # The generic substring covers every URL, so a single assertion suffices
    # (the original repeated this check verbatim by copy-paste).
    assert 'please repair it:\n' not in errors
def test_validate_pack_readme_invalid_images(self):
    """
    Given
    - A pack README file with invalid absolute and relative image paths in it.
    When
    - Run validate on pack README file
    Then
    - Ensure:
      - Validation fails
      - Invalid relative image paths were caught correctly
      - Invalid absolute image paths were caught correctly
    """
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
    with requests_mock.Mocker() as m:
        # Mock get requests: every absolute image URL in the README fails to resolve.
        m.get('https://github.com/demisto/content/raw/test1.png',
              status_code=404, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
              status_code=404, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
              status_code=404, text="Test1")
        result = self.validator.validate_pack_readme_images()
    errors = self.validator.get_errors()
    assert not result
    # The specific assertions below imply the generic substring checks that the
    # original repeated verbatim by copy-paste, so the duplicates are dropped.
    assert 'Detected the following image relative path: (../../doc_files/Access_investigation_-_Generic_4_5.png)' in errors
    assert 'Image link was not found, either insert it or remove it:\n' in errors
    assert 'please repair it:\n(https://raw.githubusercontent.com/demisto/content/raw/test1.jpg)' in errors
@pytest.mark.parametrize('readme_content, is_valid', [
    ('Hey there, just testing', True),
    ('This is a test. All good!', False),
])
def test_pack_readme_is_different_then_pack_description(self, repo, readme_content, is_valid):
    """
    validate_pack_readme_and_pack_description passes only when the pack README
    differs from the pack description.

    Case A (unique readme): validation passes.
    Case B (readme equals the description): validation fails and records the
    duplicate-description error.
    """
    self.restart_validator()
    pack = repo.create_pack('PackName')
    pack.readme.write_text(readme_content)
    pack.pack_metadata.write_json({PACK_METADATA_DESC: 'This is a test. All good!'})
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator.validate_pack_readme_and_pack_description() == is_valid
        if not is_valid:
            duplicate_description_error = ('README.md content is equal to pack description. '
                                           'Please remove the duplicate description from README.md file')
            assert duplicate_description_error in self.validator.get_errors()
def test_validate_pack_readme_and_pack_description_no_readme_file(self, repo):
    """
    Given:
    - A pack with no readme.
    When:
    - Validating pack readme vs pack description
    Then:
    - Fail on the missing README file and not on the description-equality error.
    """
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        # Remove the README the pack fixture created so the missing-file path is exercised.
        os.remove(pack.readme.path)
        assert self.validator.validate_pack_readme_and_pack_description()
        assert '"README.md" file does not exist, create one in the root of the pack' in self.validator.get_errors()
        assert 'README.md content is equal to pack description. ' \
               'Please remove the duplicate description from README.md file' not in self.validator.get_errors()
def test_valid_is_pack_metadata_desc_too_long(self, repo):
    """
    Given:
    - Valid description length
    When:
    - Validating pack description length
    Then:
    - Ensure validation passes as the description field length is valid.
    """
    short_description = 'Hey there, just testing'
    assert self.validator.is_pack_metadata_desc_too_long(short_description) is True
def test_invalid_is_pack_metadata_desc_too_long(self, mocker, repo):
    """
    Given:
    - Invalid description length - higher than 130
    When:
    - Validating pack description length
    Then:
    - Ensure validation passes although description field length is higher than 130
    - Ensure a warning is emitted via click.secho.
    """
    long_description = ('This is will fail cause the description here is too long.'
                        'test test test test test test test test test test test test test test test test test'
                        ' test test test test test')
    secho_mock = mocker.patch("click.secho")
    assert self.validator.is_pack_metadata_desc_too_long(long_description) is True
    # Only a warning is printed; the validation itself still passes.
    expected_warning = 'The description field of the pack_metadata.json file is longer than 130 characters.'
    assert expected_warning in secho_mock.call_args_list[0][0][0]
def test_validate_author_image_exists_valid(self, repo):
    """
    Given:
    - Pack with partner support and author image
    When:
    - Validating if author image exists
    Then:
    - Ensure validation passes and no partner-image error is recorded.
    """
    pack = repo.create_pack('MyPack')
    self.validator.metadata_content = {'support': 'partner'}
    self.validator.pack_path = pack.path
    expected_error = f'Partners must provide a non-empty author image under the path {pack.author_image.path}.'
    with ChangeCWD(repo.path):
        assert self.validator.validate_author_image_exists()
        assert expected_error not in self.validator.get_errors()
def test_validate_author_image_exists_invalid(self, repo):
    """
    Given:
    - Pack with partner support and no author image
    When:
    - Validating if author image exists
    Then:
    - Ensure validation fails and the partner-image error is recorded.
    """
    pack = repo.create_pack('MyPack')
    self.validator.metadata_content = {'support': 'partner'}
    self.validator.pack_path = pack.path
    author_image_path = pack.author_image.path
    with ChangeCWD(repo.path):
        # Delete the fixture-created author image to trigger the failure path.
        os.remove(author_image_path)
        validation_passed = self.validator.validate_author_image_exists()
        assert not validation_passed
        assert f'Partners must provide a non-empty author image under the path {author_image_path}.' in \
            self.validator.get_errors()
| 43.456072 | 150 | 0.648759 | import json
import os
import click
import pytest
import requests_mock
from click.testing import CliRunner
from git import GitCommandError
from demisto_sdk.__main__ import main
from demisto_sdk.commands.common import tools
from demisto_sdk.commands.common.constants import (PACK_METADATA_DESC,
PACK_METADATA_SUPPORT,
PACK_METADATA_TAGS,
PACK_METADATA_USE_CASES,
PACKS_README_FILE_NAME,
XSOAR_SUPPORT)
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.hook_validations.base_validator import \
BaseValidator
from demisto_sdk.commands.common.hook_validations.pack_unique_files import \
PackUniqueFilesValidator
from demisto_sdk.commands.common.legacy_git_tools import git_path
from TestSuite.test_tools import ChangeCWD
VALIDATE_CMD = "validate"
PACK_METADATA_PARTNER = {
"name": "test",
"description": "test",
"support": "partner",
"currentVersion": "1.0.1",
"author": "bar",
"categories": [
"Data Enrichment & Threat Intelligence"
],
"tags": [],
"useCases": [],
"keywords": [],
"price": 2,
"email": "some@mail.com",
"url": "https://www.paloaltonetworks.com/cortex"
}
README_INPUT_RESULTS_LIST = [
('', False),
(' ', False),
('\t\t\n ', False),
('Text', True),
]
class TestPackUniqueFilesValidator:
FILES_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files', 'Packs'))
FAKE_PACK_PATH = os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files',
'fake_pack'))
FAKE_PATH_NAME = 'fake_pack'
validator = PackUniqueFilesValidator(FAKE_PATH_NAME)
validator.pack_path = FAKE_PACK_PATH
def restart_validator(self):
self.validator.pack_path = ''
self.validator = PackUniqueFilesValidator(self.FAKE_PATH_NAME)
self.validator.pack_path = self.FAKE_PACK_PATH
def test_is_error_added_name_only(self):
self.validator._add_error(('boop', '101'), 'file_name')
assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors(True)
assert f'{self.validator.pack_path}/file_name: [101] - boop\n' in self.validator.get_errors()
self.validator._errors = []
def test_is_error_added_full_path(self):
self.validator._add_error(('boop', '101'), f'{self.validator.pack_path}/file/name')
assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors(True)
assert f'{self.validator.pack_path}/file/name: [101] - boop\n' in self.validator.get_errors()
self.validator._errors = []
def test_is_file_exist(self):
assert self.validator._is_pack_file_exists(PACKS_README_FILE_NAME)
assert not self.validator._is_pack_file_exists('boop')
self.validator._errors = []
def test_parse_file_into_list(self):
assert ['boop', 'sade', ''] == self.validator._parse_file_into_list(PACKS_README_FILE_NAME)
assert not self.validator._parse_file_into_list('boop')
self.validator._errors = []
def test_validate_pack_unique_files(self, mocker):
mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
assert not self.validator.are_valid_files(id_set_validations=False)
fake_validator = PackUniqueFilesValidator('fake')
mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
assert fake_validator.are_valid_files(id_set_validations=False)
def test_validate_pack_metadata(self, mocker):
mocker.patch.object(BaseValidator, 'check_file_flags', return_value='')
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_and_pack_description', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'validate_pack_readme_images', return_value=True)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
assert not self.validator.are_valid_files(id_set_validations=False)
fake_validator = PackUniqueFilesValidator('fake')
mocker.patch.object(fake_validator, '_read_metadata_content', return_value=dict())
assert fake_validator.are_valid_files(id_set_validations=False)
def test_validate_partner_contribute_pack_metadata_no_mail_and_url(self, mocker, repo):
pack_metadata_no_email_and_url = PACK_METADATA_PARTNER.copy()
pack_metadata_no_email_and_url['email'] = ''
pack_metadata_no_email_and_url['url'] = ''
mocker.patch.object(tools, 'is_external_repository', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
return_value=json.dumps(pack_metadata_no_email_and_url))
mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
pack = repo.create_pack('PackName')
pack.pack_metadata.write_json(pack_metadata_no_email_and_url)
with ChangeCWD(repo.path):
runner = CliRunner(mix_stderr=False)
result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
assert 'Contributed packs must include email or url' in result.stdout
@pytest.mark.parametrize('url, is_valid', [
('https://github.com/pont_to_repo', False),
('some_support_url', True),
('https://github.com/pont_to_repo/issues', True),
])
def test_validate_partner_pack_metadata_url(self, mocker, repo, url, is_valid):
pack_metadata_changed_url = PACK_METADATA_PARTNER.copy()
pack_metadata_changed_url['url'] = url
mocker.patch.object(tools, 'is_external_repository', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file', return_value=None)
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
return_value=json.dumps(pack_metadata_changed_url))
mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
pack = repo.create_pack('PackName')
pack.pack_metadata.write_json(pack_metadata_changed_url)
with ChangeCWD(repo.path):
runner = CliRunner(mix_stderr=False)
result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
error_text = 'The metadata URL leads to a GitHub repo instead of a support page.'
if is_valid:
assert error_text not in result.stdout
else:
assert error_text in result.stdout
def test_validate_partner_contribute_pack_metadata_price_change(self, mocker, repo):
pack_metadata_price_changed = PACK_METADATA_PARTNER.copy()
pack_metadata_price_changed['price'] = 3
mocker.patch.object(tools, 'is_external_repository', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, '_is_pack_file_exists', return_value=True)
mocker.patch.object(PackUniqueFilesValidator, 'get_master_private_repo_meta_file',
return_value=PACK_METADATA_PARTNER)
mocker.patch.object(PackUniqueFilesValidator, '_read_file_content',
return_value=json.dumps(pack_metadata_price_changed))
mocker.patch.object(BaseValidator, 'check_file_flags', return_value=None)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': []}, 'json'))
pack = repo.create_pack('PackName')
pack.pack_metadata.write_json(pack_metadata_price_changed)
with ChangeCWD(repo.path):
runner = CliRunner(mix_stderr=False)
result = runner.invoke(main, [VALIDATE_CMD, '-i', pack.path], catch_exceptions=False)
assert 'The pack price was changed from 2 to 3 - revert the change' in result.stdout
def test_check_timestamp_format(self):
fake_validator = PackUniqueFilesValidator('fake')
good_format_timestamp = '2020-04-14T00:00:00Z'
missing_z = '2020-04-14T00:00:00'
missing_t = '2020-04-14 00:00:00Z'
only_date = '2020-04-14'
with_hyphen = '2020-04-14T00-00-00Z'
assert fake_validator.check_timestamp_format(good_format_timestamp)
assert not fake_validator.check_timestamp_format(missing_t)
assert not fake_validator.check_timestamp_format(missing_z)
assert not fake_validator.check_timestamp_format(only_date)
assert not fake_validator.check_timestamp_format(with_hyphen)
def test_validate_pack_dependencies_invalid_id_set(self, mocker, repo):
self.restart_validator()
def error_raising_function(*args, **kwargs):
raise ValueError("Couldn't find any items for pack 'PackID'. make sure your spelling is correct.")
mocker.patch(
'demisto_sdk.commands.common.hook_validations.pack_unique_files.get_core_pack_list',
side_effect=error_raising_function
)
assert not self.validator.validate_pack_dependencies()
assert Errors.invalid_id_set()[0] in self.validator.get_errors()
def test_validate_core_pack_dependencies(self):
self.restart_validator()
dependencies_packs = {'dependency_pack_1': {'mandatory': True, 'display_name': 'dependency pack 1'},
'dependency_pack_2': {'mandatory': False, 'display_name': 'dependency pack 2'},
'dependency_pack_3': {'mandatory': True, 'display_name': 'dependency pack 3'}}
assert not self.validator.validate_core_pack_dependencies(dependencies_packs)
assert Errors.invalid_core_pack_dependencies('fake_pack', ['dependency_pack_1', 'dependency_pack_3'])[0] \
in self.validator.get_errors()
def test_validate_pack_dependencies_skip_id_set_creation(self, capsys):
self.restart_validator()
self.validator.skip_id_set_creation = True
res = self.validator.validate_pack_dependencies()
self.validator.skip_id_set_creation = False # reverting to default for next tests
assert res
assert "No first level dependencies found" in capsys.readouterr().out
@pytest.mark.parametrize('usecases, is_valid, branch_usecases', [
([], True, []),
(['Phishing', 'Malware'], True, []),
(['NonApprovedUsecase', 'Case Management'], False, []),
(['NewUseCase'], True, ['NewUseCase']),
(['NewUseCase1, NewUseCase2'], False, ['NewUseCase1'])
])
def test_is_approved_usecases(self, repo, usecases, is_valid, branch_usecases, mocker):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.pack_metadata.write_json({
PACK_METADATA_USE_CASES: usecases,
PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
PACK_METADATA_TAGS: []
})
mocker.patch.object(tools, 'is_external_repository', return_value=False)
mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_usecases}, 'json'))
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator._is_approved_usecases() == is_valid
if not is_valid:
assert 'The pack metadata contains non approved usecases:' in self.validator.get_errors()
@pytest.mark.parametrize('tags, is_valid, branch_tags', [
    ([], True, []),
    (['Machine Learning', 'Spam'], True, []),
    (['NonApprovedTag', 'GDPR'], False, []),
    (['NewTag'], True, ['NewTag']),
    (['NewTag1, NewTag2'], False, ['NewTag1'])
])
def test_is_approved_tags(self, repo, tags, is_valid, branch_tags, mocker):
    """Every tag in pack metadata must be approved (built-in or in the branch-approved list)."""
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json({
        PACK_METADATA_USE_CASES: [],
        PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
        PACK_METADATA_TAGS: tags
    })
    # Mock the approved-list source so the test controls what counts as approved.
    mocker.patch.object(tools, 'is_external_repository', return_value=False)
    mocker.patch.object(tools, 'get_dict_from_file', return_value=({'approved_list': branch_tags}, 'json'))
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator._is_approved_tags() == is_valid
        if not is_valid:
            assert 'The pack metadata contains non approved tags:' in self.validator.get_errors()
@pytest.mark.parametrize('pack_content, tags, is_valid', [
    ("none", [], True),
    ("none", ["Use Case"], False),
    ("playbook", ["Use Case"], True),
    ("incident", ["Use Case"], True),
    ("layout", ["Use Case"], True),
    ("playbook", [], True),
])
def test_is_right_usage_of_usecase_tag(self, repo, pack_content, tags, is_valid):
    """The 'Use Case' tag is valid only when the pack ships a playbook, incident type or layout."""
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json({
        PACK_METADATA_USE_CASES: [],
        PACK_METADATA_SUPPORT: XSOAR_SUPPORT,
        PACK_METADATA_TAGS: tags,
    })
    # Populate the pack with the content type under test ("none" leaves it empty).
    if pack_content == "playbook":
        pack.create_playbook(name="PlaybookName")
    elif pack_content == "incident":
        pack.create_incident_type(name="IncidentTypeName")
    elif pack_content == "layout":
        pack.create_layout(name="Layout")
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator.is_right_usage_of_usecase_tag() == is_valid
@pytest.mark.parametrize('support_type, is_valid', [
    ('community', True),
    ('partner', True),
    ('xsoar', True),
    ('someName', False),
    ('test', False),
    ('developer', True)
])
def test_is_valid_support_type(self, repo, support_type, is_valid):
    """Only xsoar / partner / developer / community are accepted as pack support types.

    Note: the parametrized argument was renamed from ``type`` to ``support_type``
    to stop shadowing the ``type`` builtin (renamed in both the decorator string
    and the signature, so pytest binding is unaffected).
    """
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json({
        PACK_METADATA_USE_CASES: [],
        PACK_METADATA_SUPPORT: support_type
    })
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        assert self.validator._is_valid_support_type() == is_valid
        if not is_valid:
            assert 'Support field should be one of the following: xsoar, partner, developer or community.' in \
                self.validator.get_errors()
def test_get_master_private_repo_meta_file_running_on_master(self, mocker, repo, capsys):
    """On the master branch, price-change validation is skipped and no previous metadata file is returned."""
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json(PACK_METADATA_PARTNER)

    class MyRepo:
        # Stand-in for git.Repo: reports that we are on the master branch.
        active_branch = 'master'
    mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
    res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
    assert not res
    assert "Running on master branch - skipping price change validation" in capsys.readouterr().out
def test_get_master_private_repo_meta_file_getting_git_error(self, repo, capsys, mocker):
    """A GitCommandError while reading the previous metadata is reported and yields no file."""
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json(PACK_METADATA_PARTNER)

    class MyRepo:
        # Stand-in for git.Repo on a feature branch whose `git show` always fails.
        active_branch = 'not-master'

        class gitClass:
            def show(self, var):
                raise GitCommandError("A", "B")
        git = gitClass()
    mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
    res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
    assert not res
    assert "Got an error while trying to connect to git" in capsys.readouterr().out
def test_get_master_private_repo_meta_file_file_not_found(self, mocker, repo, capsys):
    """When `git show` finds no previous pack_metadata.json, price-change validation is skipped."""
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    pack.pack_metadata.write_json(PACK_METADATA_PARTNER)

    class MyRepo:
        # Stand-in for git.Repo on a feature branch whose `git show` returns nothing.
        active_branch = 'not-master'

        class gitClass:
            def show(self, var):
                return None
        git = gitClass()
    mocker.patch('demisto_sdk.commands.common.hook_validations.pack_unique_files.Repo', return_value=MyRepo)
    res = self.validator.get_master_private_repo_meta_file(str(pack.pack_metadata.path))
    assert not res
    assert "Unable to find previous pack_metadata.json file - skipping price change validation" in \
        capsys.readouterr().out
@pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
def test_validate_pack_readme_file_is_not_empty_partner(self, mocker, text, result):
    """Partner-supported packs must have a non-empty README (per README_INPUT_RESULTS_LIST cases)."""
    self.validator = PackUniqueFilesValidator(self.FAKE_PACK_PATH)
    self.validator.support = 'partner'
    mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
    assert self.validator.validate_pack_readme_file_is_not_empty() == result
@pytest.mark.parametrize('text, result', README_INPUT_RESULTS_LIST)
def test_validate_pack_readme_file_is_not_empty_use_case(self, mocker, text, result):
    """Packs with use cases (e.g. CortexXDR) must have a non-empty README."""
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'CortexXDR'))
    mocker.patch.object(PackUniqueFilesValidator, '_read_file_content', return_value=text)
    assert self.validator.validate_pack_readme_file_is_not_empty() == result
def test_validate_pack_readme_file_is_not_empty_missing_file(self):
    """A pack without a README file is detected as missing the README."""
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack'))
    assert self.validator._is_pack_file_exists(self.validator.readme_file) is False
def test_validate_pack_readme_valid_images(self, mocker):
    """When every absolute image URL in the README resolves (HTTP 200), validation passes without errors.

    Fix: the original asserted the identical generic substring
    ``'please repair it:\\n' not in errors`` twice before the jpg-specific
    assertion; since the generic check subsumes the URL-specific ones for
    the "not in" direction, the duplicates were dead code and are removed.
    """
    from demisto_sdk.commands.common.hook_validations.readme import \
        ReadMeValidator
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
    mocker.patch.object(ReadMeValidator, 'check_readme_relative_image_paths', return_value=[])  # Test only absolute paths
    with requests_mock.Mocker() as m:
        # Mock get requests
        m.get('https://github.com/demisto/content/raw/test1.png',
              status_code=200, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
              status_code=200, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
              status_code=200, text="Test1")
        result = self.validator.validate_pack_readme_images()
    errors = self.validator.get_errors()
    assert result
    # No "please repair it" error should exist for any URL (generic check covers all three).
    assert 'please repair it:\n' not in errors
    assert 'please repair it:\n(https://raw.githubusercontent.com/demisto/content/raw/test1.jpg)' not in errors
def test_validate_pack_readme_invalid_images(self):
    """When image URLs 404 and relative image paths exist, validation fails with the expected errors.

    Fix: the original repeated two assertion lines byte-for-byte
    (``'Detected the following image relative path: ' in errors`` and
    ``'please repair it:\\n' in errors`` each appeared twice); identical
    duplicates add no coverage and are removed.
    """
    self.validator = PackUniqueFilesValidator(os.path.join(self.FILES_PATH, 'DummyPack2'))
    with requests_mock.Mocker() as m:
        # Mock get requests
        m.get('https://github.com/demisto/content/raw/test1.png',
              status_code=404, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.png',
              status_code=404, text="Test1")
        m.get('https://raw.githubusercontent.com/demisto/content/raw/test1.jpg',
              status_code=404, text="Test1")
        result = self.validator.validate_pack_readme_images()
    errors = self.validator.get_errors()
    assert not result
    assert 'Detected the following image relative path: ' in errors
    assert 'Detected the following image relative path: (../../doc_files/Access_investigation_-_Generic_4_5.png)' in errors
    assert 'Image link was not found, either insert it or remove it:\n' in errors
    assert 'please repair it:\n' in errors
    assert 'please repair it:\n(https://raw.githubusercontent.com/demisto/content/raw/test1.jpg)' in errors
@pytest.mark.parametrize('readme_content, is_valid', [
('Hey there, just testing', True),
('This is a test. All good!', False),
])
def test_pack_readme_is_different_then_pack_description(self, repo, readme_content, is_valid):
self.restart_validator()
pack_name = 'PackName'
pack = repo.create_pack(pack_name)
pack.readme.write_text(readme_content)
pack.pack_metadata.write_json({
PACK_METADATA_DESC: 'This is a test. All good!',
})
self.validator.pack_path = pack.path
with ChangeCWD(repo.path):
assert self.validator.validate_pack_readme_and_pack_description() == is_valid
if not is_valid:
assert 'README.md content is equal to pack description. ' \
'Please remove the duplicate description from README.md file' in self.validator.get_errors()
def test_validate_pack_readme_and_pack_description_no_readme_file(self, repo):
    """With the README deleted, the description check passes but a missing-README error is recorded."""
    self.restart_validator()
    pack_name = 'PackName'
    pack = repo.create_pack(pack_name)
    self.validator.pack_path = pack.path
    with ChangeCWD(repo.path):
        os.remove(pack.readme.path)
        assert self.validator.validate_pack_readme_and_pack_description()
        assert '"README.md" file does not exist, create one in the root of the pack' in self.validator.get_errors()
        # No duplicate-description error should be emitted when there is no README at all.
        assert 'README.md content is equal to pack description. ' \
               'Please remove the duplicate description from README.md file' not in self.validator.get_errors()
def test_valid_is_pack_metadata_desc_too_long(self, repo):
    """A short description passes the length check.

    NOTE(review): both this test and the "invalid" one assert True, so the check
    presumably only warns rather than fails -- confirm against the validator.
    """
    pack_description = 'Hey there, just testing'
    assert self.validator.is_pack_metadata_desc_too_long(pack_description) is True
def test_invalid_is_pack_metadata_desc_too_long(self, mocker, repo):
    """A description over 130 characters still returns True but emits a warning via click.secho."""
    pack_description = 'This is will fail cause the description here is too long.' \
                       'test test test test test test test test test test test test test test test test test' \
                       ' test test test test test'
    error_desc = 'The description field of the pack_metadata.json file is longer than 130 characters.'
    mocker.patch("click.secho")
    assert self.validator.is_pack_metadata_desc_too_long(pack_description) is True
    # The warning text must be the first positional argument of the first secho call.
    assert error_desc in click.secho.call_args_list[0][0][0]
def test_validate_author_image_exists_valid(self, repo):
    """Partner packs pass author-image validation when the image file exists."""
    pack = repo.create_pack('MyPack')
    self.validator.metadata_content = {'support': 'partner'}
    self.validator.pack_path = pack.path
    author_image_path = pack.author_image.path
    with ChangeCWD(repo.path):
        res = self.validator.validate_author_image_exists()
        assert res
        assert f'Partners must provide a non-empty author image under the path {author_image_path}.' not in \
            self.validator.get_errors()
def test_validate_author_image_exists_invalid(self, repo):
    """Partner packs fail author-image validation once the image file is removed."""
    pack = repo.create_pack('MyPack')
    self.validator.metadata_content = {'support': 'partner'}
    self.validator.pack_path = pack.path
    author_image_path = pack.author_image.path
    with ChangeCWD(repo.path):
        os.remove(author_image_path)
        res = self.validator.validate_author_image_exists()
        assert not res
        assert f'Partners must provide a non-empty author image under the path {author_image_path}.' in \
            self.validator.get_errors()
| true | true |
f71f6ee079b895d1562283af73f4c7cb38b99b68 | 371 | py | Python | src/example02/main.py | luisibanez/cssi-appengine-introduction-01 | 617c27147f8ba91bdecc7b774ccd2d3204607514 | [
"Apache-2.0"
] | null | null | null | src/example02/main.py | luisibanez/cssi-appengine-introduction-01 | 617c27147f8ba91bdecc7b774ccd2d3204607514 | [
"Apache-2.0"
] | null | null | null | src/example02/main.py | luisibanez/cssi-appengine-introduction-01 | 617c27147f8ba91bdecc7b774ccd2d3204607514 | [
"Apache-2.0"
] | null | null | null | import webapp2
class MainHandler(webapp2.RequestHandler):
    """Handles GET / with a plain-text greeting."""

    def get(self):
        self.response.write('Hello world!')
class CountHandler(webapp2.RequestHandler):
    """Handles GET /count by writing the numbers 1..20, one per HTML line."""

    def get(self):
        for i in range(1, 21):
            self.response.write('Hello %d <br>' % i)
# WSGI application exposing the two routes to the App Engine runtime.
app = webapp2.WSGIApplication([
    ('/', MainHandler),
    ('/count', CountHandler)
], debug=True)  # debug=True echoes stack traces to the client; disable in production
| 23.1875 | 52 | 0.638814 | import webapp2
class MainHandler(webapp2.RequestHandler):
    """Handles GET / with a plain-text greeting."""

    def get(self):
        self.response.write('Hello world!')
class CountHandler(webapp2.RequestHandler):
    """Handles GET /count by writing the numbers 1..20, one per HTML line."""

    def get(self):
        for i in range(1, 21):
            self.response.write('Hello %d <br>' % i)
# WSGI application exposing the two routes to the App Engine runtime.
app = webapp2.WSGIApplication([
    ('/', MainHandler),
    ('/count', CountHandler)
], debug=True)  # debug=True echoes stack traces to the client; disable in production
| true | true |
f71f6f31a5c782d44a541c5b9d96b9cf0320881f | 2,317 | py | Python | tests/test_json.py | pestun/strace-parser | 8bcddb1670c891785c1fa798b948e9637462c474 | [
"MIT"
] | 6 | 2020-02-03T10:29:59.000Z | 2022-03-07T13:24:26.000Z | tests/test_json.py | pestun/strace-parser | 8bcddb1670c891785c1fa798b948e9637462c474 | [
"MIT"
] | 2 | 2020-11-23T03:04:00.000Z | 2021-09-25T00:39:00.000Z | tests/test_json.py | pestun/strace-parser | 8bcddb1670c891785c1fa798b948e9637462c474 | [
"MIT"
] | 2 | 2020-04-23T03:25:04.000Z | 2021-10-21T23:07:21.000Z | from importlib.resources import read_text
import pytest
from lark import Token, Tree
from strace_parser.json_transformer import to_json
from strace_parser.parser import get_parser
from . import data
def assert_fully_serialized(obj):
    """Recursively assert that `obj` holds only JSON-friendly values.

    Fails if any lark ``Tree``/``Token`` node survived the transformation, or
    if a leaf is not a str/float/bool. Dict keys and values are both checked.
    """
    original = obj

    def _check(obj):
        assert not isinstance(obj, Tree), original
        assert not isinstance(obj, Token), original
        if isinstance(obj, dict):
            for key, value in obj.items():
                _check(key)
                _check(value)
        elif isinstance(obj, list):
            for value in obj:
                _check(value)
        else:
            assert isinstance(
                obj, (str, float, bool)
            ), f"Unexpected type {obj} in {original}"

    _check(obj)
@pytest.mark.parametrize("line", read_text(data, "samples.txt").splitlines())
def test_json_fully_transforms(line):
    """Every sample strace line must parse and transform into plain JSON-serializable values."""
    tree = get_parser().parse(line + "\n")
    result = to_json(tree)
    assert_fully_serialized(result)
def test_json_transformer():
    """A full connect() strace line is transformed into the expected nested JSON structure."""
    text = (
        "1577836800.000000 connect("
        r'0<\x01\x23\x45>, {sa_family=AF_UNIX, sun_path="\x01\x23\x45"}, 123)'
        " = -123 ENOENT (No such file or directory) <0.000001>\n"
    )
    parser = get_parser()
    tree = parser.parse(text)
    result = to_json(tree)
    assert len(result) == 1
    line = result[0]
    # Expected structure: one syscall entry whose braced argument is expanded
    # into key/value pairs; scalar arguments stay as {"type": "other", ...}.
    assert {
        "timestamp": 1577836800.000000,
        "type": "syscall",
        "args": [
            {"type": "other", "value": r"0<\x01\x23\x45>"},
            {
                "type": "braced",
                "value": [
                    {
                        "type": "key_value",
                        "key": "sa_family",
                        "value": {"type": "other", "value": "AF_UNIX"},
                    },
                    {
                        "type": "key_value",
                        "key": "sun_path",
                        "value": {"type": "other", "value": r'"\x01\x23\x45"'},
                    },
                ],
            },
            {"type": "other", "value": "123"},
        ],
        "name": "connect",
        "result": "-123 ENOENT (No such file or directory) <0.000001>",
    } == line, f"Did not match {tree.pretty()}"
| 30.893333 | 79 | 0.518774 | from importlib.resources import read_text
import pytest
from lark import Token, Tree
from strace_parser.json_transformer import to_json
from strace_parser.parser import get_parser
from . import data
def assert_fully_serialized(obj):
    """Recursively assert that `obj` holds only JSON-friendly values.

    Fails if any lark ``Tree``/``Token`` node survived the transformation, or
    if a leaf is not a str/float/bool. Dict keys and values are both checked.
    """
    original = obj

    def _check(obj):
        assert not isinstance(obj, Tree), original
        assert not isinstance(obj, Token), original
        if isinstance(obj, dict):
            for key, value in obj.items():
                _check(key)
                _check(value)
        elif isinstance(obj, list):
            for value in obj:
                _check(value)
        else:
            assert isinstance(
                obj, (str, float, bool)
            ), f"Unexpected type {obj} in {original}"

    _check(obj)
@pytest.mark.parametrize("line", read_text(data, "samples.txt").splitlines())
def test_json_fully_transforms(line):
    """Every sample strace line must parse and transform into plain JSON-serializable values."""
    tree = get_parser().parse(line + "\n")
    result = to_json(tree)
    assert_fully_serialized(result)
def test_json_transformer():
    """A full connect() strace line is transformed into the expected nested JSON structure."""
    text = (
        "1577836800.000000 connect("
        r'0<\x01\x23\x45>, {sa_family=AF_UNIX, sun_path="\x01\x23\x45"}, 123)'
        " = -123 ENOENT (No such file or directory) <0.000001>\n"
    )
    parser = get_parser()
    tree = parser.parse(text)
    result = to_json(tree)
    assert len(result) == 1
    line = result[0]
    # Expected structure: one syscall entry whose braced argument is expanded
    # into key/value pairs; scalar arguments stay as {"type": "other", ...}.
    assert {
        "timestamp": 1577836800.000000,
        "type": "syscall",
        "args": [
            {"type": "other", "value": r"0<\x01\x23\x45>"},
            {
                "type": "braced",
                "value": [
                    {
                        "type": "key_value",
                        "key": "sa_family",
                        "value": {"type": "other", "value": "AF_UNIX"},
                    },
                    {
                        "type": "key_value",
                        "key": "sun_path",
                        "value": {"type": "other", "value": r'"\x01\x23\x45"'},
                    },
                ],
            },
            {"type": "other", "value": "123"},
        ],
        "name": "connect",
        "result": "-123 ENOENT (No such file or directory) <0.000001>",
    } == line, f"Did not match {tree.pretty()}"
| true | true |
f71f6f77797715e2642a7242a6f13d06b57a1ac6 | 6,833 | py | Python | loops/__init__.py | fenhl/python-loops | ea36e3b1ad68c2257071724a1f760b0e352bb29c | [
"MIT"
] | null | null | null | loops/__init__.py | fenhl/python-loops | ea36e3b1ad68c2257071724a1f760b0e352bb29c | [
"MIT"
] | null | null | null | loops/__init__.py | fenhl/python-loops | ea36e3b1ad68c2257071724a1f760b0e352bb29c | [
"MIT"
] | null | null | null | import datetime
import sys
import threading
import time
import traceback
try:
from loops.version import __version__
except ImportError:
__version__ = None
class IterThread(threading.Thread):
    """Daemon helper thread that fetches one value from an iterator.

    After the thread finishes, either ``value`` holds the fetched item or
    ``stopped`` is True (the iterator was exhausted).
    """
    def __init__(self, iterator):
        super().__init__()
        self.daemon = True
        self.iterator = iterator
        self.stopped = False

    def run(self):
        # Consume at most one item; falling through the loop means exhaustion.
        for item in self.iterator:
            self.value = item
            return
        self.stopped = True
class Loop(threading.Thread):
"""Generic loop thread that periodically checks if it should stop while waiting for the iterable to yield.
Keyword-only arguments:
iterable -- The iterable to be looped over. By default, self.get_iterable is called.
on_exception -- What to do when an exception occurs in process_value. If given, must be an iterable of actions, which will be done in order. Possible actions are 'log_stdout' (write traceback to sys.stdout), 'log_stderr' (write traceback to sys.stderr), or 'raise' (the default; lets the exception through to threading's default handling). Set to an empty iterable to ignore exceptions and continue the loop.
process_value -- A function which will be called with each yielded value as argument. Defaults to self.process_value.
sleep_length -- A datetime.timedelta representing how long to sleep between each check for the next value or the stop signal. Defaults to half a second.
"""
def __init__(self, *, iterable=None, on_exception=('raise',), process_value=None, sleep_length=datetime.timedelta(seconds=0.5)):
super().__init__()
if iterable is None:
self.iterable = self.iterable()
else:
self.iterable = iterable
self.on_exception = tuple(on_exception)
if process_value is not None:
self.process_value = process_value
self.stopped = False
self.sleep_length = sleep_length
@staticmethod
def iterable():
"""The iterable to be looped over. Must be overridden in a subclass, or by passing the `iterable' keyword argument to the constructor."""
raise NotImplementedError('iterable must be overwritten in subclasses, or set explicitly')
def run(self):
iterator = iter(self.iterable)
iter_thread = IterThread(iterator)
iter_thread.start() # get the first value
while not self.stopped:
if not iter_thread.is_alive():
if iter_thread.stopped: # iterator exhausted
return
else: # iterator has yielded a value
try:
self.process_value(iter_thread.value)
except:
for exception_action in self.on_exception:
if exception_action == 'log_stdout':
traceback.print_exc(file=sys.stdout)
elif exception_action == 'log_stderr':
traceback.print_exc(file=sys.stderr)
elif exception_action == 'raise':
raise
else:
raise ValueError('Unrecognized exception action: {!r}'.format(exception_action))
iter_thread = IterThread(iterator)
iter_thread.start() # get the next value
continue
time.sleep(self.sleep_length.total_seconds())
@staticmethod
def process_value(value):
"""Will be called with each yielded value as argument. Must be overridden in a subclass, or by passing the `process_value' keyword argument to the constructor."""
raise NotImplementedError('process_value must be overwritten in subclasses, or set explicitly')
def start(self):
self.stopped = False
super().start()
def stop(self):
self.stopped = True
def timeout_single(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
    """Yield from `iterable`, aborting if any single iteration step takes too long.

    Required arguments:
    iterable -- The iterable to yield from.
    timeout -- A datetime.timedelta; the maximum time the iterable may take to produce one value. The timer restarts after every yielded value.

    Optional arguments:
    sleep_length -- A datetime.timedelta polling interval, truncated to the remaining timeout. Defaults to half a second.

    Yields:
    The values from `iterable', until it is exhausted or a single step exceeds `timeout'.
    """
    zero = datetime.timedelta()
    source = iter(iterable)
    fetcher = IterThread(source)
    fetcher.start()  # fetch the first value in the background
    remaining = timeout
    while remaining > zero:
        nap = sleep_length if sleep_length < remaining else remaining
        time.sleep(nap.total_seconds())
        remaining -= nap
        if fetcher.is_alive():
            continue  # still waiting for the next value
        if fetcher.stopped:
            return  # iterator exhausted
        yield fetcher.value
        remaining = timeout  # per-value timeout: restart the clock
        fetcher = IterThread(source)
        fetcher.start()  # fetch the next value
def timeout_total(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
    """Yield from `iterable`, aborting once `timeout` has elapsed since iteration started.

    Required arguments:
    iterable -- The iterable to yield from.
    timeout -- A datetime.timedelta; iteration is aborted this long after it begins (the clock is never restarted).

    Optional arguments:
    sleep_length -- A datetime.timedelta polling interval, truncated to the remaining timeout. Defaults to half a second.

    Yields:
    The values from `iterable', until it is exhausted or `timeout' is reached.
    """
    zero = datetime.timedelta()
    source = iter(iterable)
    fetcher = IterThread(source)
    fetcher.start()  # fetch the first value in the background
    remaining = timeout
    while remaining > zero:
        nap = sleep_length if sleep_length < remaining else remaining
        time.sleep(nap.total_seconds())
        remaining -= nap
        if fetcher.is_alive():
            continue  # still waiting for the next value
        if fetcher.stopped:
            return  # iterator exhausted
        yield fetcher.value
        fetcher = IterThread(source)
        fetcher.start()  # fetch the next value
| 46.80137 | 412 | 0.658861 | import datetime
import sys
import threading
import time
import traceback
try:
from loops.version import __version__
except ImportError:
__version__ = None
class IterThread(threading.Thread):
    """Daemon helper thread that fetches one value from an iterator.

    After the thread finishes, either ``value`` holds the fetched item or
    ``stopped`` is True (the iterator was exhausted).
    """
    def __init__(self, iterator):
        super().__init__()
        self.daemon = True
        self.iterator = iterator
        self.stopped = False

    def run(self):
        # Consume at most one item; falling through the loop means exhaustion.
        for item in self.iterator:
            self.value = item
            return
        self.stopped = True
class Loop(threading.Thread):
    """Generic loop thread that periodically checks if it should stop while waiting for the iterable to yield.

    Keyword-only arguments:
    iterable -- The iterable to be looped over. By default, self.iterable() is called.
    on_exception -- Iterable of actions for exceptions raised by process_value: 'log_stdout', 'log_stderr', or 'raise' (the default). Empty iterable ignores exceptions and continues.
    process_value -- Called with each yielded value. Defaults to self.process_value.
    sleep_length -- datetime.timedelta between checks for the next value or the stop signal. Defaults to half a second.
    """
    def __init__(self, *, iterable=None, on_exception=('raise',), process_value=None, sleep_length=datetime.timedelta(seconds=0.5)):
        super().__init__()
        if iterable is None:
            # Fall back to the subclass-provided factory (raises if not overridden).
            self.iterable = self.iterable()
        else:
            self.iterable = iterable
        self.on_exception = tuple(on_exception)
        if process_value is not None:
            self.process_value = process_value
        self.stopped = False
        self.sleep_length = sleep_length

    @staticmethod
    def iterable():
        """Must be overridden in a subclass, or set via the `iterable` constructor argument."""
        raise NotImplementedError('iterable must be overwritten in subclasses, or set explicitly')

    def run(self):
        iterator = iter(self.iterable)
        iter_thread = IterThread(iterator)
        iter_thread.start()  # get the first value
        while not self.stopped:
            if not iter_thread.is_alive():
                if iter_thread.stopped:  # iterator exhausted
                    return
                else:  # iterator has yielded a value
                    try:
                        self.process_value(iter_thread.value)
                    # Fixed: was a bare `except:` that also swallowed SystemExit
                    # and KeyboardInterrupt.
                    except Exception:
                        for exception_action in self.on_exception:
                            if exception_action == 'log_stdout':
                                # Fixed: `traceback` and `sys` were never imported, so the
                                # log_* actions used to die with a NameError instead of logging.
                                traceback.print_exc(file=sys.stdout)
                            elif exception_action == 'log_stderr':
                                traceback.print_exc(file=sys.stderr)
                            elif exception_action == 'raise':
                                raise
                            else:
                                raise ValueError('Unrecognized exception action: {!r}'.format(exception_action))
                    iter_thread = IterThread(iterator)
                    iter_thread.start()  # get the next value
                    continue
            time.sleep(self.sleep_length.total_seconds())

    @staticmethod
    def process_value(value):
        """Must be overridden in a subclass, or set via the `process_value` constructor argument."""
        raise NotImplementedError('process_value must be overwritten in subclasses, or set explicitly')

    def start(self):
        """Start the loop thread, clearing any previous stop request."""
        self.stopped = False
        super().start()

    def stop(self):
        """Ask the loop to terminate at its next wake-up (cooperative, not immediate)."""
        self.stopped = True
def timeout_single(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
    """Yield from `iterable`, aborting if any single iteration step exceeds `timeout`.

    The timer restarts after every yielded value; `sleep_length` is the polling
    interval, truncated to the remaining timeout.
    """
    zero = datetime.timedelta()
    source = iter(iterable)
    fetcher = IterThread(source)
    fetcher.start()  # fetch the first value in the background
    remaining = timeout
    while remaining > zero:
        nap = sleep_length if sleep_length < remaining else remaining
        time.sleep(nap.total_seconds())
        remaining -= nap
        if fetcher.is_alive():
            continue  # still waiting for the next value
        if fetcher.stopped:
            return  # iterator exhausted
        yield fetcher.value
        remaining = timeout  # per-value timeout: restart the clock
        fetcher = IterThread(source)
        fetcher.start()  # fetch the next value
def timeout_total(iterable, timeout, sleep_length=datetime.timedelta(seconds=0.5)):
    """Yield from `iterable`, aborting once `timeout` has elapsed since iteration started.

    Unlike timeout_single, the clock is never restarted; `sleep_length` is the
    polling interval, truncated to the remaining timeout.
    """
    zero = datetime.timedelta()
    source = iter(iterable)
    fetcher = IterThread(source)
    fetcher.start()  # fetch the first value in the background
    remaining = timeout
    while remaining > zero:
        nap = sleep_length if sleep_length < remaining else remaining
        time.sleep(nap.total_seconds())
        remaining -= nap
        if fetcher.is_alive():
            continue  # still waiting for the next value
        if fetcher.stopped:
            return  # iterator exhausted
        yield fetcher.value
        fetcher = IterThread(source)
        fetcher.start()  # fetch the next value
| true | true |
f71f6ffc94e95da06954304971002720ccddd90b | 537 | py | Python | plugins/yt.py | ctburley/akesho-irc3 | 7d27a45f401ffcfa3a380c7de01687cbe69b874d | [
"MIT"
] | 3 | 2018-06-03T11:55:28.000Z | 2020-01-03T02:33:22.000Z | plugins/yt.py | ctburley/akesho-irc3 | 7d27a45f401ffcfa3a380c7de01687cbe69b874d | [
"MIT"
] | 14 | 2018-05-07T13:33:21.000Z | 2021-04-30T20:46:54.000Z | plugins/yt.py | ctburley/akesho-irc3 | 7d27a45f401ffcfa3a380c7de01687cbe69b874d | [
"MIT"
] | 1 | 2018-06-04T04:45:58.000Z | 2018-06-04T04:45:58.000Z | import irc3
from irc3.plugins.command import command
@irc3.plugin
class Plugin:
    """irc3 plugin intercepting the legacy ``.yt`` command and pointing users at ``.gse youtube``."""

    def __init__(self, bot):
        self.bot = bot
        print("yt loaded")

    # Matches a PRIVMSG of the form ".yt <target>" (optionally preceded by IRCv3 tags)
    # and captures nick, host mask, channel and the search target.
    @irc3.event('^(@(?P<tags>\S+) )?:(?P<nick>\S+)(?P<mask>!\S+@\S+) PRIVMSG (?P<channel>\S+) :\.yt\s+(?P<target>.*?)$')
    def yt(self, nick=None, mask=None, channel=None, target=None, **kw):
        # Only respond in channels where the bot is allowed to obey commands.
        if self.bot.obeying_commands(channel):
            target = target.strip()
            self.bot.privmsg(channel, "Hey " + nick + " .yt isn't working right now, try '.gse youtube "+target+"' instead! <3")
| 35.8 | 122 | 0.621974 | import irc3
from irc3.plugins.command import command
@irc3.plugin
class Plugin:
    """irc3 plugin intercepting the legacy ``.yt`` command and pointing users at ``.gse youtube``."""

    def __init__(self, bot):
        self.bot = bot
        print("yt loaded")

    # Matches a PRIVMSG of the form ".yt <target>" (optionally preceded by IRCv3 tags)
    # and captures nick, host mask, channel and the search target.
    @irc3.event('^(@(?P<tags>\S+) )?:(?P<nick>\S+)(?P<mask>!\S+@\S+) PRIVMSG (?P<channel>\S+) :\.yt\s+(?P<target>.*?)$')
    def yt(self, nick=None, mask=None, channel=None, target=None, **kw):
        # Only respond in channels where the bot is allowed to obey commands.
        if self.bot.obeying_commands(channel):
            target = target.strip()
            self.bot.privmsg(channel, "Hey " + nick + " .yt isn't working right now, try '.gse youtube "+target+"' instead! <3")
| true | true |
f71f70018ea2bb974a7995e741772da0a860e199 | 11,666 | py | Python | mesh_voxel_color/color_pil_cupy.py | naysok/Mesh_Voxel_Color | 9ca3549822ada1be67efcb3e47cf4c193d54cbaa | [
"MIT"
] | null | null | null | mesh_voxel_color/color_pil_cupy.py | naysok/Mesh_Voxel_Color | 9ca3549822ada1be67efcb3e47cf4c193d54cbaa | [
"MIT"
] | null | null | null | mesh_voxel_color/color_pil_cupy.py | naysok/Mesh_Voxel_Color | 9ca3549822ada1be67efcb3e47cf4c193d54cbaa | [
"MIT"
] | null | null | null | import sys
sys.path.append("C:\\Users\\ysoky\\Documents\\Mesh_Voxel_Color\\_module_\\Mesh_Contour")
import math
import cupy as cp
import random
from PIL import Image, ImageDraw, ImageOps, ImageEnhance
from mesh_contour import stl_parser
sp = stl_parser.StlParser()
from .import util
ut = util.Util()
class ColorPILCupy():
###############################
#### ###
#### I/O + Utilities ###
#### ###
###############################
def remap_number_cp(self, arr, old_min, old_max, target_min, target_max):
    """Linearly remap `arr` from [old_min, old_max] onto [target_min, target_max].

    Works element-wise on cupy arrays and on plain Python numbers.
    Note: old_max must differ from old_min (the input span is a divisor).
    """
    normalized = (arr - old_min) / (old_max - old_min)
    return normalized * (target_max - target_min) + target_min
def get_points_from_stl(self, file_path):
    """Parse the STL file at `file_path` into vertex coordinates via the Mesh_Contour stl parser."""
    ### Point From STL
    pts = sp.stl2points(file_path)
    return pts
def get_points_from_stl_np(self, file_path, volume_size, canvas_size):
    """Load STL vertices as a cupy array with a leading batch axis, rescaled from volume to canvas coordinates.

    NOTE(review): despite the "_np" suffix the array is a cupy array, not numpy.
    """
    ### Cupy
    ### Point From STL
    vertices = sp.stl2points(file_path)
    pts_cp = cp.array([vertices])
    return self.remap_number_cp(pts_cp, 0, volume_size, 0, canvas_size)
def get_points_from_txt_np(self, file_path, volume_size, canvas_size):
    """Read comma-separated "x,y,z" lines from a text file into a cupy array with a leading batch axis, rescaled from volume to canvas coordinates.

    NOTE(review): despite the "_np" suffix the array is a cupy array, not numpy.
    """
    ### Cupy
    with open(file_path) as f:
        # Take the first three comma-separated fields of every line as floats.
        xyz_list = [[float(value) for value in line.split(",")[:3]] for line in f.readlines()]
    pts_cp = cp.array([xyz_list])
    return self.remap_number_cp(pts_cp, 0, volume_size, 0, canvas_size)
################################################################################
######################################
#### ###
#### Image Processing (PIL) ###
#### ###
######################################
def open_image(self, path):
    """Open the image at `path` with PIL and return the Image object (pixel data loads lazily)."""
    img = Image.open(path)
    return img
def export_image(self, img, path):
    """Save `img` to `path` and log the destination (quality=100 applies to lossy formats such as JPEG)."""
    img.save(path, quality=100)
    print("Export : {}".format(path))
def create_canvas(self, canvas_size):
    """Return a new white RGB square image of canvas_size x canvas_size pixels."""
    new = Image.new("RGB", (canvas_size, canvas_size), (255, 255, 255))
    return new
def create_canvas_alpha(self, canvas_size):
    """Return a new fully-transparent RGBA square image of canvas_size x canvas_size pixels."""
    new = Image.new("RGBA", (canvas_size, canvas_size), (0, 0, 0, 0))
    return new
################################################################################
###########################
#### ###
#### Math (Cupy) ###
#### ###
###########################
def clac_all_distance(self, pos, pts):
    """Return, for each query position, the distance to its nearest point in `pts` (computed on the GPU).

    NOTE(review): keeps the original "clac" typo in the name; renaming would break callers.
    Assumes `pos` and `pts` broadcast so that (pos - pts) enumerates every
    (position, point) pair -- e.g. pos (k, 1, 3) against pts (1, m, 3); TODO
    confirm against the gen_disctance_list callers.
    """
    ### Calc Distance with Cupy
    ### Generate Vector
    v = pos - pts
    # print("v.shape :", v.shape)
    # print(v)
    vt = v.T
    ### Calc Distance: Euclidean norm over the x/y/z components
    d = cp.sqrt((vt[0] * vt[0]) + (vt[1] * vt[1]) + (vt[2] * vt[2]))
    # print("d.shape :", d.shape)
    ### Select Min Value (nearest point per query position)
    dm_cp = cp.amin(d, axis=0)
    # print("dm.shape :", dm_cp.shape)
    return dm_cp
def gen_disctance_list(self, w, h, height, pts_cp):
    """For every pixel of a w x h slice at z=`height`, return the distance to the nearest mesh point.

    NOTE(review): keeps the original "disctance" typo in the name; renaming would break callers.
    """
    ### Generate Distance-List
    # print("Distance")
    px_list = []
    for i in range(w):
        for j in range(h):
            px_list.append([[j, i, height]])
    ### pos-numpy array (from Image)
    pos_cp = cp.array(px_list)
    # print("pos.shape :", pos_cp.shape)
    ### Separate Process
    ### https://qiita.com/kazuki_hayakawa/items/557edd922f9f1fafafe0
    SPLIT = 250  # chunk count: bounds GPU memory used by the broadcasted (pos - pts) tensor
    pos_cp_split = cp.array_split(pos_cp, SPLIT)
    # print(len(pos_cp_split))
    dist_tmp = []
    for i in range(SPLIT):
        tmp_p = pos_cp_split[i]
        # print("pts.shape :", tmp_p.shape)
        ### pts-numpy array (from STL)
        # print("pts.shape :", pts_cp.shape)
        ###
        d = self.clac_all_distance(tmp_p, pts_cp)
        dist_tmp.append(d)
    dist_list = cp.concatenate(dist_tmp, 0)
    # print(len(dist_list))
    return dist_list
def gen_disctance_list_ds(self, w, h, height, downsampling_xy, pts_cp):
    """Like gen_disctance_list, but for a downsampled w x h grid: each pixel index is multiplied by `downsampling_xy` to map back to full-resolution coordinates before measuring distances."""
    ### Generate Distance-List
    ### with DownSampling
    # print("Distance")
    px_list = []
    for i in range(w):
        for j in range(h):
            px = [j * downsampling_xy, i * downsampling_xy, height]
            px_list.append([px])
    ### pos-numpy array (from Image)
    pos_cp = cp.array(px_list)
    # print(pos_cp)
    # print("pos.shape :", pos_cp.shape)
    ### Separate Process
    ### https://qiita.com/kazuki_hayakawa/items/557edd922f9f1fafafe0
    SPLIT = 250  # chunk count: bounds GPU memory used by the broadcasted (pos - pts) tensor
    pos_cp_split = cp.array_split(pos_cp, SPLIT)
    # print(len(pos_cp_split))
    dist_tmp = []
    for i in range(SPLIT):
        tmp_p = pos_cp_split[i]
        # print("pts.shape :", tmp_p.shape)
        ### pts-numpy array (from STL)
        # print("pts.shape :", pts_cp.shape)
        ###
        d = self.clac_all_distance(tmp_p, pts_cp)
        dist_tmp.append(d)
    dist_list = cp.concatenate(dist_tmp, 0)
    # print(len(dist_list))
    return dist_list
################################################################################
####################
### ###
### Draw ###
### ###
####################
def scan_image_calc_color(self, file_path, height, pts_cp, downsampling_xy):
### Open Image
img_src = self.open_image(file_path)
w, h = img_src.size
### DownSampling
ww = int(w / downsampling_xy)
hh = int(h / downsampling_xy)
img = img_src.resize((ww, hh), Image.LANCZOS)
### Read Shape
px = img.getdata()
px_cp = cp.array(px)
# print("px_cp.shape :", px_cp.shape)
### Create Result Canvas
img_tmp = self.create_canvas_alpha(ww)
img_result = self.create_canvas_alpha(w)
### Segment Contour True/False
px_seg_0 = cp.amax(px_cp)
### Contour : False
if px_seg_0 < 127:
### Export None-Image
px_result = [(0, 0, 0, 0) for i in range(w) for j in range(h)]
img_result.putdata(tuple(px_result))
return img_result
### Contour : True
else:
### Running on Cuda
# print("Running on Cuda !!")
################################################################################################
###########################
### ###
### Calc Distance ###
### ###
###########################
# print("Distance")
### [X] Clac Distance
# dist_list = self.gen_disctance_list(w, h, height, pts_cp)
### [O] Clac Distance with DownSampling
dist_list = self.gen_disctance_list_ds(ww, hh, height, downsampling_xy, pts_cp)
################################################################################################
############################################
### ###
### Generate Color From Distance ###
### ###
############################################
# print("Color")
### Define Colors
################################################################################################
### Offset Pattern (Small)
dist_src = dist_list.tolist()
# print("len(dist_src) :", len(dist_src))
clrs = []
amp = 1 / 2
for d in dist_src:
c = int((math.sin(d * amp) + 1) * (1 / 2) * 255)
cc = 255 - c
clrs.append([c, c, cc, 255])
clrs_tuple = tuple(map(tuple, clrs))
### Generate New Image
img_tmp.putdata(tuple(clrs_tuple))
################################################################################################
"""
### Offset Pattern (Large)
dist_src = dist_list.tolist()
# print("len(dist_src) :", len(dist_src))
clrs = []
for d in dist_src:
th = 30
if d < (th * 1):
clrs.append([255, 0, 0, 255])
elif d < (th * 2):
clrs.append([0, 255, 0, 255])
elif d < (th * 3):
clrs.append([0, 0, 255, 255])
else:
clrs.append([255, 255, 255, 255])
clrs_tuple = tuple(map(tuple, clrs))
### Generate New Image
img_tmp.putdata(tuple(clrs_tuple))
"""
################################################################################################
"""
### Test Distance Map
dist_remap = self.remap_number_cp(dist_list, 0, 200, 0, 255)
dist_remap = dist_remap.astype('int64')
# print("dist_remap.shape :", dist_remap.shape)
### Fill Array (255)
alpha_array = cp.ones(dist_list.shape) * 255
alpha_array = alpha_array.astype('int64')
dist_img = cp.stack([dist_remap, dist_remap, dist_remap, alpha_array])
dist_img = dist_img.T
# print("dist_img.shape :", dist_img.shape)
# print(dist_img)
dist_4 = dist_img.tolist()
dist_4 = tuple(map(tuple, dist_4))
# print("type(dist_4) :", type(dist_4))
### Generate New Image
img_tmp.putdata(tuple(dist_4))
"""
################################################################################################
#########################
### ###
### Composite ###
### ###
#########################
# print("Composite")
### Scaling
img_dist = img_tmp.resize((w, h), Image.LANCZOS)
### Create Canvas for Composite
img_canvas = self.create_canvas_alpha(w)
### Define Mask
img_mask = img_src.convert("L")
### Composite
img_result = Image.composite(img_dist, img_canvas, img_mask)
### Flip
### Image Coordination >> Rhino Coordination
img_flip = ImageOps.flip(img_result)
return img_flip | 27.193473 | 109 | 0.407252 | import sys
sys.path.append("C:\\Users\\ysoky\\Documents\\Mesh_Voxel_Color\\_module_\\Mesh_Contour")
import math
import cupy as cp
import random
from PIL import Image, ImageDraw, ImageOps, ImageEnhance
from mesh_contour import stl_parser
sp = stl_parser.StlParser()
from .import util
ut = util.Util()
class ColorPILCupy():
| true | true |
f71f7031c4f8fd46d4b8fe54a23ba04cced48350 | 644 | py | Python | coronavirus/json_update.py | StevenHuang2020/WebSpider | 40ab36416e061da3eb98a3174f18f50260b2e2d3 | [
"MIT"
] | null | null | null | coronavirus/json_update.py | StevenHuang2020/WebSpider | 40ab36416e061da3eb98a3174f18f50260b2e2d3 | [
"MIT"
] | null | null | null | coronavirus/json_update.py | StevenHuang2020/WebSpider | 40ab36416e061da3eb98a3174f18f50260b2e2d3 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
# Date: 27/Apr/2020
# Author: Steven Huang, Auckland, NZ
# License: MIT License
"""
Description: Update json file
"""
import json
import datetime
def write_file(file, content):
    """Write *content* to *file* as UTF-8 text, forcing LF line endings."""
    with open(file, mode='w', newline='\n', encoding='utf-8') as out:
        out.write(content)
def get_datetime():
    """Return the current local time formatted as 'YYYY-MM-DD HH:MM:SS'."""
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
def update_json(file=r'update.json'):
    """Write a small JSON status payload whose message is the current timestamp.

    The payload keeps the schemaVersion/label/message key order of the
    original literal, with the message set to :func:`get_datetime`.
    """
    payload = {
        "schemaVersion": 1,
        "label": "Last update",
        "message": get_datetime(),
    }
    write_file(file, json.dumps(payload))
| 23 | 89 | 0.641304 |
import json
import datetime
def write_file(file, content):
with open(file, 'w', newline='\n', encoding='utf-8') as f:
f.write(content)
def get_datetime():
daytime = datetime.datetime.now()
return str(daytime.strftime("%Y-%m-%d %H:%M:%S"))
def update_json(file=r'update.json'):
info = {"schemaVersion": 1, "label": "Last update", "message": "2020-01-01 01:01:01"}
info["message"] = get_datetime()
write_file(file, json.dumps(info))
| true | true |
f71f706d2b3fdf3882c5261d9237067d22214993 | 694 | py | Python | src/decisionengine_modules/glideinwms/tests/test_UniversalFrontendParams.py | BrunoCoimbra/decisionengine_modules | bfd14644eb2e16b72b75fdcc3ebe8ad1323b904f | [
"Apache-2.0"
] | null | null | null | src/decisionengine_modules/glideinwms/tests/test_UniversalFrontendParams.py | BrunoCoimbra/decisionengine_modules | bfd14644eb2e16b72b75fdcc3ebe8ad1323b904f | [
"Apache-2.0"
] | null | null | null | src/decisionengine_modules/glideinwms/tests/test_UniversalFrontendParams.py | BrunoCoimbra/decisionengine_modules | bfd14644eb2e16b72b75fdcc3ebe8ad1323b904f | [
"Apache-2.0"
] | null | null | null | from decisionengine_modules.glideinwms.tests.fixtures import ( # noqa: F401
gwms_module_config,
gwms_module_invalid_config,
gwms_src_dir,
)
from decisionengine_modules.glideinwms.UniversalFrontendParams import UniversalFrontendParams
def test_instantiation(gwms_src_dir, gwms_module_config):  # noqa: F811
    """A valid config yields params exposing the expected frontend name."""
    frontend_params = UniversalFrontendParams(gwms_src_dir, gwms_module_config)
    assert "mock_frontend" == frontend_params.subparams["frontend_name"]
def test_config_error(gwms_src_dir, gwms_module_invalid_config):  # noqa: F811
    """An invalid configuration must make UniversalFrontendParams raise RuntimeError.

    The original try/except silently passed when no exception was raised at
    all; the ``else`` clause now fails the test in that case.  A non-RuntimeError
    exception propagates and fails the test as before.
    """
    try:
        _ = UniversalFrontendParams(gwms_src_dir, gwms_module_invalid_config)
    except RuntimeError:
        pass  # expected
    else:
        raise AssertionError("UniversalFrontendParams did not raise RuntimeError")
| 36.526316 | 93 | 0.792507 | from decisionengine_modules.glideinwms.tests.fixtures import (
gwms_module_config,
gwms_module_invalid_config,
gwms_src_dir,
)
from decisionengine_modules.glideinwms.UniversalFrontendParams import UniversalFrontendParams
def test_instantiation(gwms_src_dir, gwms_module_config):
params = UniversalFrontendParams(gwms_src_dir, gwms_module_config)
assert params.subparams["frontend_name"] == "mock_frontend"
def test_config_error(gwms_src_dir, gwms_module_invalid_config):
try:
_ = UniversalFrontendParams(gwms_src_dir, gwms_module_invalid_config)
except Exception as e:
assert isinstance(e, RuntimeError)
| true | true |
f71f73422050b5b292bd93215895e5ecf77f8aa9 | 4,482 | py | Python | app/run.py | imisi-akande/disaster-response-pipeline | d691e643c57e45b226ca3cb2c0b4a708c7edfe8b | [
"MIT"
] | null | null | null | app/run.py | imisi-akande/disaster-response-pipeline | d691e643c57e45b226ca3cb2c0b4a708c7edfe8b | [
"MIT"
] | null | null | null | app/run.py | imisi-akande/disaster-response-pipeline | d691e643c57e45b226ca3cb2c0b4a708c7edfe8b | [
"MIT"
] | null | null | null | import json
import plotly
import pandas as pd
import nltk
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize, sent_tokenize
from nltk import pos_tag, word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar
from sklearn.base import BaseEstimator, TransformerMixin
import joblib
from sqlalchemy import create_engine
app = Flask(__name__)
class StartingVerbExtractor(BaseEstimator, TransformerMixin):
    """Sklearn transformer flagging texts whose sentences start with a verb or 'RT'."""

    def starting_verb(self, text):
        """Return True if any sentence in *text* starts with a verb (VB/VBP) or 'RT'."""
        sentence_list = nltk.sent_tokenize(text)
        for sentence in sentence_list:
            pos_tags = nltk.pos_tag(tokenize(sentence))
            # Guard: a sentence can tokenize to nothing (e.g. stray
            # punctuation); pos_tags[0] would then raise IndexError.
            if not pos_tags:
                continue
            first_word, first_tag = pos_tags[0]
            if first_tag in ['VB', 'VBP'] or first_word == 'RT':
                return True
        return False

    def fit(self, X, y=None):
        """No-op fit; present for sklearn pipeline compatibility."""
        return self

    def transform(self, X):
        """Map each document in X to a one-column boolean DataFrame."""
        X_tagged = pd.Series(X).apply(self.starting_verb)
        return pd.DataFrame(X_tagged)
def tokenize(text):
    """Tokenize *text* into lower-cased, stripped, lemmatized word tokens."""
    lemmatizer = WordNetLemmatizer()
    return [
        lemmatizer.lemmatize(token).lower().strip()
        for token in word_tokenize(text)
    ]
# Load the cleaned messages table once at import time; ``df`` backs both the
# dashboard plots and the category column names used for classification.
# NOTE(review): relative paths assume the app is started from its own
# directory -- confirm against the deployment setup.
engine = create_engine('sqlite:///../data/disaster_response.db')
df = pd.read_sql_table('disaster_response_table', engine)
# Load the trained classifier artifact produced by the training pipeline.
model = joblib.load("../models/classifier.pkl")
# index webpage displays cool visuals and receives user input text for model
@app.route('/')
@app.route('/index')
def index():
    """Render the dashboard page with genre and category overview plots."""
    # extract data needed for visuals
    # TODO: Below is an example - modify to extract data for your own visuals
    genre_counts = df.groupby('genre').count()['message']
    genre_percent = round(100*genre_counts/genre_counts.sum(), 2)
    genre_names = list(genre_counts.index)
    # Category columns start at index 4; count messages with a non-zero label.
    category_names = df.iloc[:,4:].columns
    category_boolean = (df.iloc[:,4:] != 0).sum().values
    # create visuals
    # TODO: Below is an example - modify to create your own visuals
    graphs = [
            # GRAPH 1 - genre graph
        {
            "data": [
              {
                "type": "pie",
                "uid": "f4de1f",
                "hole": 0.4,
                "name": "Genre",
                "pull": 0,
                # NOTE(review): plotly's pie "domain" normally takes fractional
                # x/y plot-area ranges; feeding genre_percent/genre_names here
                # looks unintended -- confirm against the rendered chart.
                "domain": {
                    "x": genre_percent,
                    "y": genre_names
                },
                "marker": {
                    "colors": [
                        "#7fc97f",
                        "#bc5090",
                        "#ffa600"
                    ]
                },
                "textinfo": "label+value",
                "hoverinfo": "all",
                "labels": genre_names,
                "values": genre_counts
              }
            ],
            "layout": {
              "title": "Count and Percentage of Messages by Genre"
            }
        },
            # GRAPH 2 - category graph
        {
            'data': [
                Bar(
                    x=category_names,
                    y=category_boolean
                )
            ],
            'layout': {
                'title': 'Distribution of Message Categories',
                'yaxis': {
                    'title': "Count"
                },
                'xaxis': {
                    'title': "Category",
                    'tickangle': 35
                }
            }
        }
    ]
    # encode plotly graphs in JSON for embedding in the template
    ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
    graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
    # render web page with plotly graphs
    return render_template('master.html', ids=ids, graphJSON=graphJSON)
# Results page: classifies the user's query and shows per-category labels.
@app.route('/go')
def go():
    """Classify the message supplied via ?query= and render the results page."""
    query = request.args.get('query', '')
    # One predicted label per category; pair each category name with its label.
    predicted = model.predict([query])[0]
    classification_results = dict(zip(df.columns[4:], predicted))
    return render_template(
        'go.html',
        query=query,
        classification_result=classification_results,
    )
def main():
    """Run the Flask development server on all interfaces, port 5000."""
    # NOTE(review): debug=True combined with host 0.0.0.0 exposes the
    # interactive debugger to the network -- keep for local development only.
    app.run(host='0.0.0.0', port=5000, debug=True)
if __name__ == '__main__':
    main()
import plotly
import pandas as pd
import nltk
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize, sent_tokenize
from nltk import pos_tag, word_tokenize
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import word_tokenize
from flask import Flask
from flask import render_template, request, jsonify
from plotly.graph_objs import Bar
from sklearn.base import BaseEstimator, TransformerMixin
import joblib
from sqlalchemy import create_engine
app = Flask(__name__)
class StartingVerbExtractor(BaseEstimator, TransformerMixin):
def starting_verb(self, text):
sentence_list = nltk.sent_tokenize(text)
for sentence in sentence_list:
pos_tags = nltk.pos_tag(tokenize(sentence))
first_word, first_tag = pos_tags[0]
if first_tag in ['VB', 'VBP'] or first_word == 'RT':
return True
return False
def fit(self, X, y=None):
return self
def transform(self, X):
X_tagged = pd.Series(X).apply(self.starting_verb)
return pd.DataFrame(X_tagged)
def tokenize(text):
tokens = word_tokenize(text)
lemmatizer = WordNetLemmatizer()
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok).lower().strip()
clean_tokens.append(clean_tok)
return clean_tokens
engine = create_engine('sqlite:///../data/disaster_response.db')
df = pd.read_sql_table('disaster_response_table', engine)
model = joblib.load("../models/classifier.pkl")
@app.route('/')
@app.route('/index')
def index():
genre_counts = df.groupby('genre').count()['message']
genre_percent = round(100*genre_counts/genre_counts.sum(), 2)
genre_names = list(genre_counts.index)
category_names = df.iloc[:,4:].columns
category_boolean = (df.iloc[:,4:] != 0).sum().values
graphs = [
{
"data": [
{
"type": "pie",
"uid": "f4de1f",
"hole": 0.4,
"name": "Genre",
"pull": 0,
"domain": {
"x": genre_percent,
"y": genre_names
},
"marker": {
"colors": [
"#7fc97f",
"#bc5090",
"#ffa600"
]
},
"textinfo": "label+value",
"hoverinfo": "all",
"labels": genre_names,
"values": genre_counts
}
],
"layout": {
"title": "Count and Percentage of Messages by Genre"
}
},
{
'data': [
Bar(
x=category_names,
y=category_boolean
)
],
'layout': {
'title': 'Distribution of Message Categories',
'yaxis': {
'title': "Count"
},
'xaxis': {
'title': "Category",
'tickangle': 35
}
}
}
]
ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)]
graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
return render_template('master.html', ids=ids, graphJSON=graphJSON)
@app.route('/go')
def go():
query = request.args.get('query', '')
classification_labels = model.predict([query])[0]
classification_results = dict(zip(df.columns[4:], classification_labels))
return render_template(
'go.html',
query=query,
classification_result=classification_results
)
def main():
app.run(host='0.0.0.0', port=5000, debug=True)
if __name__ == '__main__':
main() | true | true |
f71f7408f54375e5147ae5b03a495305fdff73de | 1,853 | py | Python | mercury_agent/procedures/inspector.py | jr0d/mercury-agent | 12b75ecc951d3ab5cd15c5213df2412b108cf47c | [
"Apache-2.0"
] | null | null | null | mercury_agent/procedures/inspector.py | jr0d/mercury-agent | 12b75ecc951d3ab5cd15c5213df2412b108cf47c | [
"Apache-2.0"
] | 4 | 2017-11-01T16:25:49.000Z | 2018-08-22T13:50:23.000Z | mercury_agent/procedures/inspector.py | jr0d/mercury-agent | 12b75ecc951d3ab5cd15c5213df2412b108cf47c | [
"Apache-2.0"
] | 5 | 2017-10-19T12:40:15.000Z | 2018-08-21T20:18:54.000Z | # Copyright 2015 Jared Rodriguez (jared.rodriguez@rackspace.com)
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mercury_agent.capabilities import capability
from mercury_agent.configuration import get_configuration
from mercury_agent.inspector import inspect
from mercury_agent.inspector.inspect import global_device_info
from mercury_agent.inspector.inspectors import health
@capability('inspector', description='Run inspector')
def inspector():
    """
    Manually trigger an inspection run.

    :return: result of ``mercury_agent.inspector.inspect.inspect()``
    """
    return inspect.inspect()
@capability('check_hardware', description='Check hardware for errors')
def check_hardware():
    """
    Check hardware for inconsistencies and defects.

    Currently flags only corrected machine-check events (MCE) above the
    configured threshold.

    :return: dict with an ``errors`` message list and its ``error_count``
    """
    agent_config = get_configuration().agent
    health_data = health.system_health_inspector(global_device_info)
    mce_count = health_data['corrected_hardware_event_count']
    mce_threshold = agent_config.hardware.mce_threshold
    errors = []
    if mce_count >= mce_threshold:
        errors.append(
            'MCE count is {} which is above the configured threshold of {}'.format(
                mce_count, mce_threshold))
    return {
        'errors': errors,
        'error_count': len(errors)
    }
| 36.333333 | 98 | 0.729628 |
from mercury_agent.capabilities import capability
from mercury_agent.configuration import get_configuration
from mercury_agent.inspector import inspect
from mercury_agent.inspector.inspect import global_device_info
from mercury_agent.inspector.inspectors import health
@capability('inspector', description='Run inspector')
def inspector():
return inspect.inspect()
@capability('check_hardware', description='Check hardware for errors')
def check_hardware():
configuration = get_configuration().agent
errors = []
_health_data = health.system_health_inspector(global_device_info)
if _health_data['corrected_hardware_event_count'] >= configuration.hardware.mce_threshold:
errors.append(
'MCE count is {} which is above the configured threshold of {}'.format(
_health_data['corrected_hardware_event_count'],
configuration.hardware.mce_threshold))
return {
'errors': errors,
'error_count': len(errors)
}
| true | true |
f71f755bceeeb2c38e3122cc3e6f50cb403624cb | 453 | py | Python | examples/user/user_playlists.py | LorenzoCavatorta/spotify.py | 7f375f030fbac4ef3dbbd577a898b4d72f37b72b | [
"MIT"
] | null | null | null | examples/user/user_playlists.py | LorenzoCavatorta/spotify.py | 7f375f030fbac4ef3dbbd577a898b4d72f37b72b | [
"MIT"
] | null | null | null | examples/user/user_playlists.py | LorenzoCavatorta/spotify.py | 7f375f030fbac4ef3dbbd577a898b4d72f37b72b | [
"MIT"
] | null | null | null | import asyncio
import spotify
client = spotify.Client('someid', 'somesecret')
async def main():
    """Fetch a Spotify user (two alternative ways) and list their playlists.

    The original example referenced an undefined ``user_id`` name, which
    raised NameError at runtime; a placeholder id is used instead.
    """
    # Option 1: a user with an HTTP presence, from an OAuth token.
    user = await client.user_from_token('sometoken')
    # Option 2: a generic user looked up by id (placeholder value).
    user = await client.get_user('someuserid')
    # get_playlists() returns a list of spotify.Playlist objects.
    playlists = await user.get_playlists()
if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(main())
| 25.166667 | 55 | 0.715232 | import asyncio
import spotify
client = spotify.Client('someid', 'somesecret')
async def main():
user = await client.user_from_token('sometoken')
user = await client.get_user(user_id)
playlists = await user.get_playlists()
if __name__ == '__main__':
asyncio.get_event_loop().run_until_complete(main())
| true | true |
f71f7596ed6264518815e5191c7f2d43b4922fcc | 2,012 | py | Python | pylearn2/scripts/datasets/make_cifar10_whitened.py | Menerve/pylearn2 | ad7bcfda3294404aebd71f5a5c4a8623d401a98e | [
"BSD-3-Clause"
] | 3 | 2016-01-23T10:18:39.000Z | 2019-02-28T06:22:45.000Z | pylearn2/scripts/datasets/make_cifar10_whitened.py | Menerve/pylearn2 | ad7bcfda3294404aebd71f5a5c4a8623d401a98e | [
"BSD-3-Clause"
] | null | null | null | pylearn2/scripts/datasets/make_cifar10_whitened.py | Menerve/pylearn2 | ad7bcfda3294404aebd71f5a5c4a8623d401a98e | [
"BSD-3-Clause"
] | null | null | null | """
This script makes a dataset of 32x32 approximately whitened CIFAR-10 images.
"""
from pylearn2.utils import serial
from pylearn2.datasets import preprocessing
from pylearn2.utils import string_utils
import numpy as np
from pylearn2.datasets.cifar10 import CIFAR10
data_dir = string_utils.preprocess('${PYLEARN2_DATA_PATH}/cifar10')
print 'Loading CIFAR-10 train dataset...'
train = CIFAR10(which_set = 'train')
print "Preparing output directory..."
output_dir = data_dir + '/pylearn2_whitened'
serial.mkdir( output_dir )
README = open(output_dir + '/README','w')
README.write("""
The .pkl files in this directory may be opened in python using
cPickle, pickle, or pylearn2.serial.load.
train.pkl, and test.pkl each contain
a pylearn2 Dataset object defining a labeled
dataset of a 32x32 approximately whitened version of the STL-10
dataset. train.pkl contains labeled train examples. test.pkl
contains labeled test examples.
preprocessor.pkl contains a pylearn2 ZCA object that was used
to approximately whiten the images. You may want to use this
object later to preprocess other images.
They were created with the pylearn2 script make_cifar10_whitened.py.
All other files in this directory, including this README, were
created by the same script and are necessary for the other files
to function correctly.
""")
README.close()
print "Learning the preprocessor and preprocessing the unsupervised train data..."
preprocessor = preprocessing.ZCA()
train.apply_preprocessor(preprocessor = preprocessor, can_fit = True)
print 'Saving the unsupervised data'
train.use_design_loc(output_dir+'/train.npy')
serial.save(output_dir + '/train.pkl', train)
print "Loading the test data"
test = CIFAR10(which_set = 'test')
print "Preprocessing the test data"
test.apply_preprocessor(preprocessor = preprocessor, can_fit = False)
print "Saving the test data"
test.use_design_loc(output_dir+'/test.npy')
serial.save(output_dir+'/test.pkl', test)
serial.save(output_dir + '/preprocessor.pkl',preprocessor)
| 30.953846 | 82 | 0.789264 | """
This script makes a dataset of 32x32 approximately whitened CIFAR-10 images.
"""
from pylearn2.utils import serial
from pylearn2.datasets import preprocessing
from pylearn2.utils import string_utils
import numpy as np
from pylearn2.datasets.cifar10 import CIFAR10
data_dir = string_utils.preprocess('${PYLEARN2_DATA_PATH}/cifar10')
print 'Loading CIFAR-10 train dataset...'
train = CIFAR10(which_set = 'train')
print "Preparing output directory..."
output_dir = data_dir + '/pylearn2_whitened'
serial.mkdir( output_dir )
README = open(output_dir + '/README','w')
README.write("""
The .pkl files in this directory may be opened in python using
cPickle, pickle, or pylearn2.serial.load.
train.pkl, and test.pkl each contain
a pylearn2 Dataset object defining a labeled
dataset of a 32x32 approximately whitened version of the STL-10
dataset. train.pkl contains labeled train examples. test.pkl
contains labeled test examples.
preprocessor.pkl contains a pylearn2 ZCA object that was used
to approximately whiten the images. You may want to use this
object later to preprocess other images.
They were created with the pylearn2 script make_cifar10_whitened.py.
All other files in this directory, including this README, were
created by the same script and are necessary for the other files
to function correctly.
""")
README.close()
print "Learning the preprocessor and preprocessing the unsupervised train data..."
preprocessor = preprocessing.ZCA()
train.apply_preprocessor(preprocessor = preprocessor, can_fit = True)
print 'Saving the unsupervised data'
train.use_design_loc(output_dir+'/train.npy')
serial.save(output_dir + '/train.pkl', train)
print "Loading the test data"
test = CIFAR10(which_set = 'test')
print "Preprocessing the test data"
test.apply_preprocessor(preprocessor = preprocessor, can_fit = False)
print "Saving the test data"
test.use_design_loc(output_dir+'/test.npy')
serial.save(output_dir+'/test.pkl', test)
serial.save(output_dir + '/preprocessor.pkl',preprocessor)
| false | true |
f71f75b68bb7f3fa7cd5a31932f2aebd38d239e8 | 8,668 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/aio/operations/_load_balancer_outbound_rules_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 3 | 2020-06-23T02:25:27.000Z | 2021-09-07T18:48:11.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/aio/operations/_load_balancer_outbound_rules_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 510 | 2019-07-17T16:11:19.000Z | 2021-08-02T08:38:32.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/aio/operations/_load_balancer_outbound_rules_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 5 | 2019-09-04T12:51:37.000Z | 2020-09-16T07:28:40.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class LoadBalancerOutboundRulesOperations:
    """LoadBalancerOutboundRulesOperations async operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2021_02_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    # NOTE: AutoRest-generated operations class (see file header); manual
    # edits here will be lost when the SDK is regenerated.
    models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        **kwargs
    ) -> AsyncIterable["_models.LoadBalancerOutboundRuleListResult"]:
        """Gets all the outbound rules in a load balancer.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either LoadBalancerOutboundRuleListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2021_02_01.models.LoadBalancerOutboundRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.LoadBalancerOutboundRuleListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        accept = "application/json"
        # Builds the first-page GET request, or the follow-up request for a
        # continuation link when next_link is given.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Deserializes one page into (continuation token, items).
        async def extract_data(pipeline_response):
            deserialized = self._deserialize('LoadBalancerOutboundRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        # Fetches one page, raising on any non-200 status.
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        outbound_rule_name: str,
        **kwargs
    ) -> "_models.OutboundRule":
        """Gets the specified load balancer outbound rule.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param load_balancer_name: The name of the load balancer.
        :type load_balancer_name: str
        :param outbound_rule_name: The name of the outbound rule.
        :type outbound_rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: OutboundRule, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2021_02_01.models.OutboundRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.OutboundRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'outboundRuleName': self._serialize.url("outbound_rule_name", outbound_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('OutboundRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules/{outboundRuleName}'}  # type: ignore
| 48.424581 | 206 | 0.671666 |
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
# Optional response hook: receives the raw pipeline response, the
# deserialized object, and the response headers.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


class LoadBalancerOutboundRulesOperations:
    """Async operations over the outbound rules of an Azure load balancer.

    Instances of this class are created by the service client; do not
    instantiate it directly.
    """

    # Alias so callers can reach the generated model types via ``.models``.
    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        # Pipeline client used to build and send HTTP requests, plus the
        # (de)serializers and client configuration it operates with.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        **kwargs
    ) -> AsyncIterable["_models.LoadBalancerOutboundRuleListResult"]:
        """Return an async pager over all outbound rules of the load balancer."""
        cls = kwargs.pop('cls', None)
        # Map HTTP status codes to the exceptions raised for them.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Build the GET request: the first page uses the templated URL,
            # continuation pages reuse the server-supplied next_link verbatim.
            header_parameters = {}
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                url = self.list.metadata['url']
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (continuation token, items).
            deserialized = self._deserialize('LoadBalancerOutboundRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to ARM errors.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules'}

    async def get(
        self,
        resource_group_name: str,
        load_balancer_name: str,
        outbound_rule_name: str,
        **kwargs
    ) -> "_models.OutboundRule":
        """Fetch a single outbound rule by name, raising on non-200 responses."""
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        accept = "application/json"

        # Substitute the path parameters into the templated URL.
        url = self.get.metadata['url']
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'),
            'outboundRuleName': self._serialize.url("outbound_rule_name", outbound_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('OutboundRule', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/loadBalancers/{loadBalancerName}/outboundRules/{outboundRuleName}'}
| true | true |
f71f76fbbb3977874071bfc11924aee5822e4bea | 2,317 | py | Python | itng/common/templatetags/ng_utils.py | NoviSystems/ng-utils | 29d20ba65fe2078694d18e6a33f7a448b26fa297 | [
"BSD-3-Clause"
] | null | null | null | itng/common/templatetags/ng_utils.py | NoviSystems/ng-utils | 29d20ba65fe2078694d18e6a33f7a448b26fa297 | [
"BSD-3-Clause"
] | null | null | null | itng/common/templatetags/ng_utils.py | NoviSystems/ng-utils | 29d20ba65fe2078694d18e6a33f7a448b26fa297 | [
"BSD-3-Clause"
] | null | null | null |
import re
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.encoding import force_text
from ordered_set import OrderedSet
register = template.Library()
@register.filter
def required(field):
    """Return 'required' when the BoundField's underlying form field is
    mandatory, otherwise an empty string."""
    if field.field.required:
        return "required"
    return ""
@register.filter
def add_class(value, css_classes):
    """
    Add one or more CSS classes to a form widget.  Multiple classes are
    passed as a single whitespace-delimited string, e.g.
    ``{{ field|add_class:"foo bar" }}``.  Duplicates are dropped and the
    original ordering is preserved.
    """
    if not css_classes:
        return value
    widget = value.field.widget
    existing = OrderedSet(widget.attrs.get('class', '').split())
    incoming = OrderedSet(css_classes.split())
    merged = existing | incoming
    widget.attrs['class'] = " ".join(merged)
    return value
@register.simple_tag(takes_context=True)
def isactive(context, url, active='active', inactive='', exact=False):
    """
    Ternary tag for whether a URL is 'active' (matches the current request
    URL).  By default the match is a prefix match: with ``url`` set to
    '/some/path', a request for '/some/path/subpath' counts as active.
    Pass ``exact=True`` to require an exact match.

    Example::

        {% url 'named-url' as named_url %}
        <div class="{% isactive named_url 'active' 'inactive' %}">
        </div>
    """
    current = context['request'].path_info
    if exact:
        matched = current == url
    else:
        matched = current.startswith(url)
    return active if matched else inactive
# def ifactive
# refer to {% ifequal %} implementation because it doesn't perform {% if %} condition parsing
# Originally from: https://djangosnippets.org/snippets/1519/
# FIX: the groups must be negative lookaheads ``(?!...)``, not literal
# ``(!...)`` groups.  With the literal form, "one" never matched
# CONSONANT_SOUND (it required a literal '!'), so e.g. "one" was rendered
# as "an one"; likewise "hour"/"houri" handling was broken.
CONSONANT_SOUND = re.compile(r'''one(?![ir])''', re.IGNORECASE | re.VERBOSE)
VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(?!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE)


@register.filter
def an(text):
    """Prefix *text* with the appropriate English indefinite article,
    returning e.g. 'an apple' or 'a banana'.  The choice is based on the
    leading vowel/consonant *sound* heuristics above, not just the first
    letter (so 'an hour' but 'a university' style cases are handled)."""
    text = force_text(text)
    # Vowel sound iff it is not a known consonant-sound word and does
    # match the vowel-sound pattern.
    match = not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text)
    return '%s %s' % ('an' if match else 'a', text)
| 31.739726 | 156 | 0.686664 |
import re
from django import template
from django.template.defaultfilters import stringfilter
from django.utils.encoding import force_text
from ordered_set import OrderedSet
register = template.Library()
@register.filter
def required(field):
    # Emit the HTML "required" attribute value when the underlying form
    # field is mandatory; empty string otherwise.
    return "required" if field.field.required else ""


@register.filter
def add_class(value, css_classes):
    # Merge the whitespace-delimited *css_classes* into the widget's
    # existing ``class`` attribute, preserving order and dropping
    # duplicates (OrderedSet union).
    if not css_classes:
        return value
    widget = value.field.widget
    orig_classes = OrderedSet(widget.attrs.get('class', '').split())
    new_classes = OrderedSet(css_classes.split())
    widget.attrs['class'] = " ".join(orig_classes | new_classes)
    return value


@register.simple_tag(takes_context=True)
def isactive(context, url, active='active', inactive='', exact=False):
    # Ternary tag: return *active* when the current request URL matches
    # *url* (prefix match by default, strict equality when ``exact``).
    request_url = context['request'].path_info
    if (request_url == url if exact else request_url.startswith(url)):
        return active
    return inactive


# Heuristics for choosing "a" vs "an" by leading sound.
# Originally from: https://djangosnippets.org/snippets/1519/
# NOTE(review): "(![ir])" and "(!i)" look like garbled negative
# lookaheads "(?![ir])" / "(?!i)" -- confirm against the upstream snippet.
CONSONANT_SOUND = re.compile(r'''one(![ir])''', re.IGNORECASE | re.VERBOSE)
VOWEL_SOUND = re.compile(r'''[aeio]|u([aeiou]|[^n][^aeiou]|ni[^dmnl]|nil[^l])|h(ier|onest|onou?r|ors\b|our(!i))|[fhlmnrsx]\b''', re.IGNORECASE | re.VERBOSE)


@register.filter
def an(text):
    # Prefix *text* with "an" when it starts with a vowel sound per the
    # regexes above, otherwise "a".
    text = force_text(text)
    match = not CONSONANT_SOUND.match(text) and VOWEL_SOUND.match(text)
    return '%s %s' % ('an' if match else 'a', text)
| true | true |
f71f78bb67acd2a761bf282de28af8274e07ab9d | 1,636 | py | Python | Largest_Range.py | Le-bruit-de-nos-pas/python-functions | 0d86f924087da228ef46f6b984239b4ec8b7b305 | [
"MIT"
] | null | null | null | Largest_Range.py | Le-bruit-de-nos-pas/python-functions | 0d86f924087da228ef46f6b984239b4ec8b7b305 | [
"MIT"
] | null | null | null | Largest_Range.py | Le-bruit-de-nos-pas/python-functions | 0d86f924087da228ef46f6b984239b4ec8b7b305 | [
"MIT"
] | null | null | null | array_to_analyze = [11,7,3,4,2,5,1,0]
def largestRange(array_to_analyze):
    """Return ``[start, end]`` of the longest run of consecutive integers
    contained (in any order) in *array_to_analyze*.

    Uses a hash table of the values so every number is expanded at most
    once, giving O(n) time and O(n) space.  Ties are resolved in favour of
    the range found later, and an empty input returns ``[0, 0]``, both
    matching the original behaviour.
    """
    # Map each value to a "visited" flag (0 = not yet expanded).
    elements = {x: 0 for x in array_to_analyze}
    # Bounds of the best range found so far.
    left = 0
    right = 0
    for entry in array_to_analyze:
        if elements[entry]:
            continue  # already absorbed into a previously found range
        # Mark the seed itself so duplicate occurrences are not re-expanded
        # (the original only marked the neighbours, re-doing work on dupes).
        elements[entry] = 1
        # Walk downwards while consecutive smaller values exist.
        left_count = entry - 1
        while left_count in elements:
            elements[left_count] = 1
            left_count = left_count - 1
        left_count = left_count + 1  # step back to the last existing value
        # Walk upwards while consecutive larger values exist.
        right_count = entry + 1
        while right_count in elements:
            elements[right_count] = 1
            right_count = right_count + 1
        right_count = right_count - 1
        # Keep the widest range seen so far (<= keeps the later range on ties).
        if (right - left) <= (right_count - left_count):
            right = right_count
            left = left_count
    return [left, right]
# all good
print(largestRange(array_to_analyze)) | 38.046512 | 106 | 0.620416 | array_to_analyze = [11,7,3,4,2,5,1,0]
def largestRange(array_to_analyze):
    """Find the bounds of the longest streak of consecutive integers
    present anywhere in the array, returned as ``[start, end]``."""
    # Presence/visited table: value -> 0 until expanded from a neighbour.
    seen = dict.fromkeys(array_to_analyze, 0)
    best_left, best_right = 0, 0
    for number in array_to_analyze:
        if seen[number] != 0:
            continue
        # Expand downwards through consecutive smaller values.
        lo = number - 1
        while lo in seen:
            seen[lo] = 1
            lo -= 1
        lo += 1
        # Expand upwards through consecutive larger values.
        hi = number + 1
        while hi in seen:
            seen[hi] = 1
            hi += 1
        hi -= 1
        # Remember the widest streak (later streaks win ties, as before).
        if (best_right - best_left) <= (hi - lo):
            best_left, best_right = lo, hi
    return [best_left, best_right]
print(largestRange(array_to_analyze)) | true | true |
f71f7a46733f735693deb2dee446ef1ebe2704f2 | 2,218 | py | Python | notifier.py | gkumar7/vaccine-notifier | 3177fcf7fa0eef38779e544db95844ac5b6edbdd | [
"MIT"
] | 1 | 2021-03-24T02:52:34.000Z | 2021-03-24T02:52:34.000Z | notifier.py | gkumar7/vaccine-notifier | 3177fcf7fa0eef38779e544db95844ac5b6edbdd | [
"MIT"
] | null | null | null | notifier.py | gkumar7/vaccine-notifier | 3177fcf7fa0eef38779e544db95844ac5b6edbdd | [
"MIT"
] | null | null | null | import time
from datetime import datetime
from math import radians, cos, sin, asin, sqrt
import requests
url = "https://www.vaccinespotter.org/api/v0/states/IL.json"
minutes = 1
center = {'lat': 0.0, 'lon': 0.0}
max_distance = 50
found = []
def haversine(lon1, lat1, lon2, lat2):
    """
    Calculate the great circle distance between two points
    on the earth (specified in decimal degrees), in miles.
    """
    # Convert every coordinate from decimal degrees to radians.
    lon1 = radians(lon1)
    lat1 = radians(lat1)
    lon2 = radians(lon2)
    lat2 = radians(lat2)
    # Haversine formula: half-angle differences feed the central angle.
    half_dlat = (lat2 - lat1) / 2
    half_dlon = (lon2 - lon1) / 2
    a = sin(half_dlat) ** 2 + cos(lat1) * cos(lat2) * sin(half_dlon) ** 2
    central_angle = 2 * asin(sqrt(a))
    earth_radius_miles = 3956
    return central_angle * earth_radius_miles
def get_distance(data):
    """Sort key: the precomputed 'distance' field of a result record."""
    distance = data['distance']
    return distance
def sound(data):
    """Announce the found appointment records; the commented-out GPIO
    lines drive a buzzer when run on a Raspberry Pi."""
    print("FOUND! {}".format(data))
    # GPIO.output(23, GPIO.HIGH)
    # time.sleep(10)
    # GPIO.output(23, GPIO.LOW)
def run():
    """Poll the vaccinespotter API once and alert on nearby openings.

    Locations with appointments available within ``max_distance`` miles of
    the module-level ``center`` are appended to ``found`` (kept sorted
    nearest-first) and announced via ``sound()``.
    """
    print("{} - Running".format(datetime.now()))
    # GPIO.setwarnings(False)
    # GPIO.setmode(GPIO.BCM)
    # GPIO.setup(23, GPIO.OUT)
    # GPIO.output(23, GPIO.LOW)
    resp = requests.get(url)
    data = resp.json()
    for feature in data['features']:
        coordinates = feature['geometry']['coordinates']
        # GeoJSON coordinate order is [longitude, latitude]; skip
        # entries that have no location.
        if coordinates[0] is None or coordinates[1] is None:
            continue
        pharmacy_loc = {'lat': coordinates[1], 'lon': coordinates[0]}
        props = feature['properties']
        distance = haversine(center['lon'], center['lat'], pharmacy_loc['lon'], pharmacy_loc['lat'])
        if props['appointments_available'] and distance <= max_distance:
            found.append({
                "name": props['name'],
                "url": props['url'],
                "address": props['address'],
                "city": props['city'],
                "state": props['state'],
                "zip": props['postal_code'],
                "distance": distance
            })
    # NOTE(review): module-level ``found`` is never cleared between polls,
    # so entries accumulate across iterations -- confirm this is intended.
    found.sort(key=get_distance)
    if len(found):
        sound(found)
    # GPIO.cleanup()
def main():
    """Run the poll loop forever, sleeping ``minutes`` between iterations."""
    while True:
        run()
        print("{} - Sleeping for {} minutes".format(datetime.now(), minutes))
        time.sleep(minutes * 60)


if __name__ == '__main__':
    main()
| 27.04878 | 100 | 0.576646 | import time
from datetime import datetime
from math import radians, cos, sin, asin, sqrt
import requests
url = "https://www.vaccinespotter.org/api/v0/states/IL.json"
minutes = 1
center = {'lat': 0.0, 'lon': 0.0}
max_distance = 50
found = []
def haversine(lon1, lat1, lon2, lat2):
    # Great-circle distance in miles between two decimal-degree
    # (lon, lat) points, via the haversine formula.
    lon1, lat1, lon2, lat2 = map(radians, [lon1, lat1, lon2, lat2])
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
    c = 2 * asin(sqrt(a))
    r = 3956  # Earth radius in miles
    return c * r


def get_distance(data):
    # Sort key: the precomputed distance of a result record.
    return data['distance']


def sound(data):
    # Announce the found appointment records.
    print("FOUND! {}".format(data))


def run():
    # Poll the vaccinespotter API once; collect locations with available
    # appointments within max_distance miles of center and announce them.
    print("{} - Running".format(datetime.now()))
    resp = requests.get(url)
    data = resp.json()
    for feature in data['features']:
        coordinates = feature['geometry']['coordinates']
        # GeoJSON coordinate order is [longitude, latitude].
        if coordinates[0] is None or coordinates[1] is None:
            continue
        pharmacy_loc = {'lat': coordinates[1], 'lon': coordinates[0]}
        props = feature['properties']
        distance = haversine(center['lon'], center['lat'], pharmacy_loc['lon'], pharmacy_loc['lat'])
        if props['appointments_available'] and distance <= max_distance:
            found.append({
                "name": props['name'],
                "url": props['url'],
                "address": props['address'],
                "city": props['city'],
                "state": props['state'],
                "zip": props['postal_code'],
                "distance": distance
            })
    # NOTE(review): module-level ``found`` is never cleared between polls -- confirm intended.
    found.sort(key=get_distance)
    if len(found):
        sound(found)


def main():
    # Poll forever, sleeping ``minutes`` between runs.
    while True:
        run()
        print("{} - Sleeping for {} minutes".format(datetime.now(), minutes))
        time.sleep(minutes * 60)


if __name__ == '__main__':
    main()
| true | true |
f71f7aaa0bb10df8c141305e95139c15bca2394f | 4,249 | py | Python | tests/test_histogram2d.py | ess-dmsc/JustBinIt | dc8242ed44f03e92f60618c96596025ec8cbc40e | [
"BSD-2-Clause"
] | null | null | null | tests/test_histogram2d.py | ess-dmsc/JustBinIt | dc8242ed44f03e92f60618c96596025ec8cbc40e | [
"BSD-2-Clause"
] | 23 | 2018-12-04T11:50:37.000Z | 2022-03-17T11:30:39.000Z | tests/test_histogram2d.py | ess-dmsc/JustBinIt | dc8242ed44f03e92f60618c96596025ec8cbc40e | [
"BSD-2-Clause"
] | 2 | 2019-07-24T11:13:41.000Z | 2020-08-04T18:33:22.000Z | import numpy as np
import pytest
from just_bin_it.histograms.histogram2d import Histogram2d
IRRELEVANT_TOPIC = "some-topic"
class TestHistogram2dFunctionality:
    """Behavioural tests for just_bin_it's 2-D histogram."""

    @pytest.fixture(autouse=True)
    def prepare(self):
        # Common fixture: a fresh 5x10 histogram over tof [0, 10] x det [0, 5],
        # plus a small ramp of event data.
        self.pulse_time = 1234
        self.num_bins = (5, 10)
        self.tof_range = (0, 10)
        self.det_range = (0, 5)
        self.data = np.array([x for x in range(self.num_bins[0])])
        self.hist = Histogram2d("topic", self.num_bins, self.tof_range, self.det_range)

    def test_if_single_value_for_num_bins_then_value_used_for_both_x_and_y(self):
        num_bins = 5
        hist = Histogram2d("topic", num_bins, self.tof_range, self.det_range)
        assert len(hist.x_edges) == num_bins + 1
        assert len(hist.y_edges) == num_bins + 1
        assert hist.shape == (num_bins, num_bins)

    def test_on_construction_histogram_is_uninitialised(self):
        # Edges are created eagerly but no counts have been accumulated.
        assert self.hist.x_edges is not None
        assert self.hist.y_edges is not None
        assert self.hist.shape == self.num_bins
        assert len(self.hist.x_edges) == self.num_bins[0] + 1
        assert len(self.hist.y_edges) == self.num_bins[1] + 1
        assert self.hist.x_edges[0] == self.data[0]
        assert self.hist.x_edges[-1] == 10
        assert self.hist.y_edges[0] == self.data[0]
        assert self.hist.y_edges[-1] == 5
        assert self.hist.data.sum() == 0

    def test_adding_data_to_initialised_histogram_new_data_is_added(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        first_sum = self.hist.data.sum()
        # Add the data again
        self.hist.add_data(self.pulse_time, self.data, self.data)
        # Sum should be double
        assert self.hist.data.sum() == first_sum * 2

    def test_adding_data_outside_initial_bins_is_ignored(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        first_sum = self.hist.data.sum()
        x_edges = self.hist.x_edges[:]
        y_edges = self.hist.y_edges[:]
        # Add data that is outside the edges
        new_data = np.array([x + self.num_bins[0] + 1 for x in range(self.num_bins[0])])
        self.hist.add_data(self.pulse_time, new_data, new_data)
        # Sum should not change
        assert self.hist.data.sum() == first_sum
        # Edges should not change
        assert np.array_equal(self.hist.x_edges, x_edges)
        assert np.array_equal(self.hist.y_edges, y_edges)

    def test_if_no_id_supplied_then_defaults_to_empty_string(self):
        assert self.hist.identifier == ""

    def test_id_supplied_then_is_set(self):
        example_id = "abcdef"
        hist = Histogram2d(
            "topic1",
            self.num_bins,
            self.tof_range,
            self.det_range,
            identifier=example_id,
        )
        assert hist.identifier == example_id

    def test_only_data_with_correct_source_is_added(self):
        # Data from a non-matching source must be silently discarded.
        hist = Histogram2d(
            "topic", self.num_bins, self.tof_range, self.det_range, source="source1"
        )
        hist.add_data(self.pulse_time, self.data, self.data, source="source1")
        hist.add_data(self.pulse_time, self.data, self.data, source="source1")
        hist.add_data(self.pulse_time, self.data, self.data, source="OTHER")
        assert hist.data.sum() == 10

    def test_clearing_histogram_data_clears_histogram(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        self.hist.clear_data()
        assert self.hist.data.sum() == 0

    def test_after_clearing_histogram_can_add_data(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        self.hist.clear_data()
        self.hist.add_data(self.pulse_time, self.data, self.data)
        assert self.hist.shape == self.num_bins
        assert self.hist.data.sum() == 5

    def test_adding_empty_data_does_nothing(self):
        self.hist.add_data(self.pulse_time, [], [])
        assert self.hist.data.sum() == 0

    def test_histogram_keeps_track_of_last_pulse_time_processed(self):
        self.hist.add_data(1234, self.data, self.data)
        self.hist.add_data(1235, self.data, self.data)
        self.hist.add_data(1236, self.data, self.data)
        assert self.hist.last_pulse_time == 1236
| 36.62931 | 88 | 0.65992 | import numpy as np
import pytest
from just_bin_it.histograms.histogram2d import Histogram2d
IRRELEVANT_TOPIC = "some-topic"
class TestHistogram2dFunctionality:
    """Behavioural tests for just_bin_it's 2-D histogram."""

    @pytest.fixture(autouse=True)
    def prepare(self):
        # Shared fixture: 5x10 histogram over tof [0, 10] x det [0, 5].
        self.pulse_time = 1234
        self.num_bins = (5, 10)
        self.tof_range = (0, 10)
        self.det_range = (0, 5)
        self.data = np.array([x for x in range(self.num_bins[0])])
        self.hist = Histogram2d("topic", self.num_bins, self.tof_range, self.det_range)

    def test_if_single_value_for_num_bins_then_value_used_for_both_x_and_y(self):
        num_bins = 5
        hist = Histogram2d("topic", num_bins, self.tof_range, self.det_range)
        assert len(hist.x_edges) == num_bins + 1
        assert len(hist.y_edges) == num_bins + 1
        assert hist.shape == (num_bins, num_bins)

    def test_on_construction_histogram_is_uninitialised(self):
        # Edges exist immediately; counts start at zero.
        assert self.hist.x_edges is not None
        assert self.hist.y_edges is not None
        assert self.hist.shape == self.num_bins
        assert len(self.hist.x_edges) == self.num_bins[0] + 1
        assert len(self.hist.y_edges) == self.num_bins[1] + 1
        assert self.hist.x_edges[0] == self.data[0]
        assert self.hist.x_edges[-1] == 10
        assert self.hist.y_edges[0] == self.data[0]
        assert self.hist.y_edges[-1] == 5
        assert self.hist.data.sum() == 0

    def test_adding_data_to_initialised_histogram_new_data_is_added(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        first_sum = self.hist.data.sum()
        # Adding the same data again should double the total count.
        self.hist.add_data(self.pulse_time, self.data, self.data)
        assert self.hist.data.sum() == first_sum * 2

    def test_adding_data_outside_initial_bins_is_ignored(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        first_sum = self.hist.data.sum()
        x_edges = self.hist.x_edges[:]
        y_edges = self.hist.y_edges[:]
        # Data beyond the edges must affect neither counts nor edges.
        new_data = np.array([x + self.num_bins[0] + 1 for x in range(self.num_bins[0])])
        self.hist.add_data(self.pulse_time, new_data, new_data)
        assert self.hist.data.sum() == first_sum
        assert np.array_equal(self.hist.x_edges, x_edges)
        assert np.array_equal(self.hist.y_edges, y_edges)

    def test_if_no_id_supplied_then_defaults_to_empty_string(self):
        assert self.hist.identifier == ""

    def test_id_supplied_then_is_set(self):
        example_id = "abcdef"
        hist = Histogram2d(
            "topic1",
            self.num_bins,
            self.tof_range,
            self.det_range,
            identifier=example_id,
        )
        assert hist.identifier == example_id

    def test_only_data_with_correct_source_is_added(self):
        # Events from a non-matching source are discarded.
        hist = Histogram2d(
            "topic", self.num_bins, self.tof_range, self.det_range, source="source1"
        )
        hist.add_data(self.pulse_time, self.data, self.data, source="source1")
        hist.add_data(self.pulse_time, self.data, self.data, source="source1")
        hist.add_data(self.pulse_time, self.data, self.data, source="OTHER")
        assert hist.data.sum() == 10

    def test_clearing_histogram_data_clears_histogram(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        self.hist.clear_data()
        assert self.hist.data.sum() == 0

    def test_after_clearing_histogram_can_add_data(self):
        self.hist.add_data(self.pulse_time, self.data, self.data)
        self.hist.clear_data()
        self.hist.add_data(self.pulse_time, self.data, self.data)
        assert self.hist.shape == self.num_bins
        assert self.hist.data.sum() == 5

    def test_adding_empty_data_does_nothing(self):
        self.hist.add_data(self.pulse_time, [], [])
        assert self.hist.data.sum() == 0

    def test_histogram_keeps_track_of_last_pulse_time_processed(self):
        self.hist.add_data(1234, self.data, self.data)
        self.hist.add_data(1235, self.data, self.data)
        self.hist.add_data(1236, self.data, self.data)
        assert self.hist.last_pulse_time == 1236
| true | true |
f71f7e5bf94980d2547f9d71b092b8666b476e67 | 17,709 | py | Python | contrib/tools/python3/src/Lib/wave.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 486 | 2016-05-28T18:51:54.000Z | 2022-03-20T17:30:31.000Z | contrib/tools/python3/src/Lib/wave.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 42 | 2018-05-25T15:57:08.000Z | 2021-01-17T18:39:59.000Z | contrib/tools/python3/src/Lib/wave.py | HeyLey/catboost | f472aed90604ebe727537d9d4a37147985e10ec2 | [
"Apache-2.0"
] | 46 | 2016-05-28T18:52:03.000Z | 2021-06-01T07:57:51.000Z | """Stuff to parse WAVE files.
Usage.
Reading WAVE files:
f = wave.open(file, 'r')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods read(), seek(), and close().
When the setpos() and rewind() methods are not used, the seek()
method is not necessary.
This returns an instance of a class with the following public methods:
getnchannels() -- returns number of audio channels (1 for
mono, 2 for stereo)
getsampwidth() -- returns sample width in bytes
getframerate() -- returns sampling frequency
getnframes() -- returns number of audio frames
getcomptype() -- returns compression type ('NONE' for linear samples)
getcompname() -- returns human-readable version of
compression type ('not compressed' linear samples)
getparams() -- returns a namedtuple consisting of all of the
above in the above order
getmarkers() -- returns None (for compatibility with the
aifc module)
getmark(id) -- raises an error since the mark does not
exist (for compatibility with the aifc module)
readframes(n) -- returns at most n frames of audio
rewind() -- rewind to the beginning of the audio stream
setpos(pos) -- seek to the specified position
tell() -- return the current position
close() -- close the instance (make it unusable)
The position returned by tell() and the position given to setpos()
are compatible and have nothing to do with the actual position in the
file.
The close() method is called automatically when the class instance
is destroyed.
Writing WAVE files:
f = wave.open(file, 'w')
where file is either the name of a file or an open file pointer.
The open file pointer must have methods write(), tell(), seek(), and
close().
This returns an instance of a class with the following public methods:
setnchannels(n) -- set the number of channels
setsampwidth(n) -- set the sample width
setframerate(n) -- set the frame rate
setnframes(n) -- set the number of frames
setcomptype(type, name)
-- set the compression type and the
human-readable compression type
setparams(tuple)
-- set all parameters at once
tell() -- return current position in output file
writeframesraw(data)
-- write audio frames without patching up the
file header
writeframes(data)
-- write audio frames and patch up the file header
close() -- patch up the file header and close the
output file
You should set the parameters before the first writeframesraw or
writeframes. The total number of frames does not need to be set,
but when it is set to the correct value, the header does not have to
be patched up.
It is best to first set all parameters, except possibly the
compression type, and then write audio frames using writeframesraw.
When all frames have been written, either call writeframes(b'') or
close() to patch up the sizes in the header.
The close() method is called automatically when the class instance
is destroyed.
"""
import builtins
__all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"]
class Error(Exception):
    """Raised for malformed WAVE files and misuse of this module's API."""
    pass
WAVE_FORMAT_PCM = 0x0001
_array_fmts = None, 'b', 'h', None, 'i'
import audioop
import struct
import sys
from chunk import Chunk
from collections import namedtuple
_wave_params = namedtuple('_wave_params',
'nchannels sampwidth framerate nframes comptype compname')
class Wave_read:
    """Variables used in this class:

    These variables are available to the user though appropriate
    methods of this class:
    _file -- the open file with methods read(), close(), and seek()
              set through the __init__() method
    _nchannels -- the number of audio channels
              available through the getnchannels() method
    _nframes -- the number of audio frames
              available through the getnframes() method
    _sampwidth -- the number of bytes per audio sample
              available through the getsampwidth() method
    _framerate -- the sampling frequency
              available through the getframerate() method
    _comptype -- the AIFF-C compression type ('NONE' if AIFF)
              available through the getcomptype() method
    _compname -- the human-readable AIFF-C compression type
              available through the getcomptype() method
    _soundpos -- the position in the audio stream
              available through the tell() method, set through the
              setpos() method

    These variables are used internally only:
    _fmt_chunk_read -- 1 iff the FMT chunk has been read
    _data_seek_needed -- 1 iff positioned correctly in audio
              file for readframes()
    _data_chunk -- instantiation of a chunk class for the DATA chunk
    _framesize -- size of one frame in the file
    """

    def initfp(self, file):
        # Parse the RIFF container: verify the RIFF/WAVE ids, then scan
        # chunks until the 'fmt ' chunk (format parameters) and the 'data'
        # chunk (audio samples) have both been found.
        self._convert = None
        self._soundpos = 0
        self._file = Chunk(file, bigendian = 0)
        if self._file.getname() != b'RIFF':
            raise Error('file does not start with RIFF id')
        if self._file.read(4) != b'WAVE':
            raise Error('not a WAVE file')
        self._fmt_chunk_read = 0
        self._data_chunk = None
        while 1:
            self._data_seek_needed = 1
            try:
                chunk = Chunk(self._file, bigendian = 0)
            except EOFError:
                break
            chunkname = chunk.getname()
            if chunkname == b'fmt ':
                self._read_fmt_chunk(chunk)
                self._fmt_chunk_read = 1
            elif chunkname == b'data':
                # 'fmt ' must precede 'data' so _framesize is known.
                if not self._fmt_chunk_read:
                    raise Error('data chunk before fmt chunk')
                self._data_chunk = chunk
                self._nframes = chunk.chunksize // self._framesize
                self._data_seek_needed = 0
                break
            chunk.skip()
        if not self._fmt_chunk_read or not self._data_chunk:
            raise Error('fmt chunk and/or data chunk missing')

    def __init__(self, f):
        # Accept either a filename or an open binary file object; files we
        # open ourselves are closed again on error and in close().
        self._i_opened_the_file = None
        if isinstance(f, str):
            f = builtins.open(f, 'rb')
            self._i_opened_the_file = f
        # else, assume it is an open file object already
        try:
            self.initfp(f)
        except:
            if self._i_opened_the_file:
                f.close()
            raise

    def __del__(self):
        self.close()

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    #
    # User visible methods.
    #
    def getfp(self):
        return self._file

    def rewind(self):
        # Reposition to the first audio frame.
        self._data_seek_needed = 1
        self._soundpos = 0

    def close(self):
        # Close the file only if this instance opened it.
        self._file = None
        file = self._i_opened_the_file
        if file:
            self._i_opened_the_file = None
            file.close()

    def tell(self):
        return self._soundpos

    def getnchannels(self):
        return self._nchannels

    def getnframes(self):
        return self._nframes

    def getsampwidth(self):
        return self._sampwidth

    def getframerate(self):
        return self._framerate

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        return self._compname

    def getparams(self):
        # All format parameters bundled as a namedtuple.
        return _wave_params(self.getnchannels(), self.getsampwidth(),
                            self.getframerate(), self.getnframes(),
                            self.getcomptype(), self.getcompname())

    def getmarkers(self):
        # WAVE files have no marks; kept for aifc-module compatibility.
        return None

    def getmark(self, id):
        raise Error('no marks')

    def setpos(self, pos):
        if pos < 0 or pos > self._nframes:
            raise Error('position not in range')
        self._soundpos = pos
        self._data_seek_needed = 1

    def readframes(self, nframes):
        """Read and return at most *nframes* frames as a bytes object."""
        if self._data_seek_needed:
            # Seek within the data chunk to the current frame position.
            self._data_chunk.seek(0, 0)
            pos = self._soundpos * self._framesize
            if pos:
                self._data_chunk.seek(pos, 0)
            self._data_seek_needed = 0
        if nframes == 0:
            return b''
        data = self._data_chunk.read(nframes * self._framesize)
        # WAVE samples are little-endian; byteswap on big-endian hosts
        # (single-byte samples need no swapping).
        if self._sampwidth != 1 and sys.byteorder == 'big':
            data = audioop.byteswap(data, self._sampwidth)
        if self._convert and data:
            data = self._convert(data)
        self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
        return data

    #
    # Internal methods.
    #
    def _read_fmt_chunk(self, chunk):
        # Decode the 'fmt ' chunk; only uncompressed PCM is supported.
        wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack_from('<HHLLH', chunk.read(14))
        if wFormatTag == WAVE_FORMAT_PCM:
            sampwidth = struct.unpack_from('<H', chunk.read(2))[0]
            # Round bit width up to whole bytes.
            self._sampwidth = (sampwidth + 7) // 8
        else:
            raise Error('unknown format: %r' % (wFormatTag,))
        self._framesize = self._nchannels * self._sampwidth
        self._comptype = 'NONE'
        self._compname = 'not compressed'
class Wave_write:
"""Variables used in this class:
These variables are user settable through appropriate methods
of this class:
_file -- the open file with methods write(), close(), tell(), seek()
set through the __init__() method
_comptype -- the AIFF-C compression type ('NONE' in AIFF)
set through the setcomptype() or setparams() method
_compname -- the human-readable AIFF-C compression type
set through the setcomptype() or setparams() method
_nchannels -- the number of audio channels
set through the setnchannels() or setparams() method
_sampwidth -- the number of bytes per audio sample
set through the setsampwidth() or setparams() method
_framerate -- the sampling frequency
set through the setframerate() or setparams() method
_nframes -- the number of audio frames written to the header
set through the setnframes() or setparams() method
These variables are used internally only:
_datalength -- the size of the audio samples written to the header
_nframeswritten -- the number of frames actually written
_datawritten -- the size of the audio samples actually written
"""
def __init__(self, f):
self._i_opened_the_file = None
if isinstance(f, str):
f = builtins.open(f, 'wb')
self._i_opened_the_file = f
try:
self.initfp(f)
except:
if self._i_opened_the_file:
f.close()
raise
def initfp(self, file):
self._file = file
self._convert = None
self._nchannels = 0
self._sampwidth = 0
self._framerate = 0
self._nframes = 0
self._nframeswritten = 0
self._datawritten = 0
self._datalength = 0
self._headerwritten = False
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
#
# User visible methods.
#
def setnchannels(self, nchannels):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if nchannels < 1:
raise Error('bad # of channels')
self._nchannels = nchannels
def getnchannels(self):
if not self._nchannels:
raise Error('number of channels not set')
return self._nchannels
def setsampwidth(self, sampwidth):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if sampwidth < 1 or sampwidth > 4:
raise Error('bad sample width')
self._sampwidth = sampwidth
def getsampwidth(self):
if not self._sampwidth:
raise Error('sample width not set')
return self._sampwidth
def setframerate(self, framerate):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if framerate <= 0:
raise Error('bad frame rate')
self._framerate = int(round(framerate))
def getframerate(self):
if not self._framerate:
raise Error('frame rate not set')
return self._framerate
def setnframes(self, nframes):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
self._nframes = nframes
def getnframes(self):
return self._nframeswritten
def setcomptype(self, comptype, compname):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if comptype not in ('NONE',):
raise Error('unsupported compression type')
self._comptype = comptype
self._compname = compname
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def setparams(self, params):
nchannels, sampwidth, framerate, nframes, comptype, compname = params
if self._datawritten:
raise Error('cannot change parameters after starting to write')
self.setnchannels(nchannels)
self.setsampwidth(sampwidth)
self.setframerate(framerate)
self.setnframes(nframes)
self.setcomptype(comptype, compname)
def getparams(self):
if not self._nchannels or not self._sampwidth or not self._framerate:
raise Error('not all parameters set')
return _wave_params(self._nchannels, self._sampwidth, self._framerate,
self._nframes, self._comptype, self._compname)
def setmark(self, id, pos, name):
raise Error('setmark() not supported')
def getmark(self, id):
raise Error('no marks')
def getmarkers(self):
return None
def tell(self):
return self._nframeswritten
def writeframesraw(self, data):
if not isinstance(data, (bytes, bytearray)):
data = memoryview(data).cast('B')
self._ensure_header_written(len(data))
nframes = len(data) // (self._sampwidth * self._nchannels)
if self._convert:
data = self._convert(data)
if self._sampwidth != 1 and sys.byteorder == 'big':
data = audioop.byteswap(data, self._sampwidth)
self._file.write(data)
self._datawritten += len(data)
self._nframeswritten = self._nframeswritten + nframes
def writeframes(self, data):
self.writeframesraw(data)
if self._datalength != self._datawritten:
self._patchheader()
def close(self):
try:
if self._file:
self._ensure_header_written(0)
if self._datalength != self._datawritten:
self._patchheader()
self._file.flush()
finally:
self._file = None
file = self._i_opened_the_file
if file:
self._i_opened_the_file = None
file.close()
#
# Internal methods.
#
def _ensure_header_written(self, datasize):
if not self._headerwritten:
if not self._nchannels:
raise Error('# channels not specified')
if not self._sampwidth:
raise Error('sample width not specified')
if not self._framerate:
raise Error('sampling rate not specified')
self._write_header(datasize)
def _write_header(self, initlength):
assert not self._headerwritten
self._file.write(b'RIFF')
if not self._nframes:
self._nframes = initlength // (self._nchannels * self._sampwidth)
self._datalength = self._nframes * self._nchannels * self._sampwidth
try:
self._form_length_pos = self._file.tell()
except (AttributeError, OSError):
self._form_length_pos = None
self._file.write(struct.pack('<L4s4sLHHLLHH4s',
36 + self._datalength, b'WAVE', b'fmt ', 16,
WAVE_FORMAT_PCM, self._nchannels, self._framerate,
self._nchannels * self._framerate * self._sampwidth,
self._nchannels * self._sampwidth,
self._sampwidth * 8, b'data'))
if self._form_length_pos is not None:
self._data_length_pos = self._file.tell()
self._file.write(struct.pack('<L', self._datalength))
self._headerwritten = True
def _patchheader(self):
assert self._headerwritten
if self._datawritten == self._datalength:
return
curpos = self._file.tell()
self._file.seek(self._form_length_pos, 0)
self._file.write(struct.pack('<L', 36 + self._datawritten))
self._file.seek(self._data_length_pos, 0)
self._file.write(struct.pack('<L', self._datawritten))
self._file.seek(curpos, 0)
self._datalength = self._datawritten
def open(f, mode=None):
if mode is None:
if hasattr(f, 'mode'):
mode = f.mode
else:
mode = 'rb'
if mode in ('r', 'rb'):
return Wave_read(f)
elif mode in ('w', 'wb'):
return Wave_write(f)
else:
raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
openfp = open # B/W compatibility
| 34.998024 | 130 | 0.61257 |
import builtins
__all__ = ["open", "openfp", "Error", "Wave_read", "Wave_write"]
class Error(Exception):
pass
WAVE_FORMAT_PCM = 0x0001
_array_fmts = None, 'b', 'h', None, 'i'
import audioop
import struct
import sys
from chunk import Chunk
from collections import namedtuple
_wave_params = namedtuple('_wave_params',
'nchannels sampwidth framerate nframes comptype compname')
class Wave_read:
def initfp(self, file):
self._convert = None
self._soundpos = 0
self._file = Chunk(file, bigendian = 0)
if self._file.getname() != b'RIFF':
raise Error('file does not start with RIFF id')
if self._file.read(4) != b'WAVE':
raise Error('not a WAVE file')
self._fmt_chunk_read = 0
self._data_chunk = None
while 1:
self._data_seek_needed = 1
try:
chunk = Chunk(self._file, bigendian = 0)
except EOFError:
break
chunkname = chunk.getname()
if chunkname == b'fmt ':
self._read_fmt_chunk(chunk)
self._fmt_chunk_read = 1
elif chunkname == b'data':
if not self._fmt_chunk_read:
raise Error('data chunk before fmt chunk')
self._data_chunk = chunk
self._nframes = chunk.chunksize // self._framesize
self._data_seek_needed = 0
break
chunk.skip()
if not self._fmt_chunk_read or not self._data_chunk:
raise Error('fmt chunk and/or data chunk missing')
def __init__(self, f):
self._i_opened_the_file = None
if isinstance(f, str):
f = builtins.open(f, 'rb')
self._i_opened_the_file = f
try:
self.initfp(f)
except:
if self._i_opened_the_file:
f.close()
raise
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def getfp(self):
return self._file
def rewind(self):
self._data_seek_needed = 1
self._soundpos = 0
def close(self):
self._file = None
file = self._i_opened_the_file
if file:
self._i_opened_the_file = None
file.close()
def tell(self):
return self._soundpos
def getnchannels(self):
return self._nchannels
def getnframes(self):
return self._nframes
def getsampwidth(self):
return self._sampwidth
def getframerate(self):
return self._framerate
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def getparams(self):
return _wave_params(self.getnchannels(), self.getsampwidth(),
self.getframerate(), self.getnframes(),
self.getcomptype(), self.getcompname())
def getmarkers(self):
return None
def getmark(self, id):
raise Error('no marks')
def setpos(self, pos):
if pos < 0 or pos > self._nframes:
raise Error('position not in range')
self._soundpos = pos
self._data_seek_needed = 1
def readframes(self, nframes):
if self._data_seek_needed:
self._data_chunk.seek(0, 0)
pos = self._soundpos * self._framesize
if pos:
self._data_chunk.seek(pos, 0)
self._data_seek_needed = 0
if nframes == 0:
return b''
data = self._data_chunk.read(nframes * self._framesize)
if self._sampwidth != 1 and sys.byteorder == 'big':
data = audioop.byteswap(data, self._sampwidth)
if self._convert and data:
data = self._convert(data)
self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
return data
def _read_fmt_chunk(self, chunk):
wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack_from('<HHLLH', chunk.read(14))
if wFormatTag == WAVE_FORMAT_PCM:
sampwidth = struct.unpack_from('<H', chunk.read(2))[0]
self._sampwidth = (sampwidth + 7) // 8
else:
raise Error('unknown format: %r' % (wFormatTag,))
self._framesize = self._nchannels * self._sampwidth
self._comptype = 'NONE'
self._compname = 'not compressed'
class Wave_write:
def __init__(self, f):
self._i_opened_the_file = None
if isinstance(f, str):
f = builtins.open(f, 'wb')
self._i_opened_the_file = f
try:
self.initfp(f)
except:
if self._i_opened_the_file:
f.close()
raise
def initfp(self, file):
self._file = file
self._convert = None
self._nchannels = 0
self._sampwidth = 0
self._framerate = 0
self._nframes = 0
self._nframeswritten = 0
self._datawritten = 0
self._datalength = 0
self._headerwritten = False
def __del__(self):
self.close()
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def setnchannels(self, nchannels):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if nchannels < 1:
raise Error('bad # of channels')
self._nchannels = nchannels
def getnchannels(self):
if not self._nchannels:
raise Error('number of channels not set')
return self._nchannels
def setsampwidth(self, sampwidth):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if sampwidth < 1 or sampwidth > 4:
raise Error('bad sample width')
self._sampwidth = sampwidth
def getsampwidth(self):
if not self._sampwidth:
raise Error('sample width not set')
return self._sampwidth
def setframerate(self, framerate):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if framerate <= 0:
raise Error('bad frame rate')
self._framerate = int(round(framerate))
def getframerate(self):
if not self._framerate:
raise Error('frame rate not set')
return self._framerate
def setnframes(self, nframes):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
self._nframes = nframes
def getnframes(self):
return self._nframeswritten
def setcomptype(self, comptype, compname):
if self._datawritten:
raise Error('cannot change parameters after starting to write')
if comptype not in ('NONE',):
raise Error('unsupported compression type')
self._comptype = comptype
self._compname = compname
def getcomptype(self):
return self._comptype
def getcompname(self):
return self._compname
def setparams(self, params):
nchannels, sampwidth, framerate, nframes, comptype, compname = params
if self._datawritten:
raise Error('cannot change parameters after starting to write')
self.setnchannels(nchannels)
self.setsampwidth(sampwidth)
self.setframerate(framerate)
self.setnframes(nframes)
self.setcomptype(comptype, compname)
def getparams(self):
if not self._nchannels or not self._sampwidth or not self._framerate:
raise Error('not all parameters set')
return _wave_params(self._nchannels, self._sampwidth, self._framerate,
self._nframes, self._comptype, self._compname)
def setmark(self, id, pos, name):
raise Error('setmark() not supported')
def getmark(self, id):
raise Error('no marks')
def getmarkers(self):
return None
def tell(self):
return self._nframeswritten
def writeframesraw(self, data):
if not isinstance(data, (bytes, bytearray)):
data = memoryview(data).cast('B')
self._ensure_header_written(len(data))
nframes = len(data) // (self._sampwidth * self._nchannels)
if self._convert:
data = self._convert(data)
if self._sampwidth != 1 and sys.byteorder == 'big':
data = audioop.byteswap(data, self._sampwidth)
self._file.write(data)
self._datawritten += len(data)
self._nframeswritten = self._nframeswritten + nframes
def writeframes(self, data):
self.writeframesraw(data)
if self._datalength != self._datawritten:
self._patchheader()
def close(self):
try:
if self._file:
self._ensure_header_written(0)
if self._datalength != self._datawritten:
self._patchheader()
self._file.flush()
finally:
self._file = None
file = self._i_opened_the_file
if file:
self._i_opened_the_file = None
file.close()
def _ensure_header_written(self, datasize):
if not self._headerwritten:
if not self._nchannels:
raise Error('# channels not specified')
if not self._sampwidth:
raise Error('sample width not specified')
if not self._framerate:
raise Error('sampling rate not specified')
self._write_header(datasize)
def _write_header(self, initlength):
assert not self._headerwritten
self._file.write(b'RIFF')
if not self._nframes:
self._nframes = initlength // (self._nchannels * self._sampwidth)
self._datalength = self._nframes * self._nchannels * self._sampwidth
try:
self._form_length_pos = self._file.tell()
except (AttributeError, OSError):
self._form_length_pos = None
self._file.write(struct.pack('<L4s4sLHHLLHH4s',
36 + self._datalength, b'WAVE', b'fmt ', 16,
WAVE_FORMAT_PCM, self._nchannels, self._framerate,
self._nchannels * self._framerate * self._sampwidth,
self._nchannels * self._sampwidth,
self._sampwidth * 8, b'data'))
if self._form_length_pos is not None:
self._data_length_pos = self._file.tell()
self._file.write(struct.pack('<L', self._datalength))
self._headerwritten = True
def _patchheader(self):
assert self._headerwritten
if self._datawritten == self._datalength:
return
curpos = self._file.tell()
self._file.seek(self._form_length_pos, 0)
self._file.write(struct.pack('<L', 36 + self._datawritten))
self._file.seek(self._data_length_pos, 0)
self._file.write(struct.pack('<L', self._datawritten))
self._file.seek(curpos, 0)
self._datalength = self._datawritten
def open(f, mode=None):
if mode is None:
if hasattr(f, 'mode'):
mode = f.mode
else:
mode = 'rb'
if mode in ('r', 'rb'):
return Wave_read(f)
elif mode in ('w', 'wb'):
return Wave_write(f)
else:
raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
openfp = open
| true | true |
f71f7e70877767a16cf3a649cd197af3470937c5 | 2,541 | py | Python | interface/Movie.py | BrickText/JHROM | d99b907e0837d8dcc57ab474e9435891736f0dda | [
"MIT"
] | null | null | null | interface/Movie.py | BrickText/JHROM | d99b907e0837d8dcc57ab474e9435891736f0dda | [
"MIT"
] | null | null | null | interface/Movie.py | BrickText/JHROM | d99b907e0837d8dcc57ab474e9435891736f0dda | [
"MIT"
] | null | null | null | from database.queries.insert_queries import INSERT_MOVIE
from database.queries.update_queries import UPDATE_MOVIE
from database.queries.delete_queries import DELETE_MOVIE
from database.queries.select_queries import SELECT_MOVIES_ORDERED_BY_RATING,\
SELECT_PROJECTION_FOR_MOVIE, \
SELECT_MOVIE_BY_ID
from database.connection.execute_query import execute_query
from settings.SharedVariables import SharedVariables
from prettytable import PrettyTable
class Movies:
def __init__(self):
try:
self.data = execute_query(SELECT_MOVIES_ORDERED_BY_RATING, [])
except Exception:
print("Database not initilized or connected")
def __str__(self):
t = PrettyTable(SharedVariables.movie_col)
for row in self.data:
t.add_row([row[0], row[1], row[2]])
return str(t)
@staticmethod
def get_movie(id):
try:
data = execute_query(SELECT_MOVIE_BY_ID, [id, ])
except Exception:
print("Database not initilized or connected")
t = PrettyTable(SharedVariables.movie_col)
for row in data:
t.add_row([row[0], row[1], row[2]])
return str(t)
@staticmethod
def add_movie(name, rating):
try:
execute_query(INSERT_MOVIE, [name, rating, ], commit=True)
except Exception:
print("Database not initilized or connected")
@staticmethod
def delete_movie(id):
try:
execute_query(DELETE_MOVIE, [id, ], commit=True)
except Exception:
print("Database not initilized or connected")
@staticmethod
def update_movie(id, name, rating):
try:
execute_query(UPDATE_MOVIE, [name, rating, id, ], commit=True)
except Exception:
print("Database not initilized or connected")
@staticmethod
def movie_projections(id):
try:
data = execute_query(SELECT_PROJECTION_FOR_MOVIE, [id, ])
t = PrettyTable(SharedVariables.projection_col)
for row in data:
t.add_row([row[0], row[1], row[2], row[3], (100 - row[4])])
return str(t)
except Exception:
print("Database not initilized or connected!")
if __name__ == '__main__':
from database.connection.database_connection import Database
SharedVariables.database = Database()
Movies.add_movie("Baywatch", 10)
print(Movies.get_movie(2))
| 32.164557 | 77 | 0.63046 | from database.queries.insert_queries import INSERT_MOVIE
from database.queries.update_queries import UPDATE_MOVIE
from database.queries.delete_queries import DELETE_MOVIE
from database.queries.select_queries import SELECT_MOVIES_ORDERED_BY_RATING,\
SELECT_PROJECTION_FOR_MOVIE, \
SELECT_MOVIE_BY_ID
from database.connection.execute_query import execute_query
from settings.SharedVariables import SharedVariables
from prettytable import PrettyTable
class Movies:
def __init__(self):
try:
self.data = execute_query(SELECT_MOVIES_ORDERED_BY_RATING, [])
except Exception:
print("Database not initilized or connected")
def __str__(self):
t = PrettyTable(SharedVariables.movie_col)
for row in self.data:
t.add_row([row[0], row[1], row[2]])
return str(t)
@staticmethod
def get_movie(id):
try:
data = execute_query(SELECT_MOVIE_BY_ID, [id, ])
except Exception:
print("Database not initilized or connected")
t = PrettyTable(SharedVariables.movie_col)
for row in data:
t.add_row([row[0], row[1], row[2]])
return str(t)
@staticmethod
def add_movie(name, rating):
try:
execute_query(INSERT_MOVIE, [name, rating, ], commit=True)
except Exception:
print("Database not initilized or connected")
@staticmethod
def delete_movie(id):
try:
execute_query(DELETE_MOVIE, [id, ], commit=True)
except Exception:
print("Database not initilized or connected")
@staticmethod
def update_movie(id, name, rating):
try:
execute_query(UPDATE_MOVIE, [name, rating, id, ], commit=True)
except Exception:
print("Database not initilized or connected")
@staticmethod
def movie_projections(id):
try:
data = execute_query(SELECT_PROJECTION_FOR_MOVIE, [id, ])
t = PrettyTable(SharedVariables.projection_col)
for row in data:
t.add_row([row[0], row[1], row[2], row[3], (100 - row[4])])
return str(t)
except Exception:
print("Database not initilized or connected!")
if __name__ == '__main__':
from database.connection.database_connection import Database
SharedVariables.database = Database()
Movies.add_movie("Baywatch", 10)
print(Movies.get_movie(2))
| true | true |
f71f7f0a14770a0fbed65f68d8dd2ab2c222a92a | 5,067 | py | Python | cardinal_pythonlib/cmdline.py | RudolfCardinal/pythonlib | 4c583ad1aae3c1166a4e6f964df87eb6c02a73cb | [
"Apache-2.0"
] | 10 | 2015-09-30T02:46:48.000Z | 2021-07-23T05:03:38.000Z | cardinal_pythonlib/cmdline.py | RudolfCardinal/pythonlib | 4c583ad1aae3c1166a4e6f964df87eb6c02a73cb | [
"Apache-2.0"
] | 9 | 2019-07-04T11:10:31.000Z | 2021-09-23T21:11:42.000Z | cardinal_pythonlib/cmdline.py | RudolfCardinal/pythonlib | 4c583ad1aae3c1166a4e6f964df87eb6c02a73cb | [
"Apache-2.0"
] | 4 | 2017-07-17T15:17:44.000Z | 2021-07-23T05:03:41.000Z | #!/usr/bin/env python
# cardinal_pythonlib/cmdline.py
"""
===============================================================================
Original code copyright (C) 2009-2021 Rudolf Cardinal (rudolf@pobox.com).
This file is part of cardinal_pythonlib.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===============================================================================
**Functions for manipulating command-line parameters.**
"""
import re
# import shlex
import subprocess
import sys
from typing import List, Union
def cmdline_split(s: str, platform: Union[int, str] = 'this') -> List[str]:
"""
As per
https://stackoverflow.com/questions/33560364/python-windows-parsing-command-lines-with-shlex.
Multi-platform variant of ``shlex.split()`` for command-line splitting.
For use with ``subprocess``, for ``argv`` injection etc. Using fast REGEX.
Args:
s:
string to split
platform:
- ``'this'`` = auto from current platform;
- ``1`` = POSIX;
- ``0`` = Windows/CMD
- (other values reserved)
""" # noqa
if platform == 'this':
platform = (sys.platform != 'win32') # RNC: includes 64-bit Windows
if platform == 1: # POSIX
re_cmd_lex = r'''"((?:\\["\\]|[^"])*)"|'([^']*)'|(\\.)|(&&?|\|\|?|\d?\>|[<])|([^\s'"\\&|<>]+)|(\s+)|(.)''' # noqa
elif platform == 0: # Windows/CMD
re_cmd_lex = r'''"((?:""|\\["\\]|[^"])*)"?()|(\\\\(?=\\*")|\\")|(&&?|\|\|?|\d?>|[<])|([^\s"&|<>]+)|(\s+)|(.)''' # noqa
else:
raise AssertionError(f"unknown platform {platform!r}")
args = []
accu = None # collects pieces of one arg
for qs, qss, esc, pipe, word, white, fail in re.findall(re_cmd_lex, s):
if word:
pass # most frequent
elif esc:
word = esc[1]
elif white or pipe:
if accu is not None:
args.append(accu)
if pipe:
args.append(pipe)
accu = None
continue
elif fail:
raise ValueError("invalid or incomplete shell string")
elif qs:
word = qs.replace(r'\"', '"').replace(r'\\', '\\')
# ... raw strings can't end in single backslashes;
# https://stackoverflow.com/questions/647769/why-cant-pythons-raw-string-literals-end-with-a-single-backslash # noqa
if platform == 0:
word = word.replace('""', '"')
else:
word = qss # may be even empty; must be last
accu = (accu or '') + word
if accu is not None:
args.append(accu)
return args
def cmdline_quote_posix(seq: List[str]) -> str:
"""
Quotes arguments for POSIX, producing a single string suitable for
copying/pasting.
Based on subprocess.list2cmdline().
"""
result = [] # type: List[str]
for arg in seq:
bs_buf = [] # type: List[str]
# Add a space to separate this argument from the others
if result:
result.append(' ')
# Modified here: quote arguments with "*"
needquote = (" " in arg) or ("\t" in arg) or ("*" in arg) or not arg
if needquote:
result.append('"')
for c in arg:
if c == '\\':
# Don't know if we need to double yet.
bs_buf.append(c)
elif c == '"':
# Double backslashes.
result.append('\\' * len(bs_buf) * 2)
bs_buf = []
result.append('\\"')
else:
# Normal char
if bs_buf:
result.extend(bs_buf)
bs_buf = []
result.append(c)
# Add remaining backslashes, if any.
if bs_buf:
result.extend(bs_buf)
if needquote:
result.extend(bs_buf)
result.append('"')
return ''.join(result)
def cmdline_quote(args: List[str], platform: Union[int, str] = 'this') -> str:
"""
Convert a list of command-line arguments to a suitably quoted command-line
string that should be copy/pastable into a comand prompt.
"""
if platform == 'this':
platform = (sys.platform != 'win32') # RNC: includes 64-bit Windows
if platform == 1: # POSIX
return cmdline_quote_posix(args)
elif platform == 0: # Windows/CMD
return subprocess.list2cmdline(args)
else:
raise AssertionError(f"unknown platform {platform!r}")
| 32.273885 | 129 | 0.532465 |
import re
import subprocess
import sys
from typing import List, Union
def cmdline_split(s: str, platform: Union[int, str] = 'this') -> List[str]:
if platform == 'this':
platform = (sys.platform != 'win32')
if platform == 1:
re_cmd_lex = r'''"((?:\\["\\]|[^"])*)"|'([^']*)'|(\\.)|(&&?|\|\|?|\d?\>|[<])|([^\s'"\\&|<>]+)|(\s+)|(.)''' # noqa
elif platform == 0: # Windows/CMD
re_cmd_lex = r'''"((?:""|\\["\\]|[^"])*)"?()|(\\\\(?=\\*")|\\")|(&&?|\|\|?|\d?>|[<])|([^\s"&|<>]+)|(\s+)|(.)'''
else:
raise AssertionError(f"unknown platform {platform!r}")
args = []
accu = None
for qs, qss, esc, pipe, word, white, fail in re.findall(re_cmd_lex, s):
if word:
pass
elif esc:
word = esc[1]
elif white or pipe:
if accu is not None:
args.append(accu)
if pipe:
args.append(pipe)
accu = None
continue
elif fail:
raise ValueError("invalid or incomplete shell string")
elif qs:
word = qs.replace(r'\"', '"').replace(r'\\', '\\')
# https://stackoverflow.com/questions/647769/why-cant-pythons-raw-string-literals-end-with-a-single-backslash # noqa
if platform == 0:
word = word.replace('""', '"')
else:
word = qss # may be even empty; must be last
accu = (accu or '') + word
if accu is not None:
args.append(accu)
return args
def cmdline_quote_posix(seq: List[str]) -> str:
result = [] # type: List[str]
for arg in seq:
bs_buf = [] # type: List[str]
# Add a space to separate this argument from the others
if result:
result.append(' ')
# Modified here: quote arguments with "*"
needquote = (" " in arg) or ("\t" in arg) or ("*" in arg) or not arg
if needquote:
result.append('"')
for c in arg:
if c == '\\':
# Don't know if we need to double yet.
bs_buf.append(c)
elif c == '"':
# Double backslashes.
result.append('\\' * len(bs_buf) * 2)
bs_buf = []
result.append('\\"')
else:
if bs_buf:
result.extend(bs_buf)
bs_buf = []
result.append(c)
if bs_buf:
result.extend(bs_buf)
if needquote:
result.extend(bs_buf)
result.append('"')
return ''.join(result)
def cmdline_quote(args: List[str], platform: Union[int, str] = 'this') -> str:
if platform == 'this':
platform = (sys.platform != 'win32') # RNC: includes 64-bit Windows
if platform == 1: # POSIX
return cmdline_quote_posix(args)
elif platform == 0: # Windows/CMD
return subprocess.list2cmdline(args)
else:
raise AssertionError(f"unknown platform {platform!r}")
| true | true |
f71f809f8758a5472aea90c604d0f3c9e8cb4804 | 25,645 | py | Python | autolabeling.py | MGH-LMIC/CXR-autolabeling | 74eac30bb6eaa6c1d5a8b343743024ef6bd9db7d | [
"Apache-2.0"
] | null | null | null | autolabeling.py | MGH-LMIC/CXR-autolabeling | 74eac30bb6eaa6c1d5a8b343743024ef6bd9db7d | [
"Apache-2.0"
] | null | null | null | autolabeling.py | MGH-LMIC/CXR-autolabeling | 74eac30bb6eaa6c1d5a8b343743024ef6bd9db7d | [
"Apache-2.0"
] | null | null | null | import re
import pickle
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cm as mpl_color_map
from tqdm import tqdm
from pathlib import Path
from prettytable import PrettyTable
from scipy.ndimage import gaussian_filter
from sklearn.metrics import roc_curve, precision_recall_curve
import torch
import torchnet as tnt
import torch.nn.functional as F
from utils import logger
from environment import TestEnvironment, initialize, print_label_name
from gradcam import GradCam, save_class_activation_images
from data import CxrDataset, EXT_DATA_BASE
from atlasmethod import EX_AI
import time
# Toggle to export every study's flattened ensemble Grad-CAM map into a CSV
# "atlas" (gradcam_atlas_<atlas_name>.csv) during ensemble evaluation.
ATLAS_GEN = False
# Finding whose Grad-CAM atlas is generated when ATLAS_GEN is True; valid values:
atlas_name = 'cardiomegaly'
# 'cardiomegaly', 'atelectasis', 'pulmonary_edema', 'pneumonia', 'pleural_effusion'
class Tester:
def __init__(self, env, pt_runtime="test", fn_net=None, fl_gradcam=False, cls_gradcam=None, id_prob=None, fl_ensemble=False, fl_exai=False, f_name='sim', f_csv=None):
self.env = env
self.pt_runtime = pt_runtime
self.fl_prob = False if id_prob == None else True
self.id_prob = id_prob
self.f_name = f_name
self.fl_ensemble = fl_ensemble
# for multiple class and binary label tasks
self.pf_metric = {
'loss': [],
'accuracy': [],
'sensitivity': [],
'specificity': [],
'auc_score': [],
'ap_score': [],
'mse_score': []
}
self.fn_net = fn_net
self.fl_gradcam = fl_gradcam
self.cls_gradcam = cls_gradcam
self.th_gradcam = 0.5
self.fl_gradcam_save = True
#explainable methods
self.fl_exai = fl_exai
if self.fl_exai:
self.fl_gradcam = True
self.cls_gradcam = [
'Hilar/mediastinum>Cardiomegaly>.',
'Lung density>Increased lung density>Atelectasis',
'Lung density>Increased lung density>Pulmonary edema',
'Lung density>Increased lung density>pneumonia',
'Pleura>Pleural effusion>.'
]
self.th_gradcam = 0.5
self.ex_method = EX_AI(env, pt_runtime=pt_runtime, thr=0.5, f_name=f_name, ext_data_csv=f_csv)
def load(self):
pt_file = self.pt_runtime.joinpath(f'train.pkl')
with open(pt_file, 'rb') as f:
self.pf_metric = pickle.load(f)
def test_evaluation(self, epoch=1, fl_save=False):
if self.fn_net == None:
pt_model = self.pt_runtime.joinpath(f'model_epoch_{epoch:04d}.pth.tar')
else:
pt_model = self.pt_runtime.joinpath(str(self.fn_net))
self.env.load_model(pt_model)
try:
self.load()
except:
logger.debug('there is no pkl to load.')
_, _, _ = self.test(epoch, self.env.test_loader, fl_save=fl_save)
if False:
self.algorithm_attribution(self.env.gradcam_loader)
if self.fl_gradcam:
_, _, _ = self.gradcam_data(self.env.gradcam_loader)
    def test_ensemble_evaluation(self, epoch=1, fl_save=False, n_ens=1):
        """Evaluate an ensemble of ``n_ens`` checkpoints on the test set.

        Each member checkpoint is named ``<fn_net>_<k:02d>.pth.tar``.  Pass 1
        collects per-model predictions and averages them via
        ``ensemble_performance``; pass 2 (when Grad-CAM is enabled) reloads
        each member, accumulates its Grad-CAM maps, renders the combined maps,
        and optionally feeds them to the atlas-based self-annotation method
        and/or exports them as an atlas CSV.

        Args:
            epoch: epoch index forwarded to ``self.test`` (reporting only).
            fl_save: forwarded to ``ensemble_performance`` to save ROC data.
            n_ens: number of ensemble members to load and evaluate.
        """
        predict = []
        target = []
        if self.fl_gradcam:
            # NOTE(review): this buffer is re-created below before it is ever
            # used, so this first allocation appears redundant -- confirm.
            cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
        if ATLAS_GEN:
            # One row per study; 256 columns = a flattened 16x16 Grad-CAM map.
            gradcam_df = pd.DataFrame(columns=[f'{x:03d}' for x in range(256)])
        # Pass 1: run every member model on the test set and collect outputs.
        for k in range(n_ens):
            pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
            self.env.load_model(pt_model)
            #logger.info(f'network to test: {self.env.model}')
            try:
                self.load()
            except:
                logger.debug('there is no pkl to load.')
            _, pred, tar = self.test(epoch, self.env.test_loader, fl_save=False)
            predict.append(pred)
            target.append(tar)
        # evaluate ensemble's performance
        prob_ens = self.ensemble_performance(predict, target, n_ens, fl_save=fl_save)
        if self.fl_exai:
            # Drop column 0 (presumably a background/normal score -- TODO
            # confirm) and attach image paths for the self-annotation input.
            prob_in = pd.DataFrame(prob_ens.cpu().numpy()[:,1:])
            prob_in['PATH'] = self.env.test_loader.dataset.entries['PATH']
            self.ex_method.input_preparation(prob_in)
        if self.fl_gradcam:
            # Pass 2: reload each member and accumulate its Grad-CAM maps.
            # NOTE(review): accumulation starts from ones and uses '+=' (the
            # multiplicative variant is commented out below), so the aggregate
            # carries a constant +1 offset -- verify this is intended.
            cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
            for k in range(n_ens):
                pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
                self.env.load_model(pt_model)
                start = time.time()
                _, _, cam = self.gradcam_data(self.env.gradcam_loader, prob_ens=prob_ens)
                #review_cam
                #cams *= cam
                cams += cam
                end = time.time()
                print(f'{k:02d} model gradcam time: {end-start} sec')
            # Final pass with ens_flg=True renders/saves the combined maps.
            _, _, cams = self.gradcam_data(self.env.gradcam_loader, ens_flg=True, cams_ens=cams, prob_ens=prob_ens)
            if self.fl_exai:
                start = time.time()
                self.ex_method.run(cams)
                end = time.time()
                print(f'self-annotation time: {end-start} sec')
            if ATLAS_GEN:
                # Export each study's flattened ensemble Grad-CAM as a CSV atlas.
                for k in range(len(self.env.gradcam_loader)):
                    gradcam_df.loc[k] = cams[k].flatten()
                    print(f"[{atlas_name}]Atlas generation: {k:5d}")
                gradcam_df['PATH'] = self.env.gradcam_loader.dataset.entries['PATH']
                gradcam_df.to_csv(self.pt_runtime.joinpath(f'gradcam_atlas_{atlas_name}.csv'), index=False)
def ensemble_performance(self, predict, target, n_ens, fl_save=False):
pred_ens = torch.zeros(predict[0].shape).to(self.env.device)
#pred_ens = np.zeros(predict[0].shape)
for i in range(n_ens):
pred_ens += torch.from_numpy(predict[i]).to(self.env.device)
pred_ens /= n_ens
targ_ens = torch.from_numpy(target[0]).to(self.env.device)
aucs, aps = self.AUC_AP_metric(pred_ens, targ_ens)
correct, total = self.ACC_metric(pred_ens, targ_ens)
self.Per_print(correct=correct, total=total, aucs=aucs, aps=aps)
if fl_save:
test_set = self.env.test_loader.dataset
labels = self.env.labels
self.roc_evaluation(test_set, pred_ens, targ_ens, labels)
return pred_ens
def AUC_AP_metric(self, output, target):
out_dim = output.shape[1]
aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
aps = [tnt.meter.APMeter() for i in range(out_dim)]
for i in range(out_dim):
mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
try:
aucs[i].add(mask_out, mask_tar)
aps[i].add(mask_out, mask_tar)
except:
continue
return aucs, aps
def MSE__metric(self, output, target):
out_dim = 1
mses = [tnt.meter.MSEMeter() for i in range(out_dim)]
mses[0].add(output[:, -1], target[:, -1])
return mses
def ACC_metric(self, output, target):
mask_out, mask_tar = self.mask_pred(output, target)
ones = torch.ones(mask_out.shape).int().to(self.env.device)
zeros = torch.zeros(mask_out.shape).int().to(self.env.device)
pred = torch.where(mask_out > 0.5, ones, zeros)
correct = pred.eq(mask_tar.int()).sum().item()
total = len(mask_tar)
return correct, total
def Per_print(self, correct=None, total=None, aucs=None, aps=None, mses=None):
    """Log ensemble accuracy plus a per-class AUROC/AP PrettyTable.

    `aucs`/`aps` are lists of torchnet meters (one per class); classes whose
    meter holds no data are shown as "-". `mses` is accepted but unused.
    The average AUC/AP shown in the table title skips NaN-valued meters.
    """
    labels = self.env.labels
    out_dim = len(aucs)
    percent = 100. * correct / total
    logger.info(f"accuracy {correct}/{total} "
                f"({percent:.2f}%)")
    p = PrettyTable()
    p.field_names = ["findings", "auroc score", "ap score"]
    auc_cnt = out_dim
    for i in range(out_dim):
        try:
            #p.add_row([labels[i], f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
            # "E-" prefix marks ensemble-averaged rows in the log
            p.add_row([f'E-{labels[i]}', f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
        except:
            # a meter with no data raises when asked for a value
            p.add_row([labels[i], "-", "-"])
    try:
        # AUCMeter.value() returns a tuple, APMeter.value() a tensor;
        # the type checks below skip meters that never received data.
        list_aucs=[]
        for k in aucs:
            if type(k.value()) == tuple:
                if np.isnan(k.value()[0]) == False:
                    list_aucs.append(k.value()[0])
        list_aps=[]
        for k in aps:
            if type(k.value()) == torch.Tensor:
                if np.isnan(k.value()[0]) == False:
                    list_aps.append(k.value()[0])
        ave_auc = np.mean(list_aucs)
        ave_ap = np.mean(list_aps)
        tbl_str = p.get_string(title=f"Ensemble-performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
        logger.info(f"\n{tbl_str}")
    except:
        print("We cannot calcuate average acu scores")
        # NOTE(review): these fallbacks are local and never returned/stored
        ave_auc = 0
        ave_ap = 0
def test(self, epoch, test_loader, fl_save=False):
    """Run one full evaluation pass over `test_loader`.

    Accumulates sigmoid probabilities and targets over the whole loader,
    feeds per-class AUROC/AP meters (unknown labels == -1 are masked out),
    tracks overall accuracy on raw logits (positive iff logit > 0, i.e.
    probability > 0.5), logs a PrettyTable summary and records the epoch's
    metrics in self.pf_metric. With fl_save, ROC files are exported and —
    if fl_prob — the tracked class's probabilities are cached in
    self.df_prob for save_prob().

    Returns (mean_auc, probability ndarray, target ndarray).
    """
    test_set = test_loader.dataset
    out_dim = self.env.out_dim
    labels = self.env.labels
    # one AUROC meter and one AP meter per output class
    aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
    aps = [tnt.meter.APMeter() for i in range(out_dim)]
    CxrDataset.eval()  # switch dataset transforms to evaluation mode
    self.env.model.eval()
    with torch.no_grad():
        correct = 0
        total = 0
        predict_seq = torch.FloatTensor().to(self.env.device)
        target_seq = torch.FloatTensor().to(self.env.device)
        tqdm_desc = f'testing '
        t = tqdm(enumerate(test_loader), total=len(test_loader), desc=tqdm_desc,
                 dynamic_ncols=True)
        for bt_idx, tp_data in t:
            output, target = self.test_batch(tp_data)
            # Network outputs: store probabilities (sigmoid of logits) for export
            predict_seq = torch.cat((predict_seq, F.sigmoid(output)), dim=0)
            target_seq = torch.cat((target_seq, target), dim=0)
            for i in range(out_dim):
                # per-class meters only see entries whose label is known (!= -1)
                mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
                try:
                    aucs[i].add(mask_out, mask_tar)
                    aps[i].add(mask_out, mask_tar)
                except:
                    # meter rejects empty/degenerate input for this class
                    continue
            # overall accuracy computed on raw logits: > 0 means probability > 0.5
            mask_out, mask_tar = self.mask_pred(output, target)
            ones = torch.ones(mask_out.shape).int().to(self.env.device)
            zeros = torch.zeros(mask_out.shape).int().to(self.env.device)
            pred = torch.where(mask_out > 0., ones, zeros)
            correct += pred.eq(mask_tar.int()).sum().item()
            total += len(mask_tar)
            #pred = torch.where(output > 0., ones, zeros)
            #correct += pred.eq(target.int()).sum().item()
        #total = len(test_loader.sampler) * out_dim
        percent = 100. * correct / total
        logger.info(f"val epoch {epoch:03d}: "
                    f"accuracy {correct}/{total} "
                    f"({percent:.2f}%)")
        p = PrettyTable()
        p.field_names = ["findings", "auroc score", "ap score"]
        auc_cnt = out_dim
        for i in range(out_dim):
            try:
                p.add_row([labels[i], f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
            except:
                # meter with no data cannot produce a value; show placeholders
                p.add_row([labels[i], "-", "-"])
        if fl_save:
            self.roc_evaluation(test_set, predict_seq, target_seq, labels)
            if self.fl_prob:
                # cache the tracked class's probabilities for save_prob()
                self.df_prob = pd.DataFrame()
                self.df_prob['PATH_CHECK'] = test_set.entries['PATH']
                self.df_prob['PROB'] = predict_seq.cpu().numpy()[:, self.id_prob]
        try:
            # AUCMeter.value() is a tuple, APMeter.value() a tensor; the type
            # checks skip meters that never received data, NaNs are dropped.
            list_aucs=[]
            for k in aucs:
                if type(k.value()) == tuple:
                    if np.isnan(k.value()[0]) == False:
                        list_aucs.append(k.value()[0])
            list_aps=[]
            for k in aps:
                if type(k.value()) == torch.Tensor:
                    if np.isnan(k.value()[0]) == False:
                        list_aps.append(k.value()[0])
            ave_auc = np.mean(list_aucs)
            ave_ap = np.mean(list_aps)
            tbl_str = p.get_string(title=f"performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
            logger.info(f"\n{tbl_str}")
        except:
            print("We cannot calcuate average auc scores")
            ave_auc = 0
            ave_ap = 0
        self.pf_metric[f'accuracy'].append((epoch, correct / total))
        self.pf_metric[f'auc_score'].append((epoch, ave_auc))
        self.pf_metric[f'ap_score'].append((epoch, ave_ap))
    return ave_auc, predict_seq.cpu().numpy(), target_seq.cpu().numpy()
def mask_pred(self, output, target):
mask_one = torch.ones(output.shape, dtype=torch.uint8, device=self.env.device)
mask_zero = torch.zeros(output.shape, dtype=torch.uint8, device=self.env.device)
#mask = torch.where(target == -1, mask_zero, mask_one)
mask = torch.where(target == -1, mask_zero, mask_one).bool()
mask_output = output.masked_select(mask.to(self.env.device))
mask_target = target.masked_select(mask.to(self.env.device))
return mask_output, mask_target
def test_batch(self, tp_data, fl_input=False):
    """Move one batch to the device and run the model.

    Batch layout depends on self.env.type:
      0 -> (image, target, info): model consumes the image only;
      1 -> (image1, image2, target): dual-input model;
      3 -> (image, target, info): model consumes image plus side info.
    Returns (output, target), or (data, info, output) when fl_input is True.
    NOTE(review): with an unsupported type (or fl_input=True under type 1)
    some of the returned names are never bound — confirm callers only use
    types 0/1/3 and fl_input only with types 0/3.
    """
    # to support different types of models.
    if self.env.type == 0:
        data = tp_data[0]
        target = tp_data[1]
        info = tp_data[2]
        data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
        #data, target = data.to(self.env.device), target.to(self.env.device)
        #network output
        output = self.env.model(data)
    elif self.env.type == 1:
        data1 = tp_data[0]
        data2 = tp_data[1]
        target = tp_data[2]
        data1, data2, target = data1.to(self.env.device), data2.to(self.env.device), target.to(self.env.device)
        #network output
        output = self.env.model(data1, data2)
    elif self.env.type == 3:
        data = tp_data[0]
        target = tp_data[1]
        info = tp_data[2]
        data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
        #network output
        output = self.env.model(data, info)
    if fl_input == False:
        return output, target
    else:
        return data, info, output
def gradcam_data(self, test_loader, hmp_dims=(512,512), ens_flg=False, cams_ens=None, prob_ens=None):
    """Drive Grad-CAM generation over every sample in `test_loader`.

    Dispatch:
      * cls_gradcam is None -> save a heatmap for the top-scoring class;
      * ensemble mode (fl_ensemble) -> collect per-class 16x16 low-res CAMs,
        optionally fused with `cams_ens` on the second pass (ens_flg=True);
      * otherwise -> save a heatmap per named target class.
    Returns (result_list, path_list, cams); cams has shape
    (n_samples, n_classes, 16, 16) and is only filled in ensemble mode.
    Assumes the loader yields (data, target, info) with batch size 1 —
    TODO confirm against the gradcam_loader construction.
    """
    # threshold to draw a heatmap
    out_dim = self.env.out_dim
    CxrDataset.eval()
    self.env.model.eval()
    #with torch.no_grad():
    gradcam_res_list = []
    gradcam_path_list = []
    cams = np.zeros((len(test_loader), len(self.cls_gradcam), 16, 16))
    grad_cam = GradCam(self.env.model, self.env.type)
    for batch_idx, (data, target, info) in enumerate(test_loader):
        #data, target = data.to(self.env.device), target.to(self.env.device)
        data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
        # Grad CAM
        #grad_cam = GradCam(self.env.model, self.env.type)
        if self.cls_gradcam == None:
            gradcam_res, gradcam_path = self.gradcam_save_maxcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
        else:
            if self.fl_ensemble:
                cam = self.gradcam_save_argcls_ens(grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=ens_flg, cams_ens=cams_ens, prob_ens=prob_ens)
            else:
                gradcam_res, gradcam_path = self.gradcam_save_argcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
        try:
            if self.fl_ensemble:
                cams[batch_idx, :, :, :] = cam
            else:
                # gradcam_res may be None (below threshold) or a plain list;
                # .tolist() then raises and the sample is skipped below
                gradcam_res_list.append(gradcam_res.tolist())
                gradcam_path_list.append(gradcam_path)
        except AttributeError as e:
            print("No GradCam result?")
    if False:
        # disabled: contact-sheet generation
        self.gradcam_thumbnail()
    return gradcam_res_list, gradcam_path_list, cams
def gradcam_save_maxcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
    """Save a Grad-CAM heatmap for the model's top-scoring class.

    A real overlay is only drawn when the class probability reaches
    self.th_gradcam; below that a no-plot marker (noPlotflg) is passed to
    the saver instead. Returns (cam_result, path_name) when drawn,
    otherwise (None, None).
    """
    if self.env.type == 3:
        cam, prob, tcls = grad_cam.generate_cam(data, info)
    else:
        cam, prob, tcls = grad_cam.generate_cam(data)
    noPlotflg = np.array([-1])  # sentinel: tells the saver to skip the overlay
    # when we draw gradcam, we have to batch size as 1.
    file_name = test_loader.dataset.entries['PATH'][batch_idx]
    path_name = file_name.split(".")[0]
    if prob >= self.th_gradcam:
        target_class = self.env.labels[tcls]
        # build a filesystem-safe suffix from the class name
        label_list = re.split(' \- |\/| ', target_class)
        label_name = "_".join(label_list)
        path_name = "_".join([path_name, label_name])
        cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image"), path_name, hmp_dims)
        return cam_rs, path_name
    else:
        cam_rs = save_class_activation_images(data, noPlotflg, self.pt_runtime.joinpath("gradcam_image"), path_name, hmp_dims)
        return None, None
def gradcam_save_argcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
    """Render Grad-CAM heatmaps for every class named in self.cls_gradcam.

    'all' as the first entry expands to every label. A heatmap image is
    only written when the class probability reaches self.th_gradcam.
    Returns (cam_list, path_list); cam_list stays empty (kept for interface
    compatibility with callers), path_list holds one path name per class.

    Fix: cam_list/path_list used to be (re)bound inside the loop, so an
    empty cls_gradcam raised NameError at the return and only the last
    iteration's path survived; they are now initialized before the loop
    and path_list accumulates every iteration.
    """
    if self.cls_gradcam[0] == 'all':
        self.cls_gradcam = self.env.labels
    cam_list = []
    path_list = []
    for i, nm_tcls in enumerate(self.cls_gradcam):
        # map the class name string to its index among the model outputs
        id_tcls = self.env.labels.index(nm_tcls)
        if self.env.type == 3:
            cam, prob, tcls = grad_cam.generate_cam(data, info, target_class=id_tcls)
        else:
            # the final linear layer's class weight vector drives the CAM
            cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
            cam, prob, tcls, _ = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w)
        # when we draw gradcam, we have to batch size as 1.
        file_name = test_loader.dataset.entries['PATH'][batch_idx]
        path_name = file_name.split(".")[0]
        target_class = self.env.labels[tcls]
        # build a filesystem-safe label suffix from the hierarchical class name
        label_list = re.split(r' \- |\/| ', target_class)
        label_name = "_".join(label_list)
        label_name = label_name.strip('>.').split('>')[-1]
        #path_name = "_".join([f'{int(prob*1000):04d}', path_name, label_name])
        if prob >= self.th_gradcam:
            save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image_{label_name}"), path_name, hmp_dims)
        path_list.append(path_name)
    return cam_list, path_list
def gradcam_save_argcls_ens(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=False, cams_ens=None, prob_ens=None):
    """Collect per-class 16x16 low-res CAMs for one sample in ensemble mode.

    Per class: if the ensemble probability prob_ens[batch_idx, cls] clears
    self.th_gradcam AND ens_flg is True, a CAM fused with the accumulated
    `cams_ens` is generated and the heatmap image is written (to the atlas
    directory when ATLAS_GEN, otherwise under explain_sample/). Below
    threshold, a plain low-res CAM (th_cam=0.5) is collected without
    saving an image. Returns a (n_classes, 16, 16) array.
    NOTE(review): on the first ensemble pass (ens_flg False), slots for
    above-threshold classes stay zero-filled — confirm this is intended.
    """
    if self.cls_gradcam[0] == 'all':
        self.cls_gradcam = self.env.labels
    cams = np.zeros((len(self.cls_gradcam), 16, 16))
    for i, nm_tcls in enumerate(self.cls_gradcam):
        ## need to implement to find index among self.env.labels from string of target class
        ## code start here!!!!
        id_tcls = self.env.labels.index(nm_tcls)
        cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
        if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
            if ens_flg == True:
                # fuse this member's CAM with the running ensemble CAM
                cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, ens_flg=True, ens_cam=cams_ens[batch_idx, i, :, :])
                cams[i, :, :] = cam_low
                noPlotflg = np.array([-1])
                # when we draw gradcam, we have to batch size as 1.
                file_name = test_loader.dataset.entries['PATH'][batch_idx]
                path_name = file_name.split(".")[0]
                label_name = print_label_name[tcls]
                if ATLAS_GEN:
                    label_name = f"ATLAS_{atlas_name}"
                #if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
                if ATLAS_GEN:
                    cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"{label_name}"), path_name, hmp_dims)
                else:
                    if '/' in path_name:
                        # PATH entries may contain sub-directories; create them first
                        self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}/{path_name}").parent.mkdir(parents=True, exist_ok=True)
                    cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}"), path_name, hmp_dims)
        else:
            #review_cam
            cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, th_cam=0.5)
            cams[i, :, :] = cam_low
    return cams
def roc_evaluation(self, test_set, predict_seq, target_seq, labels):
    """Export per-class probabilities and ROC artifacts to runtime/roc_result.

    Writes one Excel sheet with all probabilities + ground truth, then per
    class (skipping classes with no known labels) a sensitivity/specificity
    sheet and a ROC-curve PNG.

    Fix: every iteration opened a new matplotlib figure without closing it,
    leaking up to out_dim figures per call (matplotlib warns past 20 open
    figures); figures are now closed after saving. The dead `if False:`
    precision-recall plotting branch and its now-unused
    precision_recall_curve computation were removed.
    """
    out_dim = self.env.out_dim
    df_data = pd.DataFrame()
    df_data['PATH'] = test_set.entries['PATH']
    for i in range(out_dim):
        df_data[f'{labels[i]}'] = predict_seq.cpu().numpy()[:, i]
        df_data[f'{labels[i]}_GT'] = target_seq.cpu().numpy()[:, i]
    t = self.pt_runtime.joinpath('roc_result')
    Path.mkdir(t, parents=True, exist_ok=True)
    df_data.to_excel(t.joinpath('save_predicted_probabilities.xlsx'))
    roc_dim = out_dim
    for i in range(roc_dim):
        # only score entries whose label is known (!= -1)
        mask_out, mask_tar = self.mask_pred(predict_seq[:, i], target_seq[:, i])
        if mask_tar.cpu().numpy().size != 0 :
            fpr, tpr, thresholds = roc_curve(mask_tar.cpu().numpy(), mask_out.cpu().numpy())
            df = pd.DataFrame()
            df[f'specificity'] = 1-fpr
            df[f'sensitivity'] = tpr
            df[f'thresholds'] = thresholds
            label_name = print_label_name[i]
            df.to_excel(t.joinpath(f'save_{i:03d}_{label_name}_sensitivity_specificity.xlsx'))
            del df
            # ROC plot
            fig, ax1 = plt.subplots(1,1)
            ax1.plot(fpr, tpr, color = 'darkorange', lw = 2, label = f'{label_name}')
            ax1.set_title(f'ROC curve for {label_name}')
            ax1.set(xlabel='False positive rate', ylabel='True positive rate')
            plt.savefig(t.joinpath(f'{i:03d}_{label_name}_curve.png'))
            plt.close(fig)  # release the figure; avoids leaking one per class
def save_prob(self, input_file, save_path):
    """Merge the cached per-image probabilities into the input CSV.

    Requires self.df_prob (populated by test(fl_save=True) with fl_prob set);
    inserts the path-check and probability columns at position 6 and writes
    the result as an Excel file to `save_path`.
    """
    sheet = pd.read_csv(input_file)
    sheet.insert(6, 'prob', self.df_prob.PROB)
    sheet.insert(6, 'path_check', self.df_prob.PATH_CHECK)
    sheet.to_excel(save_path)
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="Testng Our Explainable AI Model on CXR")
    parser.add_argument('--cuda', default=None, type=str, help="use GPUs with its device ids, separated by commas")
    args = parser.parse_args()
    # Hard-wired demo configuration — only --cuda comes from the command line.
    args.in_dim = 1            # single-channel (grayscale) CXR input
    args.out_dim = 21          # number of output findings
    args.labels = None         # use the environment's default label set
    args.paths = None
    args.runtime_dir = 'autolabeling'
    args.type = 0              # model type 0: image-only input (see test_batch)
    args.pretr_net = 'pa_feat_model'   # checkpoint base name; '_{k:02d}.pth.tar' appended per member
    args.gradcam = False
    args.gradcam_cls = None
    args.fl_save = False
    args.id_prob = None
    args.test_csv = 'autolabeling_5_features_490_cases.csv'
    args.arch = None
    args.Nens = 6              # ensemble size; 1 disables ensembling below
    args.exai = True           # enables the self-annotation (EX_AI) pipeline
    args.simname = 'Outputs'
    args.seed = -1
    runtime_path, device = initialize(args, fl_demo=True)
    fl_ensemble = False if args.Nens == 1 else True
    # start training
    env = TestEnvironment(device, mtype=args.type, in_dim=args.in_dim, out_dim=args.out_dim, name_labels=args.labels, name_paths=args.paths, testset_csv=args.test_csv, name_model=args.arch, r_seed=args.seed)
    t = Tester(env, pt_runtime=runtime_path, fn_net=args.pretr_net, fl_gradcam=args.gradcam, cls_gradcam=args.gradcam_cls, id_prob=args.id_prob, fl_ensemble=fl_ensemble, fl_exai=args.exai, f_name=args.simname, f_csv=args.test_csv)
    if(fl_ensemble):
        t.test_ensemble_evaluation(fl_save=args.fl_save, n_ens = args.Nens)
    else:
        t.test_evaluation(fl_save=args.fl_save)
| 40.259027 | 230 | 0.569429 | import re
import pickle
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.cm as mpl_color_map
from tqdm import tqdm
from pathlib import Path
from prettytable import PrettyTable
from scipy.ndimage import gaussian_filter
from sklearn.metrics import roc_curve, precision_recall_curve
import torch
import torchnet as tnt
import torch.nn.functional as F
from utils import logger
from environment import TestEnvironment, initialize, print_label_name
from gradcam import GradCam, save_class_activation_images
from data import CxrDataset, EXT_DATA_BASE
from atlasmethod import EX_AI
import time
ATLAS_GEN = False
atlas_name = 'cardiomegaly'
class Tester:
def __init__(self, env, pt_runtime="test", fn_net=None, fl_gradcam=False, cls_gradcam=None, id_prob=None, fl_ensemble=False, fl_exai=False, f_name='sim', f_csv=None):
self.env = env
self.pt_runtime = pt_runtime
self.fl_prob = False if id_prob == None else True
self.id_prob = id_prob
self.f_name = f_name
self.fl_ensemble = fl_ensemble
self.pf_metric = {
'loss': [],
'accuracy': [],
'sensitivity': [],
'specificity': [],
'auc_score': [],
'ap_score': [],
'mse_score': []
}
self.fn_net = fn_net
self.fl_gradcam = fl_gradcam
self.cls_gradcam = cls_gradcam
self.th_gradcam = 0.5
self.fl_gradcam_save = True
self.fl_exai = fl_exai
if self.fl_exai:
self.fl_gradcam = True
self.cls_gradcam = [
'Hilar/mediastinum>Cardiomegaly>.',
'Lung density>Increased lung density>Atelectasis',
'Lung density>Increased lung density>Pulmonary edema',
'Lung density>Increased lung density>pneumonia',
'Pleura>Pleural effusion>.'
]
self.th_gradcam = 0.5
self.ex_method = EX_AI(env, pt_runtime=pt_runtime, thr=0.5, f_name=f_name, ext_data_csv=f_csv)
def load(self):
pt_file = self.pt_runtime.joinpath(f'train.pkl')
with open(pt_file, 'rb') as f:
self.pf_metric = pickle.load(f)
def test_evaluation(self, epoch=1, fl_save=False):
if self.fn_net == None:
pt_model = self.pt_runtime.joinpath(f'model_epoch_{epoch:04d}.pth.tar')
else:
pt_model = self.pt_runtime.joinpath(str(self.fn_net))
self.env.load_model(pt_model)
try:
self.load()
except:
logger.debug('there is no pkl to load.')
_, _, _ = self.test(epoch, self.env.test_loader, fl_save=fl_save)
if False:
self.algorithm_attribution(self.env.gradcam_loader)
if self.fl_gradcam:
_, _, _ = self.gradcam_data(self.env.gradcam_loader)
def test_ensemble_evaluation(self, epoch=1, fl_save=False, n_ens=1):
predict = []
target = []
if self.fl_gradcam:
cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
if ATLAS_GEN:
gradcam_df = pd.DataFrame(columns=[f'{x:03d}' for x in range(256)])
for k in range(n_ens):
pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
self.env.load_model(pt_model)
try:
self.load()
except:
logger.debug('there is no pkl to load.')
_, pred, tar = self.test(epoch, self.env.test_loader, fl_save=False)
predict.append(pred)
target.append(tar)
prob_ens = self.ensemble_performance(predict, target, n_ens, fl_save=fl_save)
if self.fl_exai:
prob_in = pd.DataFrame(prob_ens.cpu().numpy()[:,1:])
prob_in['PATH'] = self.env.test_loader.dataset.entries['PATH']
self.ex_method.input_preparation(prob_in)
if self.fl_gradcam:
cams = np.ones((len(self.env.gradcam_loader), len(self.cls_gradcam), 16, 16))
for k in range(n_ens):
pt_model = self.pt_runtime.joinpath(str(self.fn_net)+f'_{k:02d}.pth.tar')
self.env.load_model(pt_model)
start = time.time()
_, _, cam = self.gradcam_data(self.env.gradcam_loader, prob_ens=prob_ens)
#review_cam
#cams *= cam
cams += cam
end = time.time()
print(f'{k:02d} model gradcam time: {end-start} sec')
_, _, cams = self.gradcam_data(self.env.gradcam_loader, ens_flg=True, cams_ens=cams, prob_ens=prob_ens)
if self.fl_exai:
start = time.time()
self.ex_method.run(cams)
end = time.time()
print(f'self-annotation time: {end-start} sec')
if ATLAS_GEN:
for k in range(len(self.env.gradcam_loader)):
gradcam_df.loc[k] = cams[k].flatten()
print(f"[{atlas_name}]Atlas generation: {k:5d}")
gradcam_df['PATH'] = self.env.gradcam_loader.dataset.entries['PATH']
gradcam_df.to_csv(self.pt_runtime.joinpath(f'gradcam_atlas_{atlas_name}.csv'), index=False)
def ensemble_performance(self, predict, target, n_ens, fl_save=False):
pred_ens = torch.zeros(predict[0].shape).to(self.env.device)
#pred_ens = np.zeros(predict[0].shape)
for i in range(n_ens):
pred_ens += torch.from_numpy(predict[i]).to(self.env.device)
pred_ens /= n_ens
targ_ens = torch.from_numpy(target[0]).to(self.env.device)
aucs, aps = self.AUC_AP_metric(pred_ens, targ_ens)
correct, total = self.ACC_metric(pred_ens, targ_ens)
self.Per_print(correct=correct, total=total, aucs=aucs, aps=aps)
if fl_save:
test_set = self.env.test_loader.dataset
labels = self.env.labels
self.roc_evaluation(test_set, pred_ens, targ_ens, labels)
return pred_ens
def AUC_AP_metric(self, output, target):
out_dim = output.shape[1]
aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
aps = [tnt.meter.APMeter() for i in range(out_dim)]
for i in range(out_dim):
mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
try:
aucs[i].add(mask_out, mask_tar)
aps[i].add(mask_out, mask_tar)
except:
continue
return aucs, aps
def MSE__metric(self, output, target):
out_dim = 1
mses = [tnt.meter.MSEMeter() for i in range(out_dim)]
mses[0].add(output[:, -1], target[:, -1])
return mses
def ACC_metric(self, output, target):
mask_out, mask_tar = self.mask_pred(output, target)
ones = torch.ones(mask_out.shape).int().to(self.env.device)
zeros = torch.zeros(mask_out.shape).int().to(self.env.device)
pred = torch.where(mask_out > 0.5, ones, zeros)
correct = pred.eq(mask_tar.int()).sum().item()
total = len(mask_tar)
return correct, total
def Per_print(self, correct=None, total=None, aucs=None, aps=None, mses=None):
labels = self.env.labels
out_dim = len(aucs)
percent = 100. * correct / total
logger.info(f"accuracy {correct}/{total} "
f"({percent:.2f}%)")
p = PrettyTable()
p.field_names = ["findings", "auroc score", "ap score"]
auc_cnt = out_dim
for i in range(out_dim):
try:
#p.add_row([labels[i], f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
p.add_row([f'E-{labels[i]}', f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
except:
p.add_row([labels[i], "-", "-"])
try:
list_aucs=[]
for k in aucs:
if type(k.value()) == tuple:
if np.isnan(k.value()[0]) == False:
list_aucs.append(k.value()[0])
list_aps=[]
for k in aps:
if type(k.value()) == torch.Tensor:
if np.isnan(k.value()[0]) == False:
list_aps.append(k.value()[0])
ave_auc = np.mean(list_aucs)
ave_ap = np.mean(list_aps)
tbl_str = p.get_string(title=f"Ensemble-performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
logger.info(f"\n{tbl_str}")
except:
print("We cannot calcuate average acu scores")
ave_auc = 0
ave_ap = 0
def test(self, epoch, test_loader, fl_save=False):
test_set = test_loader.dataset
out_dim = self.env.out_dim
labels = self.env.labels
aucs = [tnt.meter.AUCMeter() for i in range(out_dim)]
aps = [tnt.meter.APMeter() for i in range(out_dim)]
CxrDataset.eval()
self.env.model.eval()
with torch.no_grad():
correct = 0
total = 0
predict_seq = torch.FloatTensor().to(self.env.device)
target_seq = torch.FloatTensor().to(self.env.device)
tqdm_desc = f'testing '
t = tqdm(enumerate(test_loader), total=len(test_loader), desc=tqdm_desc,
dynamic_ncols=True)
for bt_idx, tp_data in t:
output, target = self.test_batch(tp_data)
# Network outputs
predict_seq = torch.cat((predict_seq, F.sigmoid(output)), dim=0)
target_seq = torch.cat((target_seq, target), dim=0)
for i in range(out_dim):
mask_out, mask_tar = self.mask_pred(output[:, i], target[:, i])
try:
aucs[i].add(mask_out, mask_tar)
aps[i].add(mask_out, mask_tar)
except:
continue
mask_out, mask_tar = self.mask_pred(output, target)
ones = torch.ones(mask_out.shape).int().to(self.env.device)
zeros = torch.zeros(mask_out.shape).int().to(self.env.device)
pred = torch.where(mask_out > 0., ones, zeros)
correct += pred.eq(mask_tar.int()).sum().item()
total += len(mask_tar)
#pred = torch.where(output > 0., ones, zeros)
#correct += pred.eq(target.int()).sum().item()
#total = len(test_loader.sampler) * out_dim
percent = 100. * correct / total
logger.info(f"val epoch {epoch:03d}: "
f"accuracy {correct}/{total} "
f"({percent:.2f}%)")
p = PrettyTable()
p.field_names = ["findings", "auroc score", "ap score"]
auc_cnt = out_dim
for i in range(out_dim):
try:
p.add_row([labels[i], f"{aucs[i].value()[0]:.4f}", f"{aps[i].value()[0]:.4f}"])
except:
p.add_row([labels[i], "-", "-"])
if fl_save:
self.roc_evaluation(test_set, predict_seq, target_seq, labels)
if self.fl_prob:
self.df_prob = pd.DataFrame()
self.df_prob['PATH_CHECK'] = test_set.entries['PATH']
self.df_prob['PROB'] = predict_seq.cpu().numpy()[:, self.id_prob]
try:
list_aucs=[]
for k in aucs:
if type(k.value()) == tuple:
if np.isnan(k.value()[0]) == False:
list_aucs.append(k.value()[0])
list_aps=[]
for k in aps:
if type(k.value()) == torch.Tensor:
if np.isnan(k.value()[0]) == False:
list_aps.append(k.value()[0])
ave_auc = np.mean(list_aucs)
ave_ap = np.mean(list_aps)
tbl_str = p.get_string(title=f"performance (avg auc {ave_auc:.4f}, mean ap {ave_ap:.4f})")
logger.info(f"\n{tbl_str}")
except:
print("We cannot calcuate average auc scores")
ave_auc = 0
ave_ap = 0
self.pf_metric[f'accuracy'].append((epoch, correct / total))
self.pf_metric[f'auc_score'].append((epoch, ave_auc))
self.pf_metric[f'ap_score'].append((epoch, ave_ap))
return ave_auc, predict_seq.cpu().numpy(), target_seq.cpu().numpy()
def mask_pred(self, output, target):
mask_one = torch.ones(output.shape, dtype=torch.uint8, device=self.env.device)
mask_zero = torch.zeros(output.shape, dtype=torch.uint8, device=self.env.device)
#mask = torch.where(target == -1, mask_zero, mask_one)
mask = torch.where(target == -1, mask_zero, mask_one).bool()
mask_output = output.masked_select(mask.to(self.env.device))
mask_target = target.masked_select(mask.to(self.env.device))
return mask_output, mask_target
def test_batch(self, tp_data, fl_input=False):
# to support different types of models.
if self.env.type == 0:
data = tp_data[0]
target = tp_data[1]
info = tp_data[2]
data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
#data, target = data.to(self.env.device), target.to(self.env.device)
#network output
output = self.env.model(data)
elif self.env.type == 1:
data1 = tp_data[0]
data2 = tp_data[1]
target = tp_data[2]
data1, data2, target = data1.to(self.env.device), data2.to(self.env.device), target.to(self.env.device)
#network output
output = self.env.model(data1, data2)
elif self.env.type == 3:
data = tp_data[0]
target = tp_data[1]
info = tp_data[2]
data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
#network output
output = self.env.model(data, info)
if fl_input == False:
return output, target
else:
return data, info, output
def gradcam_data(self, test_loader, hmp_dims=(512,512), ens_flg=False, cams_ens=None, prob_ens=None):
# threshold to draw a heatmap
out_dim = self.env.out_dim
CxrDataset.eval()
self.env.model.eval()
#with torch.no_grad():
gradcam_res_list = []
gradcam_path_list = []
cams = np.zeros((len(test_loader), len(self.cls_gradcam), 16, 16))
grad_cam = GradCam(self.env.model, self.env.type)
for batch_idx, (data, target, info) in enumerate(test_loader):
#data, target = data.to(self.env.device), target.to(self.env.device)
data, target, info = data.to(self.env.device), target.to(self.env.device), info.to(self.env.device)
# Grad CAM
#grad_cam = GradCam(self.env.model, self.env.type)
if self.cls_gradcam == None:
gradcam_res, gradcam_path = self.gradcam_save_maxcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
else:
if self.fl_ensemble:
cam = self.gradcam_save_argcls_ens(grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=ens_flg, cams_ens=cams_ens, prob_ens=prob_ens)
else:
gradcam_res, gradcam_path = self.gradcam_save_argcls(grad_cam, data, test_loader, batch_idx, hmp_dims, info)
try:
if self.fl_ensemble:
cams[batch_idx, :, :, :] = cam
else:
gradcam_res_list.append(gradcam_res.tolist())
gradcam_path_list.append(gradcam_path)
except AttributeError as e:
print("No GradCam result?")
if False:
self.gradcam_thumbnail()
return gradcam_res_list, gradcam_path_list, cams
def gradcam_save_maxcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
if self.env.type == 3:
cam, prob, tcls = grad_cam.generate_cam(data, info)
else:
cam, prob, tcls = grad_cam.generate_cam(data)
noPlotflg = np.array([-1])
# when we draw gradcam, we have to batch size as 1.
file_name = test_loader.dataset.entries['PATH'][batch_idx]
path_name = file_name.split(".")[0]
if prob >= self.th_gradcam:
target_class = self.env.labels[tcls]
label_list = re.split(' \- |\/| ', target_class)
label_name = "_".join(label_list)
path_name = "_".join([path_name, label_name])
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image"), path_name, hmp_dims)
return cam_rs, path_name
else:
cam_rs = save_class_activation_images(data, noPlotflg, self.pt_runtime.joinpath("gradcam_image"), path_name, hmp_dims)
return None, None
def gradcam_save_argcls(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info):
if self.cls_gradcam[0] == 'all':
self.cls_gradcam = self.env.labels
for i, nm_tcls in enumerate(self.cls_gradcam):
## need to implement to find index among self.env.labels from string of target class
## code start here!!!!
id_tcls = self.env.labels.index(nm_tcls)
if self.env.type == 3:
cam, prob, tcls = grad_cam.generate_cam(data, info, target_class=id_tcls)
else:
cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
cam, prob, tcls, _ = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w)
noPlotflg = np.array([-1])
# when we draw gradcam, we have to batch size as 1.
file_name = test_loader.dataset.entries['PATH'][batch_idx]
path_name = file_name.split(".")[0]
target_class = self.env.labels[tcls]
label_list = re.split(' \- |\/| ', target_class)
label_name = "_".join(label_list)
label_name = label_name.strip('>.').split('>')[-1]
#path_name = "_".join([f'{int(prob*1000):04d}', path_name, label_name])
if prob >= self.th_gradcam:
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"gradcam_image_{label_name}"), path_name, hmp_dims)
cam_list=[]
path_list=[]
path_list.append(path_name)
return cam_list, path_list
def gradcam_save_argcls_ens(self, grad_cam, data, test_loader, batch_idx, hmp_dims, info, ens_flg=False, cams_ens=None, prob_ens=None):
if self.cls_gradcam[0] == 'all':
self.cls_gradcam = self.env.labels
cams = np.zeros((len(self.cls_gradcam), 16, 16))
for i, nm_tcls in enumerate(self.cls_gradcam):
## need to implement to find index among self.env.labels from string of target class
## code start here!!!!
id_tcls = self.env.labels.index(nm_tcls)
cam_w = self.env.model.module.main.classifier.weight[id_tcls].cpu().detach().numpy()
if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
if ens_flg == True:
cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, ens_flg=True, ens_cam=cams_ens[batch_idx, i, :, :])
cams[i, :, :] = cam_low
noPlotflg = np.array([-1])
# when we draw gradcam, we have to batch size as 1.
file_name = test_loader.dataset.entries['PATH'][batch_idx]
path_name = file_name.split(".")[0]
label_name = print_label_name[tcls]
if ATLAS_GEN:
label_name = f"ATLAS_{atlas_name}"
#if prob_ens[batch_idx, id_tcls].item() >= self.th_gradcam:
if ATLAS_GEN:
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"{label_name}"), path_name, hmp_dims)
else:
if '/' in path_name:
self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}/{path_name}").parent.mkdir(parents=True, exist_ok=True)
cam_rs = save_class_activation_images(data, cam, self.pt_runtime.joinpath(f"explain_sample/{self.f_name}/{label_name}"), path_name, hmp_dims)
else:
#review_cam
cam, prob, tcls, cam_low = grad_cam.generate_cam(data, target_class=id_tcls, cam_w=cam_w, th_cam=0.5)
cams[i, :, :] = cam_low
return cams
def roc_evaluation(self, test_set, predict_seq, target_seq, labels):
out_dim = self.env.out_dim
df_data = pd.DataFrame()
df_data['PATH'] = test_set.entries['PATH']
for i in range(out_dim):
df_data[f'{labels[i]}'] = predict_seq.cpu().numpy()[:, i]
df_data[f'{labels[i]}_GT'] = target_seq.cpu().numpy()[:, i]
t = self.pt_runtime.joinpath('roc_result')
Path.mkdir(t, parents=True, exist_ok=True)
df_data.to_excel(t.joinpath('save_predicted_probabilities.xlsx'))
roc_dim = out_dim
for i in range(roc_dim):
mask_out, mask_tar = self.mask_pred(predict_seq[:, i], target_seq[:, i])
if mask_tar.cpu().numpy().size != 0 :
fpr, tpr, thresholds = roc_curve(mask_tar.cpu().numpy(), mask_out.cpu().numpy())
pre, rec, thresholds_pr = precision_recall_curve(mask_tar.cpu().numpy(), mask_out.cpu().numpy())
#logger.debug(f"{predict_seq.cpu().numpy()}")
df = pd.DataFrame()
df[f'specificity'] = 1-fpr
df[f'sensitivity'] = tpr
df[f'thresholds'] = thresholds
label_name = print_label_name[i]
df.to_excel(t.joinpath(f'save_{i:03d}_{label_name}_sensitivity_specificity.xlsx'))
del df
if False:
# ROC plot
fig, (ax1, ax2) = plt.subplots(1,2)
ax1.plot(fpr, tpr, color = 'darkorange', lw = 2, label = 'ROC curve')
ax1.set_title(f'ROC curve for {labels[i]}')
ax1.set(xlabel='False positive rate', ylabel='True positive rate')
# PR plot
ax2.plot(rec, pre, color = 'darkorange', lw = 2, label = 'Precision-Recall curve')
ax2.set_title(f'Precision-Recall curve')
ax2.set(xlabel='Recall', ylabel='Precision')
plt.savefig(t.joinpath(f'{i:03d}_{label_name}_curve.png'))
else:
# ROC plot
fig, ax1 = plt.subplots(1,1)
ax1.plot(fpr, tpr, color = 'darkorange', lw = 2, label = f'{label_name}')
ax1.set_title(f'ROC curve for {label_name}')
ax1.set(xlabel='False positive rate', ylabel='True positive rate')
plt.savefig(t.joinpath(f'{i:03d}_{label_name}_curve.png'))
def save_prob(self, input_file, save_path):
df = pd.read_csv(input_file)
df.insert(6, 'prob', self.df_prob.PROB)
df.insert(6, 'path_check', self.df_prob.PATH_CHECK)
df.to_excel(save_path)
if __name__ == "__main__":
    import argparse

    # Fixed typo in the user-visible description: "Testng" -> "Testing".
    parser = argparse.ArgumentParser(
        description="Testing Our Explainable AI Model on CXR")
    parser.add_argument('--cuda', default=None, type=str,
                        help="use GPUs with its device ids, "
                             "separated by commas")
    args = parser.parse_args()

    # Fixed experiment configuration (deliberately not exposed on the CLI).
    args.in_dim = 1            # single-channel (grayscale) input
    args.out_dim = 21          # number of output labels
    args.labels = None
    args.paths = None
    args.runtime_dir = 'autolabeling'
    args.type = 0
    args.pretr_net = 'pa_feat_model'
    args.gradcam = False
    args.gradcam_cls = None
    args.fl_save = False
    args.id_prob = None
    args.test_csv = 'autolabeling_5_features_490_cases.csv'
    args.arch = None
    args.Nens = 6              # ensemble size
    args.exai = True
    args.simname = 'Outputs'
    args.seed = -1

    runtime_path, device = initialize(args, fl_demo=True)
    # Use ensemble evaluation whenever more than one network is configured.
    fl_ensemble = args.Nens != 1

    # Build the test environment and run the evaluation.
    env = TestEnvironment(device, mtype=args.type, in_dim=args.in_dim,
                          out_dim=args.out_dim, name_labels=args.labels,
                          name_paths=args.paths, testset_csv=args.test_csv,
                          name_model=args.arch, r_seed=args.seed)
    t = Tester(env, pt_runtime=runtime_path, fn_net=args.pretr_net,
               fl_gradcam=args.gradcam, cls_gradcam=args.gradcam_cls,
               id_prob=args.id_prob, fl_ensemble=fl_ensemble,
               fl_exai=args.exai, f_name=args.simname, f_csv=args.test_csv)
    if fl_ensemble:
        t.test_ensemble_evaluation(fl_save=args.fl_save, n_ens=args.Nens)
    else:
        t.test_evaluation(fl_save=args.fl_save)
| true | true |
f71f8112d97cf0d0c960835f729f2a0a204f5395 | 6,801 | py | Python | src/python/tests/core/bot/tasks/task_creation_test.py | stplaydog/clusterfuzz | faa957d265641c031631c36f701c1dc76704a5c7 | [
"Apache-2.0"
] | null | null | null | src/python/tests/core/bot/tasks/task_creation_test.py | stplaydog/clusterfuzz | faa957d265641c031631c36f701c1dc76704a5c7 | [
"Apache-2.0"
] | 2 | 2021-03-31T19:59:19.000Z | 2021-05-20T22:08:07.000Z | src/python/tests/core/bot/tasks/task_creation_test.py | hixio-mh/clusterfuzz | 3f9a69ed71a4420b1a1df8864dd7f3fc1d5b6e07 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for task_creation."""
import mock
import unittest
from bot.tasks import task_creation
from datastore import data_types
from tests.test_libs import helpers
from tests.test_libs import mock_config
from tests.test_libs import test_utils
@test_utils.with_cloud_emulators('datastore')
class RequestBisectionTest(unittest.TestCase):
    """Tests request_bisection."""

    def setUp(self):
        # Patch every external dependency of request_bisection so the test
        # only exercises the task-creation logic itself.
        helpers.patch(self, [
            'build_management.build_manager.get_primary_bucket_path',
            'build_management.build_manager.get_revisions_list',
            'build_management.revisions.get_component_range_list',
            'config.local_config.ProjectConfig',
            'google_cloud_utils.blobs.read_key',
            'google_cloud_utils.pubsub.PubSubClient.publish',
        ])

        # A fuzz target matching the testcase's overridden fuzzer name.
        data_types.FuzzTarget(
            id='libFuzzer_proj_target',
            engine='libFuzzer',
            project='proj',
            binary='target').put()

        # A security testcase with a bug attached and valid regression/fixed
        # revision ranges — i.e. one that qualifies for bisection.
        self.testcase = data_types.Testcase(
            crash_type='crash-type',
            security_flag=True,
            bug_information='1337',
            job_type='libfuzzer_asan_proj',
            fuzzer_name='libFuzzer',
            overridden_fuzzer_name='libFuzzer_proj_target',
            regression='123:456',
            fixed='123:456',
            crash_revision=3,
            additional_metadata='{"last_tested_crash_revision": 4}')
        self.testcase.put()

        self.mock.read_key.return_value = b'reproducer'
        self.mock.get_component_range_list.return_value = [
            {
                'link_text': 'old:new',
            },
        ]
        self.mock.ProjectConfig.return_value = mock_config.MockConfig({
            'bisect_service': {
                'pubsub_topic': '/projects/project/topics/topic',
            }
        })

    def _test(self, sanitizer, old_commit='old', new_commit='new'):
        """Test task publication.

        Requests bisection and asserts that exactly two pubsub messages
        (one 'regressed', one 'fixed') were published with the expected
        attributes, and that the request markers were set on the testcase.
        """
        task_creation.request_bisection(self.testcase.key.id())
        publish_calls = self.mock.publish.call_args_list
        bisect_types = ('regressed', 'fixed')
        self.assertEqual(2, len(publish_calls))

        for bisect_type, publish_call in zip(bisect_types, publish_calls):
            topic = publish_call[0][1]
            message = publish_call[0][2][0]
            self.assertEqual('/projects/project/topics/topic', topic)
            self.assertEqual(b'reproducer', message.data)
            self.assertDictEqual({
                'crash_type': 'crash-type',
                'security': 'True',
                'fuzz_target': 'target',
                'new_commit': new_commit,
                'old_commit': old_commit,
                'project_name': 'proj',
                'sanitizer': sanitizer,
                'testcase_id': '1',
                'issue_id': '1337',
                'type': bisect_type,
            }, message.attributes)

        testcase = self.testcase.key.get()
        self.assertTrue(testcase.get_metadata('requested_regressed_bisect'))
        self.assertTrue(testcase.get_metadata('requested_fixed_bisect'))

    def test_request_bisection_asan(self):
        """Basic regressed test (asan)."""
        self.testcase.job_type = 'libfuzzer_asan_proj'
        self.testcase.put()
        self._test('address')

    def test_request_bisection_msan(self):
        """Basic regressed test (msan)."""
        self.testcase.job_type = 'libfuzzer_msan_proj'
        self.testcase.put()
        self._test('memory')

    def test_request_bisection_ubsan(self):
        """Basic regressed test (ubsan)."""
        self.testcase.job_type = 'libfuzzer_ubsan_proj'
        self.testcase.put()
        self._test('undefined')

    def test_request_bisection_blackbox(self):
        """Test request bisection for blackbox."""
        # Blackbox testcases have no fuzz target, so nothing is published.
        self.testcase.job_type = 'blackbox'
        self.testcase.overridden_fuzzer_name = None
        self.testcase.put()
        task_creation.request_bisection(self.testcase.key.id())
        self.assertEqual(0, self.mock.publish.call_count)

    def test_request_bisection_non_security(self):
        """Test request bisection for non-security testcases."""
        self.testcase.job_type = 'libfuzzer_asan_proj'
        self.testcase.security_flag = False
        self.testcase.put()
        task_creation.request_bisection(self.testcase.key.id())
        self.assertEqual(0, self.mock.publish.call_count)

    def test_request_bisection_flaky(self):
        """Test request bisection for flaky testcases."""
        self.testcase.job_type = 'libfuzzer_asan_proj'
        self.testcase.one_time_crasher_flag = True
        self.testcase.put()
        task_creation.request_bisection(self.testcase.key.id())
        self.assertEqual(0, self.mock.publish.call_count)

    def test_request_bisection_no_bug(self):
        """Test request bisection for testcases with no bug attached."""
        self.testcase.job_type = 'libfuzzer_asan_proj'
        self.testcase.bug_information = ''
        self.testcase.put()
        task_creation.request_bisection(self.testcase.key.id())
        self.assertEqual(0, self.mock.publish.call_count)

    def test_request_bisection_invalid_range(self):
        """Test request bisection for testcases with an invalid
        regression/fixed revision range."""
        self.testcase.job_type = 'libfuzzer_asan_proj'
        self.testcase.regression = 'NA'
        self.testcase.fixed = 'NA'
        self.testcase.put()
        task_creation.request_bisection(self.testcase.key.id())
        self.assertEqual(0, self.mock.publish.call_count)

    def test_request_bisection_once_only(self):
        """Test request bisection for testcases isn't repeated if already
        requested."""
        self.testcase.set_metadata('requested_regressed_bisect', True)
        self.testcase.set_metadata('requested_fixed_bisect', True)
        self.testcase.put()
        task_creation.request_bisection(self.testcase.key.id())
        self.assertEqual(0, self.mock.publish.call_count)

    def test_request_single_commit_range(self):
        """Request bisection with a single commit (invalid range)."""
        self.mock.get_primary_bucket_path.return_value = 'bucket'
        self.mock.get_revisions_list.return_value = list(range(6))
        self.mock.get_component_range_list.return_value = [
            {
                'link_text': 'one',
            },
        ]
        # This first request publishes the two messages and sets the
        # 'requested_*_bisect' metadata; the second request made inside
        # _test() is therefore a no-op, so _test()'s "exactly 2 publish
        # calls" assertion still holds on the cumulative call list.
        task_creation.request_bisection(self.testcase.key.id())
        self._test('address', old_commit='one', new_commit='one')
        self.mock.get_component_range_list.assert_has_calls([
            mock.call(123, 456, 'libfuzzer_asan_proj'),
            mock.call(0, 3, 'libfuzzer_asan_proj'),
            mock.call(123, 456, 'libfuzzer_asan_proj'),
            mock.call(4, 5, 'libfuzzer_asan_proj'),
        ])
| 36.762162 | 74 | 0.704161 |
import mock
import unittest
from bot.tasks import task_creation
from datastore import data_types
from tests.test_libs import helpers
from tests.test_libs import mock_config
from tests.test_libs import test_utils
@test_utils.with_cloud_emulators('datastore')
class RequestBisectionTest(unittest.TestCase):
def setUp(self):
helpers.patch(self, [
'build_management.build_manager.get_primary_bucket_path',
'build_management.build_manager.get_revisions_list',
'build_management.revisions.get_component_range_list',
'config.local_config.ProjectConfig',
'google_cloud_utils.blobs.read_key',
'google_cloud_utils.pubsub.PubSubClient.publish',
])
data_types.FuzzTarget(
id='libFuzzer_proj_target',
engine='libFuzzer',
project='proj',
binary='target').put()
self.testcase = data_types.Testcase(
crash_type='crash-type',
security_flag=True,
bug_information='1337',
job_type='libfuzzer_asan_proj',
fuzzer_name='libFuzzer',
overridden_fuzzer_name='libFuzzer_proj_target',
regression='123:456',
fixed='123:456',
crash_revision=3,
additional_metadata='{"last_tested_crash_revision": 4}')
self.testcase.put()
self.mock.read_key.return_value = b'reproducer'
self.mock.get_component_range_list.return_value = [
{
'link_text': 'old:new',
},
]
self.mock.ProjectConfig.return_value = mock_config.MockConfig({
'bisect_service': {
'pubsub_topic': '/projects/project/topics/topic',
}
})
def _test(self, sanitizer, old_commit='old', new_commit='new'):
task_creation.request_bisection(self.testcase.key.id())
publish_calls = self.mock.publish.call_args_list
bisect_types = ('regressed', 'fixed')
self.assertEqual(2, len(publish_calls))
for bisect_type, publish_call in zip(bisect_types, publish_calls):
topic = publish_call[0][1]
message = publish_call[0][2][0]
self.assertEqual('/projects/project/topics/topic', topic)
self.assertEqual(b'reproducer', message.data)
self.assertDictEqual({
'crash_type': 'crash-type',
'security': 'True',
'fuzz_target': 'target',
'new_commit': new_commit,
'old_commit': old_commit,
'project_name': 'proj',
'sanitizer': sanitizer,
'testcase_id': '1',
'issue_id': '1337',
'type': bisect_type,
}, message.attributes)
testcase = self.testcase.key.get()
self.assertTrue(testcase.get_metadata('requested_regressed_bisect'))
self.assertTrue(testcase.get_metadata('requested_fixed_bisect'))
def test_request_bisection_asan(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.put()
self._test('address')
def test_request_bisection_msan(self):
self.testcase.job_type = 'libfuzzer_msan_proj'
self.testcase.put()
self._test('memory')
def test_request_bisection_ubsan(self):
self.testcase.job_type = 'libfuzzer_ubsan_proj'
self.testcase.put()
self._test('undefined')
def test_request_bisection_blackbox(self):
self.testcase.job_type = 'blackbox'
self.testcase.overridden_fuzzer_name = None
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_non_security(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.security_flag = False
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_flaky(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.one_time_crasher_flag = True
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_no_bug(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.bug_information = ''
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_invalid_range(self):
self.testcase.job_type = 'libfuzzer_asan_proj'
self.testcase.regression = 'NA'
self.testcase.fixed = 'NA'
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_bisection_once_only(self):
self.testcase.set_metadata('requested_regressed_bisect', True)
self.testcase.set_metadata('requested_fixed_bisect', True)
self.testcase.put()
task_creation.request_bisection(self.testcase.key.id())
self.assertEqual(0, self.mock.publish.call_count)
def test_request_single_commit_range(self):
self.mock.get_primary_bucket_path.return_value = 'bucket'
self.mock.get_revisions_list.return_value = list(range(6))
self.mock.get_component_range_list.return_value = [
{
'link_text': 'one',
},
]
task_creation.request_bisection(self.testcase.key.id())
self._test('address', old_commit='one', new_commit='one')
self.mock.get_component_range_list.assert_has_calls([
mock.call(123, 456, 'libfuzzer_asan_proj'),
mock.call(0, 3, 'libfuzzer_asan_proj'),
mock.call(123, 456, 'libfuzzer_asan_proj'),
mock.call(4, 5, 'libfuzzer_asan_proj'),
])
| true | true |
f71f83d71e89545d5f222b0941888734de4afcee | 1,798 | py | Python | benchmark/memory_profile_tool.py | coolteemf/coolteemf-deformetrica | f965d6ecc0d04f243e487468a9dafe9fe864eed2 | [
"MIT"
] | 2 | 2022-03-04T11:19:30.000Z | 2022-03-08T04:47:22.000Z | benchmark/memory_profile_tool.py | lepennec/Deformetrica_multiscale | dbcb69962dd02f14dde5d63a9abc1de69112f273 | [
"MIT"
] | null | null | null | benchmark/memory_profile_tool.py | lepennec/Deformetrica_multiscale | dbcb69962dd02f14dde5d63a9abc1de69112f273 | [
"MIT"
] | 1 | 2022-03-07T09:52:52.000Z | 2022-03-07T09:52:52.000Z | import resource
import sys
import time
from threading import Thread
from memory_profiler import memory_usage
import GPUtil
import torch
# _cudart = ctypes.CDLL('libcudart.so')
#
#
# def start_cuda_profile():
# # As shown at http://docs.nvidia.com/cuda/cuda-runtime-api/group__CUDART__PROFILER.html,
# # the return value will unconditionally be 0. This check is just in case it changes in
# # the future.
# ret = _cudart.cudaProfilerStart()
# if ret != 0:
# raise Exception("cudaProfilerStart() returned %d" % ret)
#
#
# def stop_cuda_profile():
# ret = _cudart.cudaProfilerStop()
# if ret != 0:
# raise Exception("cudaProfilerStop() returned %d" % ret)
class MemoryProfiler(Thread):
    """Background thread that periodically samples the RAM usage of the
    current process via ``memory_profiler.memory_usage``.

    Samples (in MiB, as reported by memory_profiler) are appended to the
    list stored under the ``'ram'`` key of :attr:`data`.
    """

    def __init__(self, freq=0.1):
        # freq: sampling period in seconds between two RAM samples.
        Thread.__init__(self)
        self.freq = freq
        # Polled by run(); set to False by stop() from the caller's thread.
        self.run_flag = True
        self.data = {'ram': []}

    def run(self):
        # logger.info('MemoryProfiler::run()')
        # Sample until stop() clears the flag; each iteration appends one
        # RAM reading and then sleeps for the configured period.
        while self.run_flag:
            self.data['ram'].append(self.current_ram_usage())
            time.sleep(self.freq)

    def stop(self):
        # logger.info('MemoryProfiler::stop()')
        self.run_flag = False
        # join() may block up to ~freq seconds while run() finishes sleeping.
        self.join()
        # Return a shallow copy so the caller's snapshot is detached from
        # the dict that clear() may later empty.
        return dict(self.data)

    def clear(self):
        # NOTE(review): this removes the 'ram' key entirely; calling it
        # while the sampling thread is still running would make run() raise
        # KeyError. Only call after stop().
        self.data.clear()

    @staticmethod
    def current_ram_usage():
        return memory_usage(-1, interval=0)[0]  # -1 is for current process
def start_memory_profile(freq=0.001):
    """Create a :class:`MemoryProfiler` sampling every *freq* seconds,
    start its thread, and return it."""
    profiler = MemoryProfiler(freq)
    profiler.start()
    return profiler
def stop_memory_profile(memory_profiler):
    """Stop the given profiler thread and return its collected samples."""
    samples = memory_profiler.stop()
    return samples
def stop_and_clear_memory_profile(memory_profiler):
    """Stop the profiler, reset its sample store, and return the samples
    that were collected before the reset."""
    collected = memory_profiler.stop()
    # Equivalent to calling the clear_memory_profile() helper, which just
    # delegates to the profiler's own clear().
    memory_profiler.clear()
    return collected
def clear_memory_profile(memory_profiler):
    """Discard every sample held by *memory_profiler*.

    Note: MemoryProfiler.clear() empties the whole ``data`` dict (including
    the ``'ram'`` key), so only call this once the profiler thread has been
    stopped.
    """
    memory_profiler.clear()
| 23.350649 | 94 | 0.660178 | import resource
import sys
import time
from threading import Thread
from memory_profiler import memory_usage
import GPUtil
import torch
': []}
def run(self):
while self.run_flag:
self.data['ram'].append(self.current_ram_usage())
time.sleep(self.freq)
def stop(self):
self.run_flag = False
self.join()
return dict(self.data)
def clear(self):
self.data.clear()
@staticmethod
def current_ram_usage():
return memory_usage(-1, interval=0)[0]
def start_memory_profile(freq=0.001):
ret = MemoryProfiler(freq)
ret.start()
return ret
def stop_memory_profile(memory_profiler):
return memory_profiler.stop()
def stop_and_clear_memory_profile(memory_profiler):
ret = memory_profiler.stop()
clear_memory_profile(memory_profiler)
return ret
def clear_memory_profile(memory_profiler):
memory_profiler.clear()
| true | true |
f71f84709e6e370286285ed6bcfe99e6b5009b1b | 436 | py | Python | lessrpc_stub/StubConstants.py | MoujiRPC/mouji_stub_py2x | 3f8d7c0ccdfade7f80020528ca9ddb52556def6c | [
"MIT"
] | 2 | 2019-03-19T21:44:11.000Z | 2019-04-16T21:41:50.000Z | lessrpc_stub/StubConstants.py | MoujiRPC/mouji_stub_py2x | 3f8d7c0ccdfade7f80020528ca9ddb52556def6c | [
"MIT"
] | null | null | null | lessrpc_stub/StubConstants.py | MoujiRPC/mouji_stub_py2x | 3f8d7c0ccdfade7f80020528ca9ddb52556def6c | [
"MIT"
] | null | null | null | '''
Created on Nov 7, 2017
@author: Salim
'''
# Configuration keys used to locate the name server.
CONF_PARAM_NAME_SERVER_URL = "CONF.NAMESERVER.URL"
CONF_PARAM_NAME_SERVER_PORT = "CONF.NAMESERVER.PORT"

# Protocol prefix used when building RPC endpoint URLs.
RPC_PROTOCOL = "http://"

# Request paths understood by a less-rpc provider.
LESS_RPC_REQUEST_PING = "/ping"
LESS_RPC_REQUEST_EXECUTE = "/execute"
LESS_RPC_REQUEST_SERVICE = "/service"
LESS_RPC_REQUEST_INFO = "/info"

HTTP_PROTOCOL = "http://"
# Fixed: this was erroneously "http://" (copy-paste from HTTP_PROTOCOL).
HTTPS_PROTOCOL = "https://"

# HTTP wait times, in seconds.
HTTP_WAIT_TIME_SHORT = 5
HTTP_WAIT_TIME_LONG = 60 * 60 * 5  # 5 hours
| 17.44 | 52 | 0.743119 |
# Configuration keys used to locate the name server.
CONF_PARAM_NAME_SERVER_URL = "CONF.NAMESERVER.URL"
CONF_PARAM_NAME_SERVER_PORT = "CONF.NAMESERVER.PORT"

# Protocol prefix used when building RPC endpoint URLs.
RPC_PROTOCOL = "http://"

# Request paths understood by a less-rpc provider.
LESS_RPC_REQUEST_PING = "/ping"
LESS_RPC_REQUEST_EXECUTE = "/execute"
LESS_RPC_REQUEST_SERVICE = "/service"
LESS_RPC_REQUEST_INFO = "/info"

HTTP_PROTOCOL = "http://"
# NOTE(review): identical to HTTP_PROTOCOL — "https://" was probably
# intended here; confirm before relying on this constant.
HTTPS_PROTOCOL = "http://"

# HTTP wait times, in seconds.
HTTP_WAIT_TIME_SHORT = 5
HTTP_WAIT_TIME_LONG = 60 * 60 * 5  # 5 hours
| true | true |
f71f85094dbcb9fd0be92bb6eec98b8e5363d046 | 100,399 | py | Python | src/sardana/macroserver/macros/scan.py | aureocarneiro/sardana | 43644c9966d73c7a9023b53e97b530f3ea0dfb39 | [
"CC-BY-3.0"
] | null | null | null | src/sardana/macroserver/macros/scan.py | aureocarneiro/sardana | 43644c9966d73c7a9023b53e97b530f3ea0dfb39 | [
"CC-BY-3.0"
] | null | null | null | src/sardana/macroserver/macros/scan.py | aureocarneiro/sardana | 43644c9966d73c7a9023b53e97b530f3ea0dfb39 | [
"CC-BY-3.0"
] | null | null | null | ##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""
Macro library containning scan macros for the macros server Tango device
server as part of the Sardana project.
"""
__all__ = ["a2scan", "a3scan", "a4scan", "amultiscan", "aNscan", "ascan",
"d2scan", "d3scan", "d4scan", "dmultiscan", "dNscan", "dscan",
"fscan", "mesh", "timescan", "rscan", "r2scan", "r3scan",
"a2scanc", "a3scanc", "a4scanc", "ascanc",
"d2scanc", "d3scanc", "d4scanc", "dscanc",
"meshc",
"a2scanct", "a3scanct", "a4scanct", "ascanct", "meshct",
"scanhist", "getCallable", "UNCONSTRAINED",
"scanstats"]
__docformat__ = 'restructuredtext'
import os
import copy
import datetime
import numpy
from taurus.core.util import SafeEvaluator
from sardana.macroserver.msexception import UnknownEnv
from sardana.macroserver.macro import Hookable, Macro, Type, Table, List
from sardana.macroserver.scan.gscan import SScan, CTScan, HScan, \
MoveableDesc, CSScan, TScan
from sardana.util.motion import MotionPath
from sardana.util.tree import BranchNode
# Sentinel accepted by getCallable() meaning "no movement constraint".
UNCONSTRAINED = "unconstrained"

# One-letter scan-mode selectors accepted by aNscan._prepare():
StepMode = 's'               # step scan (SScan)
# TODO: change it to be more verbose e.g. ContinuousSwMode
ContinuousMode = 'c'         # continuous scan (CSScan)
ContinuousHwTimeMode = 'ct'  # continuous scan, hardware-timed (CTScan)
HybridMode = 'h'             # hybrid scan (HScan)
def getCallable(repr):
    """
    Return a movement-constraint function for the given representation.

    Ideas: repr could be an URL for a file where the function is contained,
    or be evaluable code, or a pickled function object,...
    In any case, the return from it should be a callable of the form:
    f(x1,x2) where x1, x2 are points in the moveable space and the return value
    of f is True if the movement from x1 to x2 is allowed. False otherwise
    """
    if repr == UNCONSTRAINED:
        # No constraint: any movement between any two points is allowed.
        return lambda x1, x2: True
    else:
        # NOTE(review): placeholder — this zero-argument callable does not
        # match the documented f(x1, x2) signature and would raise TypeError
        # if ever invoked as a constraint. Only UNCONSTRAINED is actually
        # implemented.
        return lambda: None
# TODO: remove starts
def _calculate_positions(moveable_node, start, end):
    '''Recursively expand a moveable's start/end positions down to the
    physical-motor level.

    :param moveable_node: (BaseNode) node representing a moveable.
                          Can be a BranchNode representing a PseudoMotor,
                          or a LeafNode representing a PhysicalMotor.
    :param start: (float) starting position of the moveable
    :param end: (float) ending position of the moveable
    :return: (list<(float,float)>) a list of tuples comprising starting
             and ending positions. List order is important and preserved.'''
    # Physical motor: positions are already at the lowest level.
    if not isinstance(moveable_node, BranchNode):
        return [start], [end]

    # Pseudo motor: translate to its physical motors and recurse on each
    # child with the corresponding physical position pair.
    pseudo = moveable_node.data
    physical_starts = pseudo.calcPhysical(start)
    physical_ends = pseudo.calcPhysical(end)
    start_positions = []
    end_positions = []
    for child, child_start, child_end in zip(moveable_node.children,
                                             physical_starts,
                                             physical_ends):
        sub_starts, sub_ends = _calculate_positions(child, child_start,
                                                    child_end)
        start_positions.extend(sub_starts)
        end_positions.extend(sub_ends)
    return start_positions, end_positions
class aNscan(Hookable):
    """N-dimensional scan. This is **not** meant to be called by the user,
    but as a generic base to construct ascan, a2scan, a3scan,..."""

    hints = {'scan': 'aNscan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)

    def _prepare(self, motorlist, startlist, endlist, scan_length, integ_time,
                 mode=StepMode, latency_time=0, **opts):
        """Configure the generic scan from the concrete macro's arguments.

        :param motorlist: moveables to scan
        :param startlist: per-moveable start positions
        :param endlist: per-moveable end positions
        :param scan_length: number of intervals (StepMode, HybridMode,
            ContinuousHwTimeMode) or the slow-down factor (ContinuousMode)
        :param integ_time: integration time per point
        :param mode: one of StepMode, ContinuousMode, ContinuousHwTimeMode,
            HybridMode
        :param latency_time: extra latency per point (ContinuousHwTimeMode
            only; superseded by the measurement group's latency if larger)
        """
        self.motors = motorlist
        self.starts = numpy.array(startlist, dtype='d')
        self.finals = numpy.array(endlist, dtype='d')
        self.mode = mode
        self.integ_time = integ_time
        self.opts = opts
        if len(self.motors) == self.starts.size == self.finals.size:
            self.N = self.finals.size
        else:
            raise ValueError(
                'Moveablelist, startlist and endlist must all be same length')

        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        moveables[0].is_reference = True

        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])

        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')

        if mode == StepMode:
            self.nr_interv = scan_length
            self.nb_points = self.nr_interv + 1
            self.interv_sizes = (self.finals - self.starts) / self.nr_interv
            self.name = opts.get('name', 'a%iscan' % self.N)
            self._gScan = SScan(self, self._stepGenerator,
                                moveables, env, constrains, extrainfodesc)
        elif mode in [ContinuousMode, ContinuousHwTimeMode]:
            # TODO: probably not 100% correct,
            # the idea is to allow passing a list of waypoints
            if isinstance(endlist[0], list):
                self.waypoints = self.finals
            else:
                self.waypoints = [self.finals]
            self.nr_waypoints = len(self.waypoints)
            if mode == ContinuousMode:
                self.slow_down = scan_length
                # aNscans will only have two waypoints (the start and the
                # final positions)
                self.nr_waypoints = 2
                self.way_lengths = (
                    self.finals - self.starts) / (self.nr_waypoints - 1)
                self.name = opts.get('name', 'a%iscanc' % self.N)
                self._gScan = CSScan(self, self._waypoint_generator,
                                     self._period_generator, moveables, env,
                                     constrains, extrainfodesc)
            elif mode == ContinuousHwTimeMode:
                self.nr_interv = scan_length
                self.nb_points = self.nr_interv + 1
                # Use the measurement group's latency if it exceeds the
                # requested one.
                mg_name = self.getEnv('ActiveMntGrp')
                mg = self.getMeasurementGroup(mg_name)
                mg_latency_time = mg.getLatencyTime()
                if mg_latency_time > latency_time:
                    self.info("Choosing measurement group latency time: %f" %
                              mg_latency_time)
                    latency_time = mg_latency_time
                self.latency_time = latency_time
                self.name = opts.get('name', 'a%iscanct' % self.N)
                self._gScan = CTScan(self, self._waypoint_generator_hwtime,
                                     moveables,
                                     env,
                                     constrains,
                                     extrainfodesc)
        elif mode == HybridMode:
            self.nr_interv = scan_length
            self.nb_points = self.nr_interv + 1
            self.interv_sizes = (self.finals - self.starts) / self.nr_interv
            self.name = opts.get('name', 'a%iscanh' % self.N)
            self._gScan = HScan(self, self._stepGenerator,
                                moveables, env, constrains, extrainfodesc)
        else:
            raise ValueError('invalid value for mode %s' % mode)

        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc. Ideally this should be done by the data setter
        # but this is available in the Macro class and we inherit from it
        # latter. More details in sardana-org/sardana#683.
        self._data = self._gScan.data

    def _stepGenerator(self):
        """Yield one step dict per scan point (nb_points total) for SScan."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        for point_no in range(self.nb_points):
            step["positions"] = self.starts + point_no * self.interv_sizes
            step["point_id"] = point_no
            yield step

    def _waypoint_generator(self):
        """Yield one waypoint dict per waypoint for CSScan."""
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["check_func"] = []
        step["slow_down"] = self.slow_down
        for point_no in range(self.nr_waypoints):
            step["positions"] = self.starts + point_no * self.way_lengths
            step["waypoint_id"] = point_no
            yield step

    def _waypoint_generator_hwtime(self):
        """Yield waypoint dicts, with start/end positions expanded down to
        the physical-motor level, for hardware-timed scans (CTScan)."""

        # CScan in its constructor populates a list of data structures -
        # trees. Each tree represent one Moveables with its hierarchy of
        # inferior moveables.
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["check_func"] = []
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        step["positions"] = []
        step["start_positions"] = []
        starts = self.starts
        for point_no, waypoint in enumerate(self.waypoints):
            for start, end, moveable_tree in zip(starts, waypoint,
                                                 moveables_trees):
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
                step["waypoint_id"] = point_no
            starts = waypoint
            yield step

    def _period_generator(self):
        """Infinite generator of acquisition-period dicts for CSScan."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        step['extrainfo'] = {}
        point_no = 0
        while(True):
            point_no += 1
            step["point_id"] = point_no
            yield step

    def run(self, *args):
        """Execute the scan by consuming the generic scan's step generator."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        """Estimate the total scan duration in seconds (motion +
        acquisition). Returns 0.0 for modes without an estimation."""
        gScan = self._gScan
        mode = self.mode
        it = gScan.generator()
        v_motors = gScan.get_virtual_motors()
        curr_pos = gScan.motion.readPosition()
        total_time = 0.0
        if mode == StepMode:
            # calculate motion time
            max_step0_time, max_step_time = 0.0, 0.0
            # first motion takes longer, all others should be "equal"
            step0 = next(it)
            for v_motor, start, stop, length in zip(v_motors, curr_pos,
                                                    step0['positions'],
                                                    self.interv_sizes):
                path0 = MotionPath(v_motor, start, stop)
                path = MotionPath(v_motor, 0, length)
                max_step0_time = max(max_step0_time, path0.duration)
                max_step_time = max(max_step_time, path.duration)
            motion_time = max_step0_time + self.nr_interv * max_step_time
            # calculate acquisition time
            acq_time = self.nb_points * self.integ_time
            total_time = motion_time + acq_time
        elif mode == ContinuousMode:
            total_time = gScan.waypoint_estimation()
        # TODO: add time estimation for ContinuousHwTimeMode
        return total_time

    def getIntervalEstimation(self):
        """Return the expected number of intervals (step-like modes) or
        waypoints (ContinuousMode)."""
        mode = self.mode
        if mode in [StepMode, ContinuousHwTimeMode, HybridMode]:
            return self.nr_interv
        elif mode == ContinuousMode:
            return self.nr_waypoints

    def _fill_missing_records(self):
        """Pad the scan's record list with dummy records up to nb_points."""
        # fill record list with dummy records for the final padding
        nb_of_points = self.nb_points
        scan = self._gScan
        nb_of_records = len(scan.data.records)
        missing_records = nb_of_points - nb_of_records
        scan.data.initRecords(missing_records)

    def _get_nr_points(self):
        """Deprecated alias of nb_points, kept for backward compatibility."""
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class dNscan(aNscan):
    """Relative counterpart of :class:`aNscan`.

    Start/end positions are interpreted as offsets from the current motor
    positions, and the motors are driven back to where they started once
    the scan completes.
    """

    hints = copy.deepcopy(aNscan.hints)
    hints['scan'] = 'dNscan'

    def _prepare(self, motorlist, startlist, endlist, scan_length,
                 integ_time, mode=StepMode, **opts):
        motor_names = [m.getName() for m in motorlist]
        self._motion = self.getMotion(motor_names)
        current = numpy.array(self._motion.readPosition(force=True))
        self.originalPositions = current
        # Convert the relative offsets into absolute target positions.
        starts = current + numpy.array(startlist, dtype='d')
        finals = current + numpy.array(endlist, dtype='d')
        aNscan._prepare(self, motorlist, starts, finals,
                        scan_length, integ_time, mode=mode, **opts)

    def do_restore(self):
        # Drive the motors back to where they were before the scan.
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class ascan(aNscan, Macro):
    """
    Do an absolute scan of the specified motor.

    The motor is moved from start_pos to final_pos in nr_interv equal
    intervals of size (start_pos-final_pos)/nr_interv, producing
    nr_interv+1 acquisition points. At each point the acquisition lasts
    integ_time: seconds if positive, monitor counts if negative.
    """

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        # Delegate to the generic N-dimensional preparation with
        # single-element sequences.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a2scan(aNscan, Macro):
    """
    Absolute two-motor scan.

    Both motors move over the same number of intervals, motor1 from
    start_pos1 to final_pos1 and motor2 from start_pos2 to final_pos2.
    Each motor's step size is (start_pos-final_pos)/nr_interv and
    nr_interv+1 points are acquired. integ_time gives the count time:
    seconds if positive, monitor counts if negative.
    """

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        # Group the per-motor arguments and delegate to the generic
        # N-dimensional preparation.
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, nr_interv, integ_time, **opts)
class a3scan(aNscan, Macro):
    """Absolute three-motor scan.

    Moves motor1, motor2 and motor3 simultaneously over the same number
    of intervals, each from its start_posN to its final_posN (N=1,2,3).
    Each motor's step size is (start_pos-final_pos)/nr_interv and
    nr_interv+1 data points are collected.  A positive integ_time counts
    seconds; a negative value counts monitor counts.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        """Group per-motor arguments and delegate to aNscan._prepare."""
        movables = [m1, m2, m3]
        start_positions = [s1, s2, s3]
        final_positions = [f1, f2, f3]
        self._prepare(movables, start_positions, final_positions,
                      nr_interv, integ_time, **opts)
class a4scan(aNscan, Macro):
    """four-motor scan .
    a4scan scans four motors, as specified by motor1, motor2, motor3 and
    motor4.
    Each motor moves the same number of intervals with starting and ending
    positions given by start_posN and final_posN (for N=1,2,3,4).
    The step size for each motor is (start_pos-final_pos)/nr_interv.
    The number of data points collected will be nr_interv+1.
    Count time is given by time which if positive, specifies seconds and
    if negative, specifies monitor counts."""
    # Fix: the descriptions of the fourth motor's parameters previously
    # said "3" (copy-paste remnant from a3scan).
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        """Collect the per-motor arguments and delegate to aNscan._prepare."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [
                      f1, f2, f3, f4], nr_interv, integ_time, **opts)
class amultiscan(aNscan, Macro):
    """
    Absolute scan of an arbitrary number of motors.

    Scans N motors given as (motor, start, end) triplets.  Every motor
    moves the same number of intervals, from its start to its end
    position, with step size (start_pos-final_pos)/nr_interv.  The scan
    collects nr_interv+1 data points.  A positive integ_time counts
    seconds; a negative value counts monitor counts.
    """
    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, *args, **opts):
        """Split the flat (motor, start, end)*N + (nr_interv, integ_time)
        argument list and delegate to aNscan._prepare."""
        nr_interv, integ_time = args[-2], args[-1]
        triplets = args[:-2]
        motors = triplets[0::3]
        starts = triplets[1::3]
        ends = triplets[2::3]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dmultiscan(dNscan, Macro):
    """
    Relative scan of an arbitrary number of motors.

    Scans N motors given as (motor, start, end) triplets.  Every motor
    moves the same number of intervals; a motor sitting at position X
    before the scan is swept from X+start to X+end.  Step size is
    (start_pos-final_pos)/nr_interv and nr_interv+1 data points are
    collected.  A positive integ_time counts seconds; a negative value
    counts monitor counts.
    """
    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, *args, **opts):
        """Split the flat (motor, start, end)*N + (nr_interv, integ_time)
        argument list and delegate to dNscan._prepare."""
        nr_interv, integ_time = args[-2], args[-1]
        triplets = args[:-2]
        motors = triplets[0::3]
        starts = triplets[1::3]
        ends = triplets[2::3]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dscan(dNscan, Macro):
    """One-motor scan relative to the current position.

    If the motor is at position X before the scan begins, it is swept
    from X+start_pos to X+final_pos with step size
    (start_pos-final_pos)/nr_interv, collecting nr_interv+1 data points.
    A positive integ_time counts seconds; a negative value counts
    monitor counts.
    """
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        """Wrap the single motor in lists and delegate to dNscan._prepare."""
        movables, starts, finals = [motor], [start_pos], [final_pos]
        self._prepare(movables, starts, finals, nr_interv, integ_time, **opts)
class d2scan(dNscan, Macro):
    """Two-motor scan relative to the current positions.

    Both motors move the same number of intervals; a motor at position X
    before the scan is swept from X+start_posN to X+final_posN (N=1,2).
    Step size is (start_pos-final_pos)/nr_interv and nr_interv+1 data
    points are collected.  A positive integ_time counts seconds; a
    negative value counts monitor counts.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        """Bundle per-motor arguments and delegate to dNscan._prepare."""
        movables = [motor1, motor2]
        start_positions = [start_pos1, start_pos2]
        final_positions = [final_pos1, final_pos2]
        self._prepare(movables, start_positions, final_positions,
                      nr_interv, integ_time, **opts)
class d3scan(dNscan, Macro):
    """Three-motor scan relative to the current positions.

    All three motors move the same number of intervals; a motor at
    position X before the scan is swept from X+start_posN to
    X+final_posN (N=1,2,3).  Step size is
    (start_pos-final_pos)/nr_interv and nr_interv+1 data points are
    collected.  A positive integ_time counts seconds; a negative value
    counts monitor counts.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        """Group per-motor arguments and delegate to dNscan._prepare."""
        movables = [m1, m2, m3]
        start_positions = [s1, s2, s3]
        final_positions = [f1, f2, f3]
        self._prepare(movables, start_positions, final_positions,
                      nr_interv, integ_time, **opts)
class d4scan(dNscan, Macro):
    """four-motor scan relative to the starting positions
    d4scan scans four motors, as specified by motor1, motor2, motor3 and
    motor4.
    Each motor moves the same number of intervals. If each motor is at a
    position X before the scan begins, it will be scanned from X+start_posN
    to X+final_posN (where N is one of 1,2,3,4).
    The step size for each motor is (start_pos-final_pos)/nr_interv.
    The number of data points collected will be nr_interv+1.
    Count time is given by time which if positive, specifies seconds and
    if negative, specifies monitor counts.
    Upon termination, the motors are returned to their starting positions.
    """
    # Fixes: docstring previously referred to "a4scan", and the fourth
    # motor's parameter descriptions said "3" (copy-paste remnants).
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        """Collect the per-motor arguments and delegate to dNscan._prepare."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [
                      f1, f2, f3, f4], nr_interv, integ_time, **opts)
class mesh(Macro, Hookable):
    """2d grid scan.
    The mesh scan traces out a grid using motor1 and motor2.
    The first motor scans from m1_start_pos to m1_final_pos using the specified
    number of intervals. The second motor similarly scans from m2_start_pos
    to m2_final_pos. Each point is counted for integ_time seconds
    (or monitor counts, if integ_time is negative).
    The scan of motor1 is done at each point scanned by motor2. That is, the
    first motor scan is nested within the second motor scan.
    """
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)
    # Macro parameter definition: [name, type, default, description].
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]
    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Store the grid parameters and build the generic step scan
        (SScan) object that will drive the acquisition."""
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        # Total grid points: (intervals + 1) along each axis.
        self.nb_points = (m1_nr_interv + 1) * (m2_nr_interv + 1)
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        self.name = opts.get('name', 'mesh')
        generator = self._generator
        moveables = []
        # Wrap each motor in a MoveableDesc carrying its scan range
        # (min/max regardless of scan direction).
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m,
                                          min_value=min(start, final),
                                          max_value=max(start, final)))
        # Mark the first (fast) motor as the reference axis.
        moveables[0].is_reference = True
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = SScan(self, generator, moveables, env, constrains)
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)
    def _generator(self):
        """Yield one scan step per grid point (motor1 is the fast axis,
        motor2 the slow axis).

        NOTE: the same ``step`` dict object is mutated and re-yielded for
        every point; consumers are expected to process it immediately.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        point_no = 1
        # Pre-compute forward and reverse sweeps of the fast axis.
        m1_space = numpy.linspace(m1start, m1end, points1)
        m1_space_inv = numpy.linspace(m1end, m1start, points1)
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            space = m1_space
            # In bidirectional (s-shaped) mode every other row is scanned
            # in reverse to avoid the return sweep of the fast motor.
            if i % 2 != 0 and self.bidirectional_mode:
                space = m1_space_inv
            for m1pos in space:
                step["positions"] = numpy.array([m1pos, m2pos])
                # TODO: maybe another ID would be better? (e.g. "(A,B)")
                step["point_id"] = point_no
                point_no += 1
                yield step
    def run(self, *args):
        """Execute the scan by iterating the generic step scan."""
        for step in self._gScan.step_scan():
            yield step
class dmesh(mesh):
    """
    2d relative grid scan.
    The relative mesh scan traces out a grid using motor1 and motor2.
    If first motor is at the position X before the scan begins, it will
    be scanned from X+m1_start_pos to X+m1_final_pos using the specified
    m1_nr_interv number of intervals. If the second motor is
    at the position Y before the scan begins, it will be scanned
    from Y+m2_start_pos to Y+m2_final_pos using the specified m2_nr_interv
    number of intervals.
    Each point is counted for the integ_time seconds (or monitor counts,
    if integ_time is negative).
    The scan of motor1 is done at each point scanned by motor2. That is, the
    first motor scan is nested within the second motor scan.
    Upon scan completion, it returns the motors to their original positions.
    """
    # Inherit mesh's hints/env (deep-copied so mutating them here does not
    # affect the parent class) but report a distinct scan type.
    hints = copy.deepcopy(mesh.hints)
    hints['scan'] = 'dmesh'
    env = copy.deepcopy(mesh.env)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]
    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Convert the relative limits into absolute positions based on the
        motors' current positions, then delegate to mesh.prepare."""
        self._motion = self.getMotion([m1, m2])
        # Remember where the motors are now so do_restore() can bring
        # them back after the scan.
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        start1 = self.originalPositions[0] + m1_start_pos
        start2 = self.originalPositions[1] + m2_start_pos
        final1 = self.originalPositions[0] + m1_final_pos
        final2 = self.originalPositions[1] + m2_final_pos
        mesh.prepare(self, m1, start1, final1, m1_nr_interv,
                     m2, start2, final2, m2_nr_interv, integ_time,
                     bidirectional, **opts)
    def do_restore(self):
        # NOTE(review): presumably invoked by the scan framework on
        # completion/termination — moves both motors back to their
        # pre-scan positions.
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class fscan(Macro, Hookable):
    """
    N-dimensional scan along user defined paths.
    The motion path for each motor is defined through the evaluation of a
    user-supplied function that is evaluated as a function of the independent
    variables.
    -independent variables are supplied through the indepvar string.
    The syntax for indepvar is "x=expression1,y=expression2,..."
    -If no indep vars need to be defined, write "!" or "*" or "None"
    -motion path for motor is generated by evaluating the corresponding
    function 'func'
    -Count time is given by integ_time. If integ_time is a scalar, then
    the same integ_time is used for all points. If it evaluates as an array
    (with same length as the paths), fscan will assign a different integration
    time to each acquisition point.
    -If integ_time is positive, it specifies seconds and if negative, specifies
    monitor counts.
    IMPORTANT Notes:
    -no spaces are allowed in the indepvar string.
    -all funcs must evaluate to the same number of points
    >>> fscan "x=[1,3,5,7,9],y=arange(5)" 0.1 motor1 x**2 motor2 sqrt(y*x+3)
    >>> fscan "x=[1,3,5,7,9],y=arange(5)" "[0.1,0.2,0.3,0.4,0.5]" motor1 x**2 \
motor2 sqrt(y*x+3)
    """
    # ['integ_time', Type.String, None, 'Integration time']
    hints = {'scan': 'fscan',
             'allowsHooks': ('pre-scan', 'pre-move', 'post-move', 'pre-acq',
                             'post-acq', 'post-step', 'post-scan')}
    env = ('ActiveMntGrp',)
    param_def = [
        ['indepvars', Type.String, None, 'Independent Variables'],
        ['integ_time', Type.String, None, 'Integration time'],
        ['motor_funcs',
         [['motor', Type.Moveable, None, 'motor'],
          ['func', Type.String, None, 'curve defining path']],
         None, 'List of motor and path curves']
    ]
    def prepare(self, *args, **opts):
        """Parse the independent variables, evaluate each motor's path
        function and build the generic step scan (SScan).

        args layout: (indepvars_str, integ_time_str, motor_func_pairs).
        Raises ValueError on inconsistent path lengths or integ_time size.
        """
        if args[0].lower() in ["!", "*", "none", None]:
            indepvars = {}
        else:
            indepvars = SafeEvaluator({'dict': dict}).eval(
                'dict(%s)' % args[0])  # create a dict containing the indepvars
        self.motors = [item[0] for item in args[2]]
        self.funcstrings = [item[1] for item in args[2]]
        # Build one evaluation namespace per point: {'x': x_i, 'y': y_i, ...}
        # (NOTE: the loop variable ``globals`` shadows the builtin.)
        globals_lst = [dict(list(zip(indepvars, values)))
                       for values in zip(*list(indepvars.values()))]
        self.paths = [[SafeEvaluator(globals).eval(
            func) for func in globals in globals_lst] for func in self.funcstrings] if False else [[SafeEvaluator(globals).eval(
            func) for globals in globals_lst] for func in self.funcstrings]
        self._integ_time = numpy.array(eval(args[1]), dtype='d')
        self.opts = opts
        if len(self.motors) == len(self.paths) > 0:
            self.N = len(self.motors)
        else:
            raise ValueError(
                'Moveable and func lists must be non-empty and same length')
        npoints = len(self.paths[0])
        try:
            # if everything is OK, the following lines should return a 2D array
            # n which each motor path is a row.
            # Typical failure is due to shape mismatch due to inconsistent
            # input
            self.paths = numpy.array(self.paths, dtype='d')
            self.paths.reshape((self.N, npoints))
        except Exception:  # shape mismatch?
            # try to give a meaningful description of the error
            for p, fs in zip(self.paths, self.funcstrings):
                if len(p) != npoints:
                    raise ValueError('"%s" and "%s" yield different number '
                                     'of points (%i vs %i)' %
                                     (self.funcstrings[0], fs, npoints,
                                      len(p)))
            raise  # the problem wasn't a shape mismatch
        self._nb_points = npoints
        if self._integ_time.size == 1:
            self._integ_time = self._integ_time * \
                numpy.ones(self._nb_points)  # extend integ_time
        elif self._integ_time.size != self._nb_points:
            raise ValueError('time_integ must either be a scalar or '
                             'length=npoints (%i)' % self._nb_points)
        self.name = opts.get('name', 'fscan')
        generator = self._generator
        moveables = self.motors
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan'
        self._gScan = SScan(self, generator, moveables, env, constrains)
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)
    def _generator(self):
        """Yield one scan step per path point, with its own integ_time.

        NOTE: the same ``step`` dict object is mutated and re-yielded.
        """
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        for i in range(self._nb_points):
            step["positions"] = self.paths[:, i]
            step["integ_time"] = self._integ_time[i]
            step["point_id"] = i
            yield step
    def run(self, *args):
        """Execute the scan by iterating the generic step scan."""
        for step in self._gScan.step_scan():
            yield step
    def _get_nr_points(self):
        # Deprecated alias kept for backward compatibility.
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        # NOTE(review): returns self.nb_points, which is not set in this
        # class (prepare sets _nb_points) — presumably provided by a base
        # class property; verify.
        return self.nb_points
    nr_points = property(_get_nr_points)
class ascanh(aNscan, Macro):
    """Absolute one-motor scan in hybrid mode.

    The motor is swept from start_pos to final_pos with step size
    (start_pos-final_pos)/nr_interv, collecting nr_interv+1 data points.
    A positive integ_time counts seconds; a negative value counts
    monitor counts.
    """
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        """Delegate to aNscan._prepare, requesting the hybrid scan mode."""
        movables, starts, finals = [motor], [start_pos], [final_pos]
        self._prepare(movables, starts, finals, nr_interv, integ_time,
                      mode=HybridMode, **opts)
class rscan(Macro, Hookable):
    """rscan.
    Do an absolute scan of the specified motor with different number of intervals for each region.
    It uses the gscan framework.
    """
    hints = {'scan': 'rscan', 'allowsHooks': ('pre-scan', 'pre-move',
                                              'post-move', 'pre-acq',
                                              'post-acq', 'post-step',
                                              'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor, start_pos, regions, integ_time, **opts):
        """Store the scan parameters and build the generic step scan."""
        self.name = 'rscan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): ``regions`` is a repeat parameter (one entry per
        # (next_pos, nr_intervals) pair), so halving its length here looks
        # suspicious; the attribute is unused in this class as shown —
        # verify before relying on it.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        # Expose GScan's data through the standard Macro data member.
        self._data = self._gScan.data
    def _generator(self):
        """Yield one scan step per position, region by region.

        NOTE: the same ``step`` dict object is mutated and re-yielded.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            # Each region starts where the previous one ended.
            region_start = region_stop
    def run(self, *args):
        """Execute the scan by iterating the generic step scan."""
        for step in self._gScan.step_scan():
            yield step
class r2scan(Macro, Hookable):
    """r2scan.
    Do an absolute scan of the specified motors with different number of intervals for each region.
    It uses the gscan framework. All the motors will be driven to the same position in each step
    """
    hints = {'scan': 'r2scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]
    def prepare(self, motor1, motor2, start_pos, regions, integ_time, **opts):
        """Store the scan parameters and build the generic step scan."""
        self.name = 'r2scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): same as rscan — ``len(regions) // 2`` looks
        # suspicious for a repeat parameter; unused in this class as shown.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        # Expose GScan's data through the standard Macro data member.
        self._data = self._gScan.data
    def _generator(self):
        """Yield one scan step per position, region by region; both motors
        are sent to the same position.

        NOTE: the same ``step`` dict object is mutated and re-yielded.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            # Each region starts where the previous one ended.
            region_start = region_stop
    def run(self, *args):
        """Execute the scan by iterating the generic step scan."""
        for step in self._gScan.step_scan():
            yield step
class r3scan(Macro, Hookable):
    """r3scan.
    Do an absolute scan of the specified motors with different number of
    intervals for each region. It uses the gscan framework.
    All the motors will be driven to the same position in each step
    """
    hints = {'scan': 'r3scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['motor3', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]
    def prepare(self, motor1, motor2, motor3, start_pos, regions, integ_time, **opts):
        """Store the scan parameters and build the generic step scan."""
        self.name = 'r3scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): same as rscan — ``len(regions) // 2`` looks
        # suspicious for a repeat parameter; unused in this class as shown.
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2, motor3]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        # Expose GScan's data through the standard Macro data member.
        self._data = self._gScan.data
    def _generator(self):
        """Yield one scan step per position, region by region; all three
        motors are sent to the same position.

        NOTE: the same ``step`` dict object is mutated and re-yielded.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p, p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            # Each region starts where the previous one ended.
            region_start = region_stop
    def run(self, *args):
        """Execute the scan by iterating the generic step scan."""
        for step in self._gScan.step_scan():
            yield step
class scanhist(Macro):
    """Shows scan history information. Give optional parameter scan number to
    display details about a specific scan"""
    param_def = [
        ['scan number', Type.Integer, -1,
         'scan number. [default=-1 meaning show all scans]'],
    ]
    def run(self, scan_number):
        """Show one scan (scan_number >= 0) or the whole history list."""
        try:
            hist = self.getEnv("ScanHistory")
        except UnknownEnv:
            print("No scan recorded in history")
            return
        if scan_number < 0:
            self.show_all(hist)
        else:
            self.show_one(hist, scan_number)
    def show_one(self, hist, scan_number):
        """Print a detailed, one-column-per-field table for the scan with
        the given serial number, or a warning if it is not in history."""
        # Find the matching history entry.
        # Fix: the rest of the method previously read the loop variable
        # ``h`` after the loop instead of ``item``; that only worked by
        # accident of the ``break`` and broke if ``hist`` was empty.
        item = None
        for h in hist:
            if h['serialno'] == scan_number:
                item = h
                break
        if item is None:
            self.warning("Could not find scan number %s", scan_number)
            return
        serialno, title = item['serialno'], item['title']
        start = datetime.datetime.fromtimestamp(item['startts'])
        end = datetime.datetime.fromtimestamp(item['endts'])
        total_time = end - start
        start, end, total_time = start.ctime(), end.ctime(), str(total_time)
        scan_dir, scan_file = item['ScanDir'], item['ScanFile']
        deadtime = '%.1f%%' % item['deadtime']
        user = item['user']
        store = "Not stored!"
        if scan_dir is not None and scan_file is not None:
            if isinstance(scan_file, str):
                store = os.path.join(scan_dir, scan_file)
            else:
                # scan_file may be a list of file names
                store = scan_dir + os.path.sep + str(scan_file)
        channels = ", ".join(item['channels'])
        cols = ["#", "Title", "Start time", "End time", "Took", "Dead time",
                "User", "Stored", "Channels"]
        data = [serialno, title, start, end, total_time, deadtime, user, store,
                channels]
        table = Table([data], row_head_str=cols, row_head_fmt='%*s',
                      elem_fmt=['%-*s'],
                      col_sep='  :  ')
        for line in table.genOutput():
            self.output(line)
    def show_all(self, hist):
        """Print a compact, one-row-per-scan summary of the whole history.

        Times from today are shown as HH:MM:SS only; older times include
        the date.
        """
        cols = "#", "Title", "Start time", "End time", "Stored"
        width = -1, -1, -1, -1, -1
        out = List(cols, max_col_width=width)
        today = datetime.datetime.today().date()
        for h in hist:
            start = datetime.datetime.fromtimestamp(h['startts'])
            if start.date() == today:
                start = start.time().strftime("%H:%M:%S")
            else:
                start = start.strftime("%Y-%m-%d %H:%M:%S")
            end = datetime.datetime.fromtimestamp(h['endts'])
            if end.date() == today:
                end = end.time().strftime("%H:%M:%S")
            else:
                end = end.strftime("%Y-%m-%d %H:%M:%S")
            scan_file = h['ScanFile']
            store = "Not stored!"
            if scan_file is not None:
                store = ", ".join(scan_file)
            row = h['serialno'], h['title'], start, end, store
            out.appendRow(row)
        for line in out.genOutput():
            self.output(line)
class ascanc(aNscan, Macro):
    """Absolute continuous scan of a single motor.

    The motor moves continuously from start_pos to final_pos while data
    are acquired with the given integ_time.
    """
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        """Delegate to aNscan._prepare in continuous mode (note: the
        slow-down factor takes the nr_interv slot of the generic call)."""
        movables, starts, finals = [motor], [start_pos], [final_pos]
        self._prepare(movables, starts, finals, slow_down,
                      integ_time, mode=ContinuousMode, **opts)
class a2scanc(aNscan, Macro):
    """Absolute continuous scan of two motors moving simultaneously."""
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        """Bundle per-motor arguments and delegate to aNscan._prepare in
        continuous mode (slow_down takes the nr_interv slot)."""
        movables = [motor1, motor2]
        start_positions = [start_pos1, start_pos2]
        final_positions = [final_pos1, final_pos2]
        self._prepare(movables, start_positions, final_positions,
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class a3scanc(aNscan, Macro):
    """three-motor continuous scan"""

    param_def = [
        ["motor1", Type.Moveable, None, "Moveable 1 to move"],
        ["start_pos1", Type.Float, None, "Scan start position 1"],
        ["final_pos1", Type.Float, None, "Scan final position 1"],
        ["motor2", Type.Moveable, None, "Moveable 2 to move"],
        ["start_pos2", Type.Float, None, "Scan start position 2"],
        ["final_pos2", Type.Float, None, "Scan final position 2"],
        ["motor3", Type.Moveable, None, "Moveable 3 to move"],
        ["start_pos3", Type.Float, None, "Scan start position 3"],
        ["final_pos3", Type.Float, None, "Scan final position 3"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["slow_down", Type.Float, 1, "global scan slow down factor (0, 1]"],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        # Group per-motor arguments and delegate to the generic
        # continuous-scan preparation.
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class a4scanc(aNscan, Macro):
    """four-motor continuous scan"""

    # Fix: the motor4 parameter descriptions previously read
    # "Moveable 3"/"position 3" (copy-paste slip).
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        # Delegate to the generic N-dimensional continuous preparation.
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class dNscanc(dNscan):
    """Continuous variant of the relative N-dimensional scan base."""

    def do_restore(self):
        # The continuous scan may have lowered the motor velocities;
        # restore the maximum top velocity before moving back to the
        # initial positions (done by the base-class restore).
        for motor in self.motors:
            self._gScan.set_max_top_velocity(motor)
        dNscan.do_restore(self)
class dscanc(dNscanc, Macro):
    """continuous motor scan relative to the starting position."""

    param_def = [
        ["motor", Type.Moveable, None, "Moveable to move"],
        ["start_pos", Type.Float, None, "Scan start position"],
        ["final_pos", Type.Float, None, "Scan final position"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["slow_down", Type.Float, 1, "global scan slow down factor (0, 1]"],
    ]

    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        # Single-motor case of the generic relative continuous scan.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d2scanc(dNscanc, Macro):
    """continuous two-motor scan relative to the starting positions"""

    param_def = [
        ["motor1", Type.Moveable, None, "Moveable 1 to move"],
        ["start_pos1", Type.Float, None, "Scan start position 1"],
        ["final_pos1", Type.Float, None, "Scan final position 1"],
        ["motor2", Type.Moveable, None, "Moveable 2 to move"],
        ["start_pos2", Type.Float, None, "Scan start position 2"],
        ["final_pos2", Type.Float, None, "Scan final position 2"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["slow_down", Type.Float, 1, "global scan slow down factor (0, 1]"],
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        # Group per-motor arguments and delegate to the generic
        # relative continuous preparation.
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d3scanc(dNscanc, Macro):
    """continuous three-motor scan"""

    param_def = [
        ["motor1", Type.Moveable, None, "Moveable 1 to move"],
        ["start_pos1", Type.Float, None, "Scan start position 1"],
        ["final_pos1", Type.Float, None, "Scan final position 1"],
        ["motor2", Type.Moveable, None, "Moveable 2 to move"],
        ["start_pos2", Type.Float, None, "Scan start position 2"],
        ["final_pos2", Type.Float, None, "Scan final position 2"],
        ["motor3", Type.Moveable, None, "Moveable 3 to move"],
        ["start_pos3", Type.Float, None, "Scan start position 3"],
        ["final_pos3", Type.Float, None, "Scan final position 3"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["slow_down", Type.Float, 1, "global scan slow down factor (0, 1]"],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        # Group per-motor arguments and delegate to the generic
        # relative continuous preparation.
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d4scanc(dNscanc, Macro):
    """continuous four-motor scan relative to the starting positions"""

    # Fix: the motor4 parameter descriptions previously read
    # "Moveable 3"/"position 3" (copy-paste slip).
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        # Delegate to the generic N-dimensional relative continuous
        # preparation.
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class meshc(Macro, Hookable):
    """2d continuous grid scan.

    motor1 is scanned continuously along each row; motor2 is stepped
    through m2_nr_interv + 1 discrete positions (one waypoint per row).
    """

    # NOTE(review): the scan hint is 'mesh', not 'meshc' -- confirm this is
    # intentional (e.g. shared recorder handling with the step mesh macro).
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Store the scan geometry and build the CSScan engine."""
        self.motors = [m1, m2]
        self.slow_down = slow_down
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.m2_nr_interv = m2_nr_interv
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        # One continuous row (waypoint) per discrete motor2 position.
        self.nr_waypoints = m2_nr_interv + 1
        self.name = opts.get('name', 'meshc')

        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        # motor1 (the continuously moving one) is the reference moveable.
        moveables[0].is_reference = True

        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])

        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = CSScan(self, self._waypoint_generator,
                             self._period_generator, moveables, env,
                             constrains, extrainfodesc)
        # motor2 does not move during each continuous row.
        self._gScan.frozen_motors = [m2]

        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _waypoint_generator(self):
        """Yield one waypoint (continuous row of motor1) per motor2 step."""
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["check_func"] = []
        step["slow_down"] = self.slow_down
        points2 = self.m2_nr_interv + 1
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        point_no = 1
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            start, end = m1start, m1end
            # In s-shaped (bidirectional) mode every other row is scanned
            # backwards to save the return travel of motor1.
            if i % 2 != 0 and self.bidirectional_mode:
                start, end = m1end, m1start
            step["start_positions"] = numpy.array([start, m2pos])
            step["positions"] = numpy.array([end, m2pos])
            step["point_id"] = point_no
            point_no += 1
            yield step

    def _period_generator(self):
        """Yield acquisition periods indefinitely.

        The scan engine stops consuming this generator when the
        waypoints are exhausted.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        step['extrainfo'] = {}
        point_no = 0
        while(True):
            point_no += 1
            step["point_id"] = point_no
            yield step

    def run(self, *args):
        """Execute the scan, yielding after each step."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        """Estimate the scan duration from the waypoint trajectory."""
        return self._gScan.waypoint_estimation()

    def getIntervalEstimation(self):
        """Return the number of waypoints (continuous rows)."""
        return self.nr_waypoints
class dmeshc(meshc):
    """2d relative continuous grid scan.
    The relative mesh scan traces out a grid using motor1 and motor2.
    If first motor is at the position X before the scan begins, it will
    be continuously scanned from X+m1_start_pos to X+m1_final_pos.
    If the second motor is at the position Y before the scan begins,
    it will be discrete scanned from Y+m2_start_pos to Y+m2_final_pos
    using the specified m2_nr_interv number of intervals.
    The scan considers the accel. and decel. times of the motor1, so the
    counts (for the integ_time seconds or monitor counts,
    if integ_time is negative) are executed while motor1 is moving
    with the constant velocity.
    Upon scan completion, it returns the motors to their original positions.
    """

    # Reuse meshc's hints/env but record the scan as 'dmeshc'; deepcopy so
    # mutating here does not alter meshc's class attributes.
    hints = copy.deepcopy(meshc.hints)
    hints['scan'] = 'dmeshc'
    env = copy.deepcopy(meshc.env)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Translate relative positions to absolute and delegate to meshc.

        The current motor positions are remembered so do_restore can
        drive the motors back after the scan.
        """
        self._motion = self.getMotion([m1, m2])
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        start1 = self.originalPositions[0] + m1_start_pos
        start2 = self.originalPositions[1] + m2_start_pos
        final1 = self.originalPositions[0] + m1_final_pos
        final2 = self.originalPositions[1] + m2_final_pos
        meshc.prepare(self, m1, start1, final1, slow_down,
                      m2, start2, final2, m2_nr_interv, integ_time,
                      bidirectional, **opts)

    def do_restore(self):
        # Drive both motors back to their pre-scan positions.
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class aNscanct(aNscan):
    """N-dimensional continuous scan. This is **not** meant to be called by
    the user, but as a generic base to construct ascanct, a2scanct, a3scanct,
    ..."""

    # Hook points available to the hardware-timed continuous scans.
    hints = {"scan": "aNscanct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
class ascanct(aNscanct, Macro):
    """Do an absolute continuous scan of the specified motor.

    ascanct scans one motor, as specified by motor. The motor starts before
    the position given by start_pos in order to reach the constant velocity
    at the start_pos and finishes at the position after the final_pos in
    order to maintain the constant velocity until the final_pos."""

    param_def = [
        ["motor", Type.Moveable, None, "Moveable name"],
        ["start_pos", Type.Float, None, "Scan start position"],
        ["final_pos", Type.Float, None, "Scan final position"],
        ["nr_interv", Type.Integer, None, "Number of scan intervals"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["latency_time", Type.Float, 0, "Latency time"],
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        # Hardware-timed continuous mode: acquisition is triggered while
        # the motor moves at constant velocity.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv,
                      integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class a2scanct(aNscanct, Macro):
    """Two-motor continuous scan.

    a2scanct scans two motors, as specified by motor1 and motor2. Each motor
    starts before the position given by its start_pos in order to reach the
    constant velocity at its start_pos and finishes at the position after
    its final_pos in order to maintain the constant velocity until its
    final_pos."""

    param_def = [
        ["motor1", Type.Moveable, None, "Moveable 1 to move"],
        ["start_pos1", Type.Float, None, "Scan start position 1"],
        ["final_pos1", Type.Float, None, "Scan final position 1"],
        ["motor2", Type.Moveable, None, "Moveable 2 to move"],
        ["start_pos2", Type.Float, None, "Scan start position 2"],
        ["final_pos2", Type.Float, None, "Scan final position 2"],
        ["nr_interv", Type.Integer, None, "Number of scan intervals"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["latency_time", Type.Float, 0, "Latency time"],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, nr_interv,
                integ_time, latency_time, **opts):
        # Hardware-timed continuous mode over two motors.
        motors = [m1, m2]
        starts = [s1, s2]
        finals = [f1, f2]
        self._prepare(motors, starts, finals, nr_interv,
                      integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class a3scanct(aNscanct, Macro):
    """Three-motor continuous scan.
    a3scanct scans three motors, as specified by motor1, motor2 and motor3.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, latency_time, **opts):
        # Delegate to the generic hardware-timed continuous preparation.
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3], nr_interv,
                      integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class a4scanct(aNscanct, Macro):
    """Four-motor continuous scan.

    a4scanct scans four motors, as specified by motor1, motor2, motor3 and
    motor4. Each motor starts before the position given by its start_pos in
    order to reach the constant velocity at its start_pos and finishes at the
    position after its final_pos in order to maintain the constant velocity
    until its final_pos."""

    # Fix: inherit from aNscanct (not aNscan) like a2scanct/a3scanct so the
    # hardware-timed continuous-scan hook hints apply; the docstring also
    # wrongly referred to this macro as "a2scanct".
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, latency_time, **opts):
        # Delegate to the generic hardware-timed continuous preparation.
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      nr_interv, integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class dNscanct(dNscan):
    """N-dimensional relative continuous scan. This is **not** meant to be
    called by the user, but as a generic base to construct dscanct, d2scanct,
    d3scanct, ..."""

    # Hook points available to the relative hardware-timed continuous scans.
    hints = {"scan": "dNscanct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
class dscanct(dNscanct, Macro):
    """Do a relative continuous motor scan.

    dscanct scans a motor, as specified by motor1. The motor starts before
    the position given by its start_pos in order to reach the constant
    velocity at its start_pos and finishes at the position after its
    final_pos in order to maintain the constant velocity until its
    final_pos."""

    param_def = [
        ["motor", Type.Moveable, None, "Moveable name"],
        ["start_pos", Type.Float, None, "Scan start position"],
        ["final_pos", Type.Float, None, "Scan final position"],
        ["nr_interv", Type.Integer, None, "Number of scan intervals"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["latency_time", Type.Float, 0, "Latency time"],
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        # Relative hardware-timed continuous scan of a single motor.
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv,
                      integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class d2scanct(dNscanct, Macro):
    """continuous two-motor scan relative to the starting positions,
    d2scanct scans two motors, as specified by motor1 and motor2.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, integ_time, slow_down, **opts):
        # NOTE(review): dscanct/ascanct pass nr_interv (+ latency_time) to
        # _prepare for ContinuousHwTimeMode, while this macro passes
        # slow_down as the scan-length argument. This looks inconsistent
        # with the other *scanct macros -- confirm against aNscan._prepare.
        self._prepare([m1, m2], [s1, s2], [f1, f2], slow_down, integ_time,
                      mode=ContinuousHwTimeMode, **opts)
class d3scanct(dNscanct, Macro):
    """continuous three-motor scan relative to the starting positions,
    d3scanct scans three motors, as specified by motor1, motor2 and motor3.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        # NOTE(review): dscanct/ascanct pass nr_interv (+ latency_time) to
        # _prepare for ContinuousHwTimeMode, while this macro passes
        # slow_down as the scan-length argument. This looks inconsistent
        # with the other *scanct macros -- confirm against aNscan._prepare.
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3], slow_down,
                      integ_time, mode=ContinuousHwTimeMode, **opts)
class d4scanct(dNscanct, Macro):
    """continuous four-motor scan relative to the starting positions,
    d4scanct scans four motors, as specified by motor1, motor2, motor3 and
    motor4.
    Each motor starts before the position given by its start_pos in order to
    reach the constant velocity at its start_pos and finishes at the position
    after its final_pos in order to maintain the constant velocity until its
    final_pos."""

    # Fix: the motor4 parameter descriptions previously read
    # "Moveable 3"/"position 3" (copy-paste slip); the docstring also said
    # "three motors".
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        # NOTE(review): dscanct/ascanct pass nr_interv (+ latency_time) to
        # _prepare for ContinuousHwTimeMode, while this macro passes
        # slow_down as the scan-length argument. Kept as-is (interface
        # unchanged) -- confirm against aNscan._prepare.
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousHwTimeMode,
                      **opts)
class meshct(Macro, Hookable):
    """2d grid scan .
    The mesh scan traces out a grid using motor1 and motor2.
    The first motor scans in contiuous mode from m1_start_pos to m1_final_pos
    using the specified number of intervals. The second motor similarly
    scans from m2_start_pos to m2_final_pos but it does not move during the
    continuous scan. Each point is counted for integ_time seconds
    (or monitor counts, if integ_time is negative).
    The scan of motor1 is done at each point scanned by motor2. That is, the
    first motor scan is nested within the second motor scan.
    """

    hints = {"scan": "meshct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
    env = ('ActiveMntGrp',)

    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped'],
        ['latency_time', Type.Float, 0, 'Latency time']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, latency_time, **opts):
        """Store the scan geometry, precompute waypoints and build CTScan."""
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        # Number of intervals of the first motor which is doing the
        # continuous scan.
        self.nr_interv = m1_nr_interv
        self.nb_points = self.nr_interv + 1
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional

        # Prepare the waypoints: one continuous row of motor1 for every
        # discrete motor2 position; in bidirectional mode the motor1
        # direction alternates every row.
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        m2_space = numpy.linspace(m2start, m2end, points2)
        self.waypoints = []
        self.starts_points = []
        for i, m2pos in enumerate(m2_space):
            self.starts_points.append(numpy.array([m1start, m2pos], dtype='d'))
            self.waypoints.append(numpy.array([m1end, m2pos], dtype='d'))
            if self.bidirectional_mode:
                m1start, m1end = m1end, m1start
        self.name = opts.get('name', 'meshct')

        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        # motor1 (the continuously moving one) is the reference moveable.
        moveables[0].is_reference = True

        env = opts.get('env', {})
        # The effective latency is the larger of the requested one and the
        # measurement group's own latency.
        mg_name = self.getEnv('ActiveMntGrp')
        mg = self.getMeasurementGroup(mg_name)
        mg_latency_time = mg.getLatencyTime()
        if mg_latency_time > latency_time:
            self.info("Choosing measurement group latency time: %f" %
                      mg_latency_time)
            latency_time = mg_latency_time
        self.latency_time = latency_time
        constrains = [getCallable(cns) for cns in opts.get('constrains',
                                                           [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])

        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = CTScan(self, self._generator, moveables, env, constrains,
                             extrainfodesc)
        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _generator(self):
        """Yield one hardware-timed waypoint per precomputed motor2 row."""
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        # Pad incomplete rows with dummy records after every waypoint move.
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["check_func"] = []
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        points1, _ = self.nr_intervs + 1
        for i, waypoint in enumerate(self.waypoints):
            self.point_id = points1 * i
            step["waypoint_id"] = i
            self.starts = self.starts_points[i]
            self.finals = waypoint
            step["positions"] = []
            step["start_positions"] = []
            for start, end, moveable_tree in zip(self.starts, self.finals,
                                                 moveables_trees):
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
            yield step

    def run(self, *args):
        """Execute the scan, yielding after each step."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        # No duration estimation implemented for this scan.
        return 0.0

    def getIntervalEstimation(self):
        """Return the number of waypoints (motor2 rows)."""
        return len(self.waypoints)

    def _fill_missing_records(self):
        # fill record list with dummy records for the final padding
        nb_of_points = self.nb_points
        scan = self._gScan
        nb_of_total_records = len(scan.data.records)
        nb_of_records = nb_of_total_records - self.point_id
        missing_records = nb_of_points - nb_of_records
        scan.data.initRecords(missing_records)

    def _get_nr_points(self):
        # Deprecated alias kept for backward compatibility.
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class timescan(Macro, Hookable):
    """Do a time scan over the specified time intervals. The scan starts
    immediately. The number of data points collected will be nr_interv + 1.
    Count time is given by integ_time. Latency time will be the longer one
    of latency_time and measurement group latency time.
    """

    hints = {'scan': 'timescan', 'allowsHooks': ('pre-scan', 'pre-acq',
                                                 'post-acq', 'post-scan')}

    param_def = [
        ["nr_interv", Type.Integer, None, "Number of scan intervals"],
        ["integ_time", Type.Float, None, "Integration time"],
        ["latency_time", Type.Float, 0, "Latency time"]]

    def prepare(self, nr_interv, integ_time, latency_time):
        """Store the scan parameters and build the time-scan engine."""
        self.nr_interv = nr_interv
        self.nb_points = nr_interv + 1
        self.integ_time = integ_time
        self.latency_time = latency_time
        self._gScan = TScan(self)
        # Publish the engine's data through the standard Macro data member
        # so the Macro data infrastructure (getters, properties, ...) works.
        self.setData(self._gScan.data)

    def run(self, *args):
        """Execute the scan, yielding after every step."""
        yield from self._gScan.step_scan()

    def getTimeEstimation(self):
        """Estimate total duration: one (integ + latency) slot per point."""
        mg_latency = self._gScan.measurement_group.getLatencyTime()
        effective_latency = max(self.latency_time, mg_latency)
        return self.nb_points * (self.integ_time + effective_latency)

    def getIntervalEstimation(self):
        """Return the number of scan intervals."""
        return self.nr_interv

    def _get_nr_points(self):
        # Deprecated alias kept for backward compatibility.
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class scanstats(Macro):
    """Calculate basic statistics of the enabled and plotted channels in
    the active measurement group for the last scan. If no channel is selected
    for plotting it fallbacks to the first enabled channel. Print stats and
    publish them in the env.

    The macro must be hooked in the post-scan hook place.
    """

    env = ("ActiveMntGrp", )

    param_def = [
        ["channel",
         [["channel", Type.ExpChannel, None, ""], {"min": 0}],
         None,
         "List of channels for statistics calculations"
         ]
    ]

    def run(self, channel):
        """Compute, print and publish per-channel statistics of the last
        scan.

        Channels come either from the *channel* parameter or, by default,
        from the enabled + plotted channels of the active measurement group
        (falling back to the first enabled channel). Statistics are
        published in the door environment as ``<door>.ScanStats``.
        """
        parent = self.getParentMacro()
        if not parent:
            self.warning("for now the scanstats macro can only be executed as"
                         " a post-scan hook")
            return
        if not hasattr(parent, "motors"):
            self.warning("scan must involve at least one moveable "
                         "to calculate statistics")
            return

        active_meas_grp = self.getEnv("ActiveMntGrp")
        meas_grp = self.getMeasurementGroup(active_meas_grp)
        calc_channels = []
        enabled_channels = meas_grp.getEnabled()
        if channel:
            stat_channels = [chan.name for chan in channel]
        else:
            stat_channels = list(enabled_channels.keys())
        for chan in stat_channels:
            enabled = enabled_channels.get(chan)
            if enabled is None:
                self.warning("{} not in {}".format(chan, meas_grp.name))
            elif not enabled and channel:
                self.warning("{} not enabled".format(chan))
            elif enabled and channel:
                # channel was given as parameter
                calc_channels.append(chan)
            elif enabled and meas_grp.getPlotType(chan)[chan] == 1:
                calc_channels.append(chan)
        if len(calc_channels) == 0:
            # fallback is first enabled channel in meas_grp
            calc_channels.append(next(iter(enabled_channels)))

        # Statistics only make sense for scalar channels: drop 1D/2D ones.
        scalar_channels = []
        for _, chan in self.getExpChannels().items():
            if chan.type in ("OneDExpChannel", "TwoDExpChannel"):
                continue
            scalar_channels.append(chan.name)
        calc_channels = [ch for ch in calc_channels if ch in scalar_channels]
        if len(calc_channels) == 0:
            self.warning("measurement group must contain at least one "
                         "enabled scalar channel to calculate statistics")
            return

        selected_motor = str(parent.motors[0])
        stats = {}
        col_header = []
        cols = []
        # Collect motor positions and channel readings from the scan records.
        motor_data = []
        channels_data = {channel_name: [] for channel_name in calc_channels}
        for _, rc in parent.data.items():
            motor_data.append(rc[selected_motor])
            for channel_name in calc_channels:
                channels_data[channel_name].append(rc[channel_name])

        motor_data = numpy.array(motor_data)
        for channel_name, data in channels_data.items():
            channel_data = numpy.array(data)
            (_min, _max, min_at, max_at, half_max, com, mean, _int,
             fwhm, cen) = self._calcStats(motor_data, channel_data)
            stats[channel_name] = {
                "min": _min,
                "max": _max,
                "minpos": min_at,
                "maxpos": max_at,
                "mean": mean,
                "int": _int,
                "com": com,
                "fwhm": fwhm,
                "cen": cen}
            col_header.append([channel_name])
            cols.append([
                stats[channel_name]["min"],
                stats[channel_name]["max"],
                stats[channel_name]["minpos"],
                stats[channel_name]["maxpos"],
                stats[channel_name]["mean"],
                stats[channel_name]["int"],
                stats[channel_name]["com"],
                stats[channel_name]["fwhm"],
                stats[channel_name]["cen"],
            ])

        self.info("Statistics for movable: {:s}".format(selected_motor))
        table = Table(elem_list=cols, elem_fmt=["%*g"],
                      row_head_str=["MIN", "MAX", "MIN@", "MAX@",
                                    "MEAN", "INT", "COM", "FWHM", "CEN"],
                      col_head_str=col_header, col_head_sep="-")
        for line in table.genOutput():
            self.info(line)

        self.setEnv("{:s}.ScanStats".format(self.getDoorName()),
                    {"Stats": stats,
                     "Motor": selected_motor,
                     "ScanID": self.getEnv("ScanID")})

    @staticmethod
    def _calcStats(x, y):
        """Return (min, max, minpos, maxpos, half_max, com, mean, int,
        fwhm, cen) statistics of the curve y(x).

        Fix: the previous implementation relied on ``except
        ZeroDivisionError`` around numpy divisions, but numpy returns
        nan/inf with a RuntimeWarning instead of raising, so the fallbacks
        never triggered; zero denominators are now checked explicitly.
        """
        # extremes and their positions
        _min = numpy.min(y)
        _max = numpy.max(y)
        min_idx = numpy.argmin(y)
        min_at = x[min_idx]
        max_idx = numpy.argmax(y)
        max_at = x[max_idx]
        # center of mass (com)
        y_sum = numpy.sum(y)
        if y_sum != 0:
            com = numpy.sum(y * x) / y_sum
        else:
            com = 0
        mean = numpy.mean(y)
        _int = y_sum
        # determine if it is a peak- or erf-like function
        half_max = (_max - _min) / 2 + _min
        lower_left = numpy.any(y[0:max_idx] < half_max)
        lower_right = numpy.any(y[max_idx:] < half_max)
        if lower_left and lower_right:
            # it is a peak-like function
            y_data = y
        else:
            # it is an erf-like function:
            # use the gradient for further calculation
            if lower_left:
                y_data = numpy.gradient(y)
            else:
                y_data = -1 * numpy.gradient(y)
            # use also the half maximum of the gradient
            half_max = (numpy.max(y_data) - numpy.min(y_data)) \
                / 2 + numpy.min(y_data)
        # cen and fwhm
        # this part is adapted from:
        #
        # The PyMca X-Ray Fluorescence Toolkit
        #
        # Copyright (c) 2004-2014 European Synchrotron Radiation Facility
        #
        # This file is part of the PyMca X-ray Fluorescence Toolkit developed
        # at the ESRF by the Software group.
        max_idx_data = numpy.argmax(y_data)
        # lower half-maximum crossing (interpolated)
        idx = max_idx_data
        try:
            while y_data[idx] >= half_max:
                idx = idx - 1
            x0 = x[idx]
            x1 = x[idx + 1]
            y0 = y_data[idx]
            y1 = y_data[idx + 1]
            if y1 == y0:
                lhmx = 0
            else:
                lhmx = (half_max * (x1 - x0) - (y0 * x1) + (y1 * x0)) \
                    / (y1 - y0)
        except IndexError:
            lhmx = x[0]
        # upper half-maximum crossing (interpolated)
        idx = max_idx_data
        try:
            while y_data[idx] >= half_max:
                idx = idx + 1
            x0 = x[idx - 1]
            x1 = x[idx]
            y0 = y_data[idx - 1]
            y1 = y_data[idx]
            if y1 == y0:
                uhmx = 0
            else:
                uhmx = (half_max * (x1 - x0) - (y0 * x1) + (y1 * x0)) \
                    / (y1 - y0)
        except IndexError:
            uhmx = x[-1]
        fwhm = uhmx - lhmx
        cen = (uhmx + lhmx) / 2
        return (_min, _max, min_at, max_at, half_max, com, mean, _int,
                fwhm, cen)
| 43.126718 | 99 | 0.587436 | id value for mode %s' % mode)
self._data = self._gScan.data
def _stepGenerator(self):
step = {}
step["integ_time"] = self.integ_time
step["pre-move-hooks"] = self.getHooks('pre-move')
step["post-move-hooks"] = self.getHooks('post-move')
step["pre-acq-hooks"] = self.getHooks('pre-acq')
step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
'_NOHINTS_')
step["post-step-hooks"] = self.getHooks('post-step')
step["check_func"] = []
for point_no in range(self.nb_points):
step["positions"] = self.starts + point_no * self.interv_sizes
step["point_id"] = point_no
yield step
def _waypoint_generator(self):
step = {}
step["pre-move-hooks"] = self.getHooks('pre-move')
step["post-move-hooks"] = self.getHooks('post-move')
step["check_func"] = []
step["slow_down"] = self.slow_down
for point_no in range(self.nr_waypoints):
step["positions"] = self.starts + point_no * self.way_lengths
step["waypoint_id"] = point_no
yield step
    def _waypoint_generator_hwtime(self):
        """Yield a single waypoint descriptor for hardware-timed
        continuous scans.

        Unlike the software-timed generators, only one step dict is
        produced: it carries the full list of start/end positions of all
        physical moveables (expanded via ``_calculate_positions``) and
        the total active acquisition time.
        """
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        # _fill_missing_records pads the scan data in case the hardware
        # delivered fewer records than expected
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["check_func"] = []
        # total time the acquisition hardware is busy
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        step["positions"] = []
        step["start_positions"] = []
        starts = self.starts
        for point_no, waypoint in enumerate(self.waypoints):
            for start, end, moveable_tree in zip(starts, waypoint,
                                                 moveables_trees):
                # expand pseudo-motor positions to physical ones
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
                step["waypoint_id"] = point_no
            # next segment starts where this one ended
            starts = waypoint
        yield step
def _period_generator(self):
step = {}
step["integ_time"] = self.integ_time
step["pre-acq-hooks"] = self.getHooks('pre-acq')
step["post-acq-hooks"] = (self.getHooks('post-acq') +
self.getHooks('_NOHINTS_'))
step["post-step-hooks"] = self.getHooks('post-step')
step["check_func"] = []
step['extrainfo'] = {}
point_no = 0
while(True):
point_no += 1
step["point_id"] = point_no
yield step
def run(self, *args):
for step in self._gScan.step_scan():
yield step
    def getTimeEstimation(self):
        """Estimate the total scan duration in seconds.

        For step mode: the slowest motor path to the first point, plus
        ``nr_interv`` times the slowest per-step motion, plus the total
        acquisition time.  For continuous mode the estimation is
        delegated to the generic scan.  Other modes return 0.0.
        """
        gScan = self._gScan
        mode = self.mode
        it = gScan.generator()
        v_motors = gScan.get_virtual_motors()
        curr_pos = gScan.motion.readPosition()
        total_time = 0.0
        if mode == StepMode:
            # variable to store the longest motion duration
            max_step0_time, max_step_time = 0.0, 0.0
            # first motion takes longer, all others should be equal
            step0 = next(it)
            for v_motor, start, stop, length in zip(v_motors, curr_pos,
                                                    step0['positions'],
                                                    self.interv_sizes):
                # approach move to the first scan point
                path0 = MotionPath(v_motor, start, stop)
                # a generic inter-point move of one interval length
                path = MotionPath(v_motor, 0, length)
                max_step0_time = max(max_step0_time, path0.duration)
                max_step_time = max(max_step_time, path.duration)
            motion_time = max_step0_time + self.nr_interv * max_step_time
            acq_time = self.nb_points * self.integ_time
            total_time = motion_time + acq_time
        elif mode == ContinuousMode:
            total_time = gScan.waypoint_estimation()
        # TODO: add time estimation for ContinuousHwTimeMode
        return total_time
def getIntervalEstimation(self):
mode = self.mode
if mode in [StepMode, ContinuousHwTimeMode, HybridMode]:
return self.nr_interv
elif mode == ContinuousMode:
return self.nr_waypoints
def _fill_missing_records(self):
nb_of_points = self.nb_points
scan = self._gScan
nb_of_records = len(scan.data.records)
missing_records = nb_of_points - nb_of_records
scan.data.initRecords(missing_records)
def _get_nr_points(self):
msg = ("nr_points is deprecated since version 3.0.3. "
"Use nb_points instead.")
self.warning(msg)
return self.nb_points
nr_points = property(_get_nr_points)
class dNscan(aNscan):
    """Relative (delta) N-dimensional scan base class.

    Start and end positions are interpreted as offsets from the current
    motor positions, which are restored when the scan finishes.
    """
    hints = copy.deepcopy(aNscan.hints)
    hints['scan'] = 'dNscan'
    def _prepare(self, motorlist, startlist, endlist, scan_length,
                 integ_time, mode=StepMode, **opts):
        # remember where the motors are now; start/end are offsets to it
        self._motion = self.getMotion([m.getName() for m in motorlist])
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        starts = numpy.array(startlist, dtype='d') + self.originalPositions
        finals = numpy.array(endlist, dtype='d') + self.originalPositions
        aNscan._prepare(self, motorlist, starts, finals,
                        scan_length, integ_time, mode=mode, **opts)
    def do_restore(self):
        # called by the scan framework at the end of the scan (also on
        # abort) to bring the motors back
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class ascan(aNscan, Macro):
    """One-motor absolute step scan.

    Moves *motor* from *start_pos* to *final_pos* in *nr_interv*
    equidistant intervals, acquiring for *integ_time* at each of the
    ``nr_interv + 1`` points.
    """
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        # delegate to the generic N-dimensional preparation (step mode)
        self._prepare([motor], [start_pos], [final_pos],
                      nr_interv, integ_time, **opts)
class a2scan(aNscan, Macro):
    """Two-motor absolute step scan.

    Both motors move simultaneously, each from its start to its final
    position, in *nr_interv* equidistant intervals.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        self._prepare([motor1, motor2], [start_pos1, start_pos2], [
                      final_pos1, final_pos2], nr_interv, integ_time, **opts)
class a3scan(aNscan, Macro):
    """Three-motor absolute step scan.

    All three motors move simultaneously, each from its start to its
    final position, in *nr_interv* equidistant intervals.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3],
                      nr_interv, integ_time, **opts)
class a4scan(aNscan, Macro):
    """Four-motor absolute step scan.

    All four motors move simultaneously, each from its start to its
    final position, in *nr_interv* equidistant intervals.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        # fixed copy-paste error: the three entries below said "3" in
        # their help strings
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [
            f1, f2, f3, f4], nr_interv, integ_time, **opts)
class amultiscan(aNscan, Macro):
    """Absolute step scan of an arbitrary number of motors, each with
    its own start and end position, sharing the interval count and the
    integration time.
    """
    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, *args, **opts):
        # args arrive flattened as (m1, s1, e1, m2, s2, e2, ...,
        # nr_interv, integ_time) -- slice out each role with stride 3.
        # NOTE(review): assumes the repeat parameter is flattened by the
        # macro framework -- verify against the installed Sardana version.
        motors = args[0:-2:3]
        starts = args[1:-2:3]
        ends = args[2:-2:3]
        nr_interv = args[-2]
        integ_time = args[-1]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dmultiscan(dNscan, Macro):
    """Relative step scan of an arbitrary number of motors, each with
    its own start and end offset, sharing the interval count and the
    integration time.  Motors return to their original positions at the
    end.
    """
    param_def = [
        ['motor_start_end_list',
         [['motor', Type.Moveable, None, 'Moveable to move'],
          ['start', Type.Float, None, 'Starting position'],
          ['end', Type.Float, None, 'Final position']],
         None, 'List of motor, start and end positions'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, *args, **opts):
        # args arrive flattened as (m1, s1, e1, m2, s2, e2, ...,
        # nr_interv, integ_time) -- slice out each role with stride 3
        motors = args[0:-2:3]
        starts = args[1:-2:3]
        ends = args[2:-2:3]
        nr_interv = args[-2]
        integ_time = args[-1]
        self._prepare(motors, starts, ends, nr_interv, integ_time, **opts)
class dscan(dNscan, Macro):
    """One-motor relative step scan.

    *start_pos* and *final_pos* are offsets from the current motor
    position; the motor returns to its original position at the end.
    """
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        self._prepare([motor], [start_pos], [final_pos],
                      nr_interv, integ_time, **opts)
class d2scan(dNscan, Macro):
    """Two-motor relative step scan.

    Positions are offsets from the current motor positions; the motors
    return to their original positions at the end.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, nr_interv, integ_time, **opts):
        self._prepare([motor1, motor2], [start_pos1, start_pos2], [
                      final_pos1, final_pos2], nr_interv, integ_time, **opts)
class d3scan(dNscan, Macro):
    """Three-motor relative step scan.

    Positions are offsets from the current motor positions; the motors
    return to their original positions at the end.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, **opts):
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3],
                      nr_interv, integ_time, **opts)
class d4scan(dNscan, Macro):
    """Four-motor relative step scan.

    Positions are offsets from the current motor positions; the motors
    return to their original positions at the end.
    """
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        # fixed copy-paste error: the three entries below said "3" in
        # their help strings
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, **opts):
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [
            f1, f2, f3, f4], nr_interv, integ_time, **opts)
class mesh(Macro, Hookable):
    """Two-motor grid scan.

    *motor1* is the fast (inner-loop) axis; *motor2* the slow one.  In
    bidirectional mode the fast axis alternates direction on every
    second row to save travel time.
    """
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]
    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        # total number of grid points
        self.nb_points = (m1_nr_interv + 1) * (m2_nr_interv + 1)
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        self.name = opts.get('name', 'mesh')
        generator = self._generator
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m,
                                          min_value=min(start, final),
                                          max_value=max(start, final)))
        # the fast axis is the reference for plotting
        moveables[0].is_reference = True
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        self._gScan = SScan(self, generator, moveables, env, constrains)
        self.setData(self._gScan.data)
    def _generator(self):
        """Yield one step per grid point, row by row (motor2 slow axis).

        The same dict object is reused and mutated between yields.
        """
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        point_no = 1
        # forward and reversed fast-axis rows (for s-shaped scanning)
        m1_space = numpy.linspace(m1start, m1end, points1)
        m1_space_inv = numpy.linspace(m1end, m1start, points1)
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            space = m1_space
            # every other row runs backwards in bidirectional mode
            if i % 2 != 0 and self.bidirectional_mode:
                space = m1_space_inv
            for m1pos in space:
                step["positions"] = numpy.array([m1pos, m2pos])
                step["point_id"] = point_no
                point_no += 1
                yield step
    def run(self, *args):
        # delegate to the generic scan loop
        for step in self._gScan.step_scan():
            yield step
class dmesh(mesh):
    """Relative two-motor grid scan.

    Positions are offsets from the current motor positions; the motors
    return to their original positions at the end.
    """
    hints = copy.deepcopy(mesh.hints)
    hints['scan'] = 'dmesh'
    env = copy.deepcopy(mesh.env)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]
    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        # record the current positions and convert the relative inputs
        # to absolute positions before delegating to mesh
        self._motion = self.getMotion([m1, m2])
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        start1 = self.originalPositions[0] + m1_start_pos
        start2 = self.originalPositions[1] + m2_start_pos
        final1 = self.originalPositions[0] + m1_final_pos
        final2 = self.originalPositions[1] + m2_final_pos
        mesh.prepare(self, m1, start1, final1, m1_nr_interv,
                     m2, start2, final2, m2_nr_interv, integ_time,
                     bidirectional, **opts)
    def do_restore(self):
        # called by the scan framework at the end (also on abort)
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class fscan(Macro, Hookable):
    """Formula-driven scan: each motor follows a path defined by an
    expression evaluated over the independent variables.

    *indepvars* is a ``key=value`` string of independent-variable
    sequences; each motor's *func* expression is evaluated once per
    point with those variables in scope.
    """
    hints = {'scan': 'fscan',
             'allowsHooks': ('pre-scan', 'pre-move', 'post-move', 'pre-acq',
                             'post-acq', 'post-step', 'post-scan')}
    env = ('ActiveMntGrp',)
    param_def = [
        ['indepvars', Type.String, None, 'Independent Variables'],
        ['integ_time', Type.String, None, 'Integration time'],
        ['motor_funcs',
         [['motor', Type.Moveable, None, 'motor'],
          ['func', Type.String, None, 'curve defining path']],
         None, 'List of motor and path curves']
    ]
    def prepare(self, *args, **opts):
        # "!", "*" or "none" means: no independent variables at all
        if args[0].lower() in ["!", "*", "none", None]:
            indepvars = {}
        else:
            indepvars = SafeEvaluator({'dict': dict}).eval(
                'dict(%s)' % args[0])
        self.motors = [item[0] for item in args[2]]
        self.funcstrings = [item[1] for item in args[2]]
        # one evaluation namespace per scan point, binding each indepvar
        # to its value at that point
        globals_lst = [dict(list(zip(indepvars, values)))
                       for values in zip(*list(indepvars.values()))]
        self.paths = [[SafeEvaluator(globals).eval(
            func) for globals in globals_lst] for func in self.funcstrings]
        # NOTE(review): bare eval on a user-supplied macro argument --
        # the indepvars go through SafeEvaluator but integ_time does
        # not; consider using SafeEvaluator here as well
        self._integ_time = numpy.array(eval(args[1]), dtype='d')
        self.opts = opts
        if len(self.motors) == len(self.paths) > 0:
            self.N = len(self.motors)
        else:
            raise ValueError(
                'Moveable and func lists must be non-empty and same length')
        npoints = len(self.paths[0])
        try:
            # if all paths have the same number of points, the reshape
            # succeeds; otherwise report which function disagrees
            self.paths = numpy.array(self.paths, dtype='d')
            self.paths.reshape((self.N, npoints))
        except Exception:
            for p, fs in zip(self.paths, self.funcstrings):
                if len(p) != npoints:
                    raise ValueError('"%s" and "%s" yield different number '
                                     'of points (%i vs %i)' %
                                     (self.funcstrings[0], fs, npoints,
                                      len(p)))
            raise
        self._nb_points = npoints
        if self._integ_time.size == 1:
            self._integ_time = self._integ_time * \
                numpy.ones(self._nb_points)  # extend integ_time
        elif self._integ_time.size != self._nb_points:
            raise ValueError('time_integ must either be a scalar or '
                             'length=npoints (%i)' % self._nb_points)
        self.name = opts.get('name', 'fscan')
        generator = self._generator
        moveables = self.motors
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan'
        self._gScan = SScan(self, generator, moveables, env, constrains)
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)
    def _generator(self):
        """Yield one step per point with the precomputed positions and
        (possibly per-point) integration times."""
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        for i in range(self._nb_points):
            step["positions"] = self.paths[:, i]
            step["integ_time"] = self._integ_time[i]
            step["point_id"] = i
            yield step
    def run(self, *args):
        # delegate to the generic scan loop
        for step in self._gScan.step_scan():
            yield step
    def _get_nr_points(self):
        """Deprecated accessor for :attr:`nb_points`; warns on use."""
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points
    # read-only backwards-compatibility alias
    nr_points = property(_get_nr_points)
class ascanh(aNscan, Macro):
    """One-motor absolute scan in hybrid mode.

    Same parameters as :class:`ascan`, but prepared with
    ``mode=HybridMode``.
    """
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor, start_pos, final_pos, nr_interv, integ_time,
                **opts):
        self._prepare([motor], [start_pos], [final_pos], nr_interv, integ_time,
                      mode=HybridMode, **opts)
class rscan(Macro, Hookable):
    """Region scan: one motor scanned through consecutive regions, each
    with its own end position and interval count.
    """
    hints = {'scan': 'rscan', 'allowsHooks': ('pre-scan', 'pre-move',
                                              'post-move', 'pre-acq',
                                              'post-acq', 'post-step',
                                              'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time']
    ]
    def prepare(self, motor, start_pos, regions, integ_time, **opts):
        self.name = 'rscan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): regions appears to be a list of
        # (next_pos, region_nr_intervals) pairs, which would make the
        # region count len(regions) rather than len(regions) // 2 --
        # confirm against the repeat-parameter passing convention
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data
    def _generator(self):
        """Yield one step per point over all regions, skipping the
        duplicated boundary point between consecutive regions."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop
    def run(self, *args):
        # delegate to the generic scan loop
        for step in self._gScan.step_scan():
            yield step
class r2scan(Macro, Hookable):
    """Region scan of two motors moving together through the same
    regions (both motors get identical positions at every point).
    """
    hints = {'scan': 'r2scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]
    def prepare(self, motor1, motor2, start_pos, regions, integ_time, **opts):
        self.name = 'r2scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): see rscan -- len(regions) // 2 looks suspicious
        # if regions is a list of (next_pos, intervals) pairs
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data
    def _generator(self):
        """Yield one step per point over all regions; both motors share
        the same target position at each point."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop
    def run(self, *args):
        # delegate to the generic scan loop
        for step in self._gScan.step_scan():
            yield step
class r3scan(Macro, Hookable):
    """Region scan of three motors moving together through the same
    regions (all motors get identical positions at every point).
    """
    hints = {'scan': 'r3scan', 'allowsHooks': ('pre-scan', 'pre-move',
                                               'post-move', 'pre-acq',
                                               'post-acq', 'post-step',
                                               'post-scan')}
    # env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'Motor to move'],
        ['motor2', Type.Moveable, None, 'Motor to move'],
        ['motor3', Type.Moveable, None, 'Motor to move'],
        ['start_pos', Type.Float, None, 'Start position'],
        ['regions',
         [['next_pos', Type.Float, None, 'next position'],
          ['region_nr_intervals', Type.Integer, None,
           'Region number of intervals']],
         None, 'List of tuples: (next_pos, region_nr_intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
    ]
    def prepare(self, motor1, motor2, motor3, start_pos, regions, integ_time, **opts):
        self.name = 'r3scan'
        self.integ_time = integ_time
        self.start_pos = start_pos
        self.regions = regions
        # NOTE(review): see rscan -- len(regions) // 2 looks suspicious
        # if regions is a list of (next_pos, intervals) pairs
        self.regions_count = len(self.regions) // 2
        generator = self._generator
        self.motors = [motor1, motor2, motor3]
        env = opts.get('env', {})
        constrains = []
        self._gScan = SScan(self, generator, self.motors, env, constrains)
        self._data = self._gScan.data
    def _generator(self):
        """Yield one step per point over all regions; all three motors
        share the same target position at each point."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = self.getHooks('post-acq') + self.getHooks(
            '_NOHINTS_')
        step["post-step-hooks"] = self.getHooks('post-step')
        point_id = 0
        region_start = self.start_pos
        for r in range(len(self.regions)):
            region_stop, region_nr_intervals = self.regions[
                r][0], self.regions[r][1]
            positions = numpy.linspace(
                region_start, region_stop, region_nr_intervals + 1)
            if point_id != 0:
                # positions must be calculated from the start to the end of the region
                # but after the first region, the 'start' point must not be
                # repeated
                positions = positions[1:]
            for p in positions:
                step['positions'] = [p, p, p]
                step['point_id'] = point_id
                point_id += 1
                yield step
            region_start = region_stop
    def run(self, *args):
        # delegate to the generic scan loop
        for step in self._gScan.step_scan():
            yield step
class scanhist(Macro):
    """Show scan history information.

    With no argument (or a negative scan number) all recorded scans are
    listed; with a scan number, detailed information for that scan is
    shown.
    """
    param_def = [
        ['scan number', Type.Integer, -1,
         'scan number. [default=-1 meaning show all scans]'],
    ]
    def run(self, scan_number):
        try:
            hist = self.getEnv("ScanHistory")
        except UnknownEnv:
            print("No scan recorded in history")
            return
        if scan_number < 0:
            self.show_all(hist)
        else:
            self.show_one(hist, scan_number)
    def show_one(self, hist, scan_number):
        """Print a detailed table for a single history entry."""
        item = None
        for h in hist:
            if h['serialno'] == scan_number:
                item = h
                break
        if item is None:
            self.warning("Could not find scan number %s", scan_number)
            return
        # fixed: read all fields from the matched entry ``item`` -- the
        # original code kept reading the loop variable ``h``, which only
        # matched ``item`` by accident of the ``break`` above
        serialno, title = item['serialno'], item['title']
        start = datetime.datetime.fromtimestamp(item['startts'])
        end = datetime.datetime.fromtimestamp(item['endts'])
        total_time = end - start
        start, end, total_time = start.ctime(), end.ctime(), str(total_time)
        scan_dir, scan_file = item['ScanDir'], item['ScanFile']
        deadtime = '%.1f%%' % item['deadtime']
        user = item['user']
        store = "Not stored!"
        if scan_dir is not None and scan_file is not None:
            if isinstance(scan_file, str):
                store = os.path.join(scan_dir, scan_file)
            else:
                store = scan_dir + os.path.sep + str(scan_file)
        channels = ", ".join(item['channels'])
        cols = ["#", "Title", "Start time", "End time", "Took", "Dead time",
                "User", "Stored", "Channels"]
        data = [serialno, title, start, end, total_time, deadtime, user, store,
                channels]
        table = Table([data], row_head_str=cols, row_head_fmt='%*s',
                      elem_fmt=['%-*s'],
                      col_sep=' : ')
        for line in table.genOutput():
            self.output(line)
    def show_all(self, hist):
        """Print a one-line-per-scan summary of the whole history."""
        cols = "#", "Title", "Start time", "End time", "Stored"
        width = -1, -1, -1, -1, -1
        out = List(cols, max_col_width=width)
        today = datetime.datetime.today().date()
        for h in hist:
            # print only the time for scans that happened today
            start = datetime.datetime.fromtimestamp(h['startts'])
            if start.date() == today:
                start = start.time().strftime("%H:%M:%S")
            else:
                start = start.strftime("%Y-%m-%d %H:%M:%S")
            end = datetime.datetime.fromtimestamp(h['endts'])
            if end.date() == today:
                end = end.time().strftime("%H:%M:%S")
            else:
                end = end.strftime("%Y-%m-%d %H:%M:%S")
            scan_file = h['ScanFile']
            store = "Not stored!"
            if scan_file is not None:
                store = ", ".join(scan_file)
            row = h['serialno'], h['title'], start, end, store
            out.appendRow(row)
        for line in out.genOutput():
            self.output(line)
class ascanc(aNscan, Macro):
    """One-motor absolute continuous scan (software synchronization)."""
    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]
    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        # in continuous mode the "scan_length" argument carries the
        # slow-down factor instead of an interval count
        self._prepare([motor], [start_pos], [final_pos], slow_down,
                      integ_time, mode=ContinuousMode, **opts)
class a2scanc(aNscan, Macro):
    """Two-motor absolute continuous scan (software synchronization)."""
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]
    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        self._prepare([motor1, motor2], [start_pos1, start_pos2],
                      [final_pos1, final_pos2], slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class a3scanc(aNscan, Macro):
    """Three-motor absolute continuous scan (software synchronization)."""
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]
    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3], slow_down,
                      integ_time, mode=ContinuousMode, **opts)
class a4scanc(aNscan, Macro):
    """Continuous scan of four motors moving simultaneously from their
    start to their final positions."""

    # Fix: the motor4/start_pos4/final_pos4 descriptions previously said
    # "3" (copy-paste typo); they are user-visible help text.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        """Delegate to aNscan._prepare in continuous (software) mode."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class dNscanc(dNscan):
    """Base class for the continuous relative (delta) scans."""

    def do_restore(self):
        """Restore after the scan: raise every motor back to its maximum top
        velocity, then let dNscan move them to the initial positions."""
        gscan = self._gScan
        for motor in self.motors:
            gscan.set_max_top_velocity(motor)
        dNscan.do_restore(self)
class dscanc(dNscanc, Macro):
    """Continuous relative (delta) scan of one motor."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable to move'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor, start_pos, final_pos, integ_time, slow_down,
                **opts):
        """Delegate to dNscan's _prepare in continuous mode."""
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d2scanc(dNscanc, Macro):
    """Continuous relative (delta) scan of two motors moving
    simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, motor1, start_pos1, final_pos1, motor2, start_pos2,
                final_pos2, integ_time, slow_down, **opts):
        """Delegate to dNscan's _prepare in continuous mode."""
        motors = [motor1, motor2]
        starts = [start_pos1, start_pos2]
        finals = [final_pos1, final_pos2]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d3scanc(dNscanc, Macro):
    """Continuous relative (delta) scan of three motors moving
    simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        """Delegate to dNscan's _prepare in continuous mode."""
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, slow_down, integ_time,
                      mode=ContinuousMode, **opts)
class d4scanc(dNscanc, Macro):
    """Continuous relative (delta) scan of four motors moving
    simultaneously."""

    # Fix: the motor4/start_pos4/final_pos4 descriptions previously said
    # "3" (copy-paste typo); they are user-visible help text.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        """Delegate to dNscan's _prepare in continuous mode."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousMode, **opts)
class meshc(Macro, Hookable):
    """Continuous 2D mesh scan.

    motor1 performs one continuous sweep between its start and final
    positions for each of the ``m2_nr_interv + 1`` discrete positions of
    motor2; acquisition runs while motor1 is moving.  In bidirectional mode
    every other sweep of motor1 is reversed (s-shaped path).
    """
    hints = {'scan': 'mesh', 'allowsHooks': ('pre-scan', 'pre-move',
                                             'post-move', 'pre-acq',
                                             'post-acq', 'post-step',
                                             'post-scan')}
    env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Store the scan parameters and build the CSScan object."""
        self.motors = [m1, m2]
        self.slow_down = slow_down
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.m2_nr_interv = m2_nr_interv
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        # one waypoint (one continuous sweep of m1) per m2 position
        self.nr_waypoints = m2_nr_interv + 1
        self.name = opts.get('name', 'meshc')
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        moveables[0].is_reference = True
        env = opts.get('env', {})
        constrains = [getCallable(cns) for cns in opts.get(
            'constrains', [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan'
        self._gScan = CSScan(self, self._waypoint_generator,
                             self._period_generator, moveables, env,
                             constrains, extrainfodesc)
        # m2 stays still during each continuous sweep of m1
        self._gScan.frozen_motors = [m2]
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _waypoint_generator(self):
        """Yield one waypoint descriptor per m2 position.

        NOTE: the same ``step`` dict object is mutated and re-yielded on
        every iteration, so consumers must use it before advancing.
        """
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        step["post-move-hooks"] = self.getHooks('post-move')
        step["check_func"] = []
        step["slow_down"] = self.slow_down
        points2 = self.m2_nr_interv + 1
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        point_no = 1
        for i, m2pos in enumerate(numpy.linspace(m2start, m2end, points2)):
            start, end = m1start, m1end
            # reverse the m1 sweep on odd rows when scanning s-shaped
            if i % 2 != 0 and self.bidirectional_mode:
                start, end = m1end, m1start
            step["start_positions"] = numpy.array([start, m2pos])
            step["positions"] = numpy.array([end, m2pos])
            step["point_id"] = point_no
            point_no += 1
            yield step

    def _period_generator(self):
        """Yield acquisition-period descriptors indefinitely; the scan engine
        stops consuming them when the current waypoint is reached.  The same
        ``step`` dict object is mutated and re-yielded each time."""
        step = {}
        step["integ_time"] = self.integ_time
        step["pre-acq-hooks"] = self.getHooks('pre-acq')
        step["post-acq-hooks"] = (self.getHooks('post-acq') +
                                  self.getHooks('_NOHINTS_'))
        step["post-step-hooks"] = self.getHooks('post-step')
        step["check_func"] = []
        step['extrainfo'] = {}
        point_no = 0
        while(True):
            point_no += 1
            step["point_id"] = point_no
            yield step

    def run(self, *args):
        """Drive the prepared scan, yielding after every step."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        # delegate the estimate to the scan engine
        return self._gScan.waypoint_estimation()

    def getIntervalEstimation(self):
        return self.nr_waypoints
class dmeshc(meshc):
    """Relative (delta) version of :class:`meshc`.

    Positions are given relative to the current motor positions, which are
    read once in :meth:`prepare` and restored in :meth:`do_restore`.
    """
    hints = copy.deepcopy(meshc.hints)
    hints['scan'] = 'dmeshc'
    env = copy.deepcopy(meshc.env)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['slow_down', Type.Float, None, 'global scan slow down factor (0, 1]'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, slow_down,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, **opts):
        """Convert the relative limits to absolute ones and delegate to
        meshc.prepare."""
        # remember the current positions; they anchor both the absolute
        # scan limits and the final restore
        self._motion = self.getMotion([m1, m2])
        self.originalPositions = numpy.array(
            self._motion.readPosition(force=True))
        start1 = self.originalPositions[0] + m1_start_pos
        start2 = self.originalPositions[1] + m2_start_pos
        final1 = self.originalPositions[0] + m1_final_pos
        final2 = self.originalPositions[1] + m2_final_pos
        meshc.prepare(self, m1, start1, final1, slow_down,
                      m2, start2, final2, m2_nr_interv, integ_time,
                      bidirectional, **opts)

    def do_restore(self):
        """Move both motors back to their pre-scan positions."""
        self.info("Returning to start positions...")
        self._motion.move(self.originalPositions)
class aNscanct(aNscan):
    """Base class for the hardware-timed continuous scans (*scanct family).

    Only extends aNscan with the additional hook points these scans
    advertise (configuration, start and cleanup phases).
    """
    hints = {"scan": "aNscanct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
class ascanct(aNscanct, Macro):
    """Hardware-timed continuous scan of one motor."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable name'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time'],
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        """Delegate to aNscan._prepare in hardware-timed continuous mode."""
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class a2scanct(aNscanct, Macro):
    """Hardware-timed continuous scan of two motors moving
    simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, nr_interv,
                integ_time, latency_time, **opts):
        """Delegate to aNscan._prepare in hardware-timed continuous mode."""
        motors = [m1, m2]
        starts = [s1, s2]
        finals = [f1, f2]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class a3scanct(aNscanct, Macro):
    """Hardware-timed continuous scan of three motors moving
    simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, nr_interv,
                integ_time, latency_time, **opts):
        """Delegate to aNscan._prepare in hardware-timed continuous mode."""
        motors = [m1, m2, m3]
        starts = [s1, s2, s3]
        finals = [f1, f2, f3]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
# Fix: inherit from aNscanct (as a2scanct/a3scanct do) rather than plain
# aNscan, so the hardware-timed hook points (pre-configuration, pre-start,
# cleanup, ...) are advertised in the hints like the rest of the family.
class a4scanct(aNscanct, Macro):
    """Hardware-timed continuous scan of four motors moving
    simultaneously."""

    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time']]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                nr_interv, integ_time, latency_time, **opts):
        """Delegate to aNscan._prepare in hardware-timed continuous mode."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      nr_interv, integ_time, mode=ContinuousHwTimeMode,
                      latency_time=latency_time, **opts)
class dNscanct(dNscan):
    """Base class for the hardware-timed continuous relative (delta) scans;
    only extends the hook points advertised in hints."""
    hints = {"scan": "dNscanct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
class dscanct(dNscanct, Macro):
    """Hardware-timed continuous relative (delta) scan of one motor."""

    param_def = [
        ['motor', Type.Moveable, None, 'Moveable name'],
        ['start_pos', Type.Float, None, 'Scan start position'],
        ['final_pos', Type.Float, None, 'Scan final position'],
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time'],
    ]

    def prepare(self, motor, start_pos, final_pos, nr_interv,
                integ_time, latency_time, **opts):
        """Delegate to dNscan's _prepare in hardware-timed continuous
        mode."""
        motors = [motor]
        starts = [start_pos]
        finals = [final_pos]
        self._prepare(motors, starts, finals, nr_interv, integ_time,
                      mode=ContinuousHwTimeMode, latency_time=latency_time,
                      **opts)
class d2scanct(dNscanct, Macro):
    """Hardware-timed continuous relative (delta) scan of two motors."""
    # NOTE(review): unlike dscanct, this macro takes a ``slow_down`` factor
    # instead of ``nr_interv``/``latency_time``, yet forwards it in the
    # argument slot where the other *scanct macros pass the number of
    # intervals while still requesting ContinuousHwTimeMode.  This looks
    # inconsistent -- confirm against aNscan._prepare before relying on it.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, integ_time, slow_down, **opts):
        """Delegate to dNscan's _prepare in hardware-timed continuous
        mode."""
        self._prepare([m1, m2], [s1, s2], [f1, f2], slow_down, integ_time,
                      mode=ContinuousHwTimeMode, **opts)
class d3scanct(dNscanct, Macro):
    """Hardware-timed continuous relative (delta) scan of three motors."""
    # NOTE(review): like d2scanct, this takes ``slow_down`` instead of
    # ``nr_interv``/``latency_time`` and forwards it where the other
    # *scanct macros pass the number of intervals while still requesting
    # ContinuousHwTimeMode -- confirm against aNscan._prepare.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, integ_time,
                slow_down, **opts):
        """Delegate to dNscan's _prepare in hardware-timed continuous
        mode."""
        self._prepare([m1, m2, m3], [s1, s2, s3], [f1, f2, f3], slow_down,
                      integ_time, mode=ContinuousHwTimeMode, **opts)
class d4scanct(dNscanct, Macro):
    """Hardware-timed continuous relative (delta) scan of four motors."""
    # NOTE(review): like d2scanct/d3scanct, this takes ``slow_down`` instead
    # of ``nr_interv``/``latency_time`` and forwards it where the other
    # *scanct macros pass the number of intervals while requesting
    # ContinuousHwTimeMode -- confirm against aNscan._prepare.
    #
    # Fix: the motor4/start_pos4/final_pos4 descriptions previously said
    # "3" (copy-paste typo); they are user-visible help text.
    param_def = [
        ['motor1', Type.Moveable, None, 'Moveable 1 to move'],
        ['start_pos1', Type.Float, None, 'Scan start position 1'],
        ['final_pos1', Type.Float, None, 'Scan final position 1'],
        ['motor2', Type.Moveable, None, 'Moveable 2 to move'],
        ['start_pos2', Type.Float, None, 'Scan start position 2'],
        ['final_pos2', Type.Float, None, 'Scan final position 2'],
        ['motor3', Type.Moveable, None, 'Moveable 3 to move'],
        ['start_pos3', Type.Float, None, 'Scan start position 3'],
        ['final_pos3', Type.Float, None, 'Scan final position 3'],
        ['motor4', Type.Moveable, None, 'Moveable 4 to move'],
        ['start_pos4', Type.Float, None, 'Scan start position 4'],
        ['final_pos4', Type.Float, None, 'Scan final position 4'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['slow_down', Type.Float, 1, 'global scan slow down factor (0, 1]'],
    ]

    def prepare(self, m1, s1, f1, m2, s2, f2, m3, s3, f3, m4, s4, f4,
                integ_time, slow_down, **opts):
        """Delegate to dNscan's _prepare in hardware-timed continuous
        mode."""
        self._prepare([m1, m2, m3, m4], [s1, s2, s3, s4], [f1, f2, f3, f4],
                      slow_down, integ_time, mode=ContinuousHwTimeMode, **opts)
class meshct(Macro, Hookable):
    """Hardware-timed continuous 2D mesh scan.

    motor1 performs one continuously-acquired sweep (``m1_nr_interv``
    intervals) for each of the ``m2_nr_interv + 1`` discrete positions of
    motor2.  In bidirectional mode the m1 sweep direction alternates row by
    row (s-shaped path).
    """
    hints = {"scan": "meshct",
             "allowsHooks": ("pre-scan", "pre-configuration",
                             "post-configuration", "pre-move",
                             "post-move", "pre-acq", "pre-start",
                             "post-acq", "pre-cleanup", "post-cleanup",
                             "post-scan")}
    env = ('ActiveMntGrp',)
    param_def = [
        ['motor1', Type.Moveable, None, 'First motor to move'],
        ['m1_start_pos', Type.Float, None, 'Scan start position for first '
                                           'motor'],
        ['m1_final_pos', Type.Float, None, 'Scan final position for first '
                                           'motor'],
        ['m1_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['motor2', Type.Moveable, None, 'Second motor to move'],
        ['m2_start_pos', Type.Float, None, 'Scan start position for second '
                                           'motor'],
        ['m2_final_pos', Type.Float, None, 'Scan final position for second '
                                           'motor'],
        ['m2_nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['bidirectional', Type.Boolean, False, 'Save time by scanning '
                                               's-shaped'],
        ['latency_time', Type.Float, 0, 'Latency time']
    ]

    def prepare(self, m1, m1_start_pos, m1_final_pos, m1_nr_interv,
                m2, m2_start_pos, m2_final_pos, m2_nr_interv, integ_time,
                bidirectional, latency_time, **opts):
        """Pre-compute the waypoints and build the CTScan object."""
        self.motors = [m1, m2]
        self.starts = numpy.array([m1_start_pos, m2_start_pos], dtype='d')
        self.finals = numpy.array([m1_final_pos, m2_final_pos], dtype='d')
        self.nr_intervs = numpy.array([m1_nr_interv, m2_nr_interv], dtype='i')
        # Number of intervals of the first motor which is doing the
        # continuous scan.
        self.nr_interv = m1_nr_interv
        self.nb_points = self.nr_interv + 1
        self.integ_time = integ_time
        self.bidirectional_mode = bidirectional
        # Prepare the waypoints
        m1start, m2start = self.starts
        m1end, m2end = self.finals
        points1, points2 = self.nr_intervs + 1
        m2_space = numpy.linspace(m2start, m2end, points2)
        self.waypoints = []
        self.starts_points = []
        for i, m2pos in enumerate(m2_space):
            self.starts_points.append(numpy.array([m1start, m2pos], dtype='d'))
            self.waypoints.append(numpy.array([m1end, m2pos], dtype='d'))
            # swap direction so the next row is scanned backwards
            if self.bidirectional_mode:
                m1start, m1end = m1end, m1start
        self.name = opts.get('name', 'meshct')
        moveables = []
        for m, start, final in zip(self.motors, self.starts, self.finals):
            moveables.append(MoveableDesc(moveable=m, min_value=min(
                start, final), max_value=max(start, final)))
        moveables[0].is_reference = True
        env = opts.get('env', {})
        # the effective latency time is never lower than what the
        # measurement group itself requires
        mg_name = self.getEnv('ActiveMntGrp')
        mg = self.getMeasurementGroup(mg_name)
        mg_latency_time = mg.getLatencyTime()
        if mg_latency_time > latency_time:
            self.info("Choosing measurement group latency time: %f" %
                      mg_latency_time)
            latency_time = mg_latency_time
        self.latency_time = latency_time
        constrains = [getCallable(cns) for cns in opts.get('constrains',
                                                           [UNCONSTRAINED])]
        extrainfodesc = opts.get('extrainfodesc', [])
        # Hooks are not always set at this point. We will call getHooks
        # later on in the scan_loop
        # self.pre_scan_hooks = self.getHooks('pre-scan')
        # self.post_scan_hooks = self.getHooks('post-scan')
        self._gScan = CTScan(self, self._generator, moveables, env, constrains,
                             extrainfodesc)
        # _data is the default member where the Macro class stores the data.
        # Assign the date produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def _generator(self):
        """Yield one waypoint descriptor per m2 row.

        NOTE: the same ``step`` dict object is mutated and re-yielded on
        every iteration, so consumers must use it before advancing.
        """
        moveables_trees = self._gScan.get_moveables_trees()
        step = {}
        step["pre-move-hooks"] = self.getHooks('pre-move')
        post_move_hooks = self.getHooks(
            'post-move') + [self._fill_missing_records]
        step["post-move-hooks"] = post_move_hooks
        step["check_func"] = []
        # total acquisition time of one m1 sweep
        step["active_time"] = self.nb_points * (self.integ_time
                                                + self.latency_time)
        points1, _ = self.nr_intervs + 1
        for i, waypoint in enumerate(self.waypoints):
            # index of the first record belonging to this row
            self.point_id = points1 * i
            step["waypoint_id"] = i
            self.starts = self.starts_points[i]
            self.finals = waypoint
            step["positions"] = []
            step["start_positions"] = []
            for start, end, moveable_tree in zip(self.starts, self.finals,
                                                 moveables_trees):
                moveable_root = moveable_tree.root()
                start_positions, end_positions = _calculate_positions(
                    moveable_root, start, end)
                step["start_positions"] += start_positions
                step["positions"] += end_positions
            yield step

    def run(self, *args):
        """Drive the prepared scan, yielding after every step."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        # time estimation is not implemented for this scan
        return 0.0

    def getIntervalEstimation(self):
        return len(self.waypoints)

    def _fill_missing_records(self):
        # fill record list with dummy records for the final padding
        nb_of_points = self.nb_points
        scan = self._gScan
        nb_of_total_records = len(scan.data.records)
        nb_of_records = nb_of_total_records - self.point_id
        missing_records = nb_of_points - nb_of_records
        scan.data.initRecords(missing_records)

    def _get_nr_points(self):
        """Deprecated alias for :attr:`nb_points` (warns on access)."""
        msg = ("nr_points is deprecated since version 3.0.3. "
               "Use nb_points instead.")
        self.warning(msg)
        return self.nb_points

    nr_points = property(_get_nr_points)
class timescan(Macro, Hookable):
    """Scan over time: acquire ``nr_interv + 1`` points without moving any
    motor, each point taking ``integ_time`` plus the latency time."""

    hints = {'scan': 'timescan', 'allowsHooks': ('pre-scan', 'pre-acq',
                                                 'post-acq', 'post-scan')}

    param_def = [
        ['nr_interv', Type.Integer, None, 'Number of scan intervals'],
        ['integ_time', Type.Float, None, 'Integration time'],
        ['latency_time', Type.Float, 0, 'Latency time'],
    ]

    def prepare(self, nr_interv, integ_time, latency_time):
        """Store the timing parameters and build the TScan object."""
        self.nr_interv = nr_interv
        self.nb_points = nr_interv + 1
        self.integ_time = integ_time
        self.latency_time = latency_time
        self._gScan = TScan(self)
        # _data is the default member where the Macro class stores the data.
        # Assign the data produced by GScan (or its subclasses) to it so all
        # the Macro infrastructure related to the data works e.g. getter,
        # property, etc.
        self.setData(self._gScan.data)

    def run(self, *args):
        """Drive the prepared scan, yielding after every step."""
        for step in self._gScan.step_scan():
            yield step

    def getTimeEstimation(self):
        """Estimate the total scan duration in seconds, honouring the
        latency required by the measurement group."""
        mg_latency = self._gScan.measurement_group.getLatencyTime()
        period = self.integ_time + max(self.latency_time, mg_latency)
        return self.nb_points * period

    def getIntervalEstimation(self):
        return self.nr_interv

    def _get_nr_points(self):
        """Deprecated alias for :attr:`nb_points` (warns on access)."""
        self.warning("nr_points is deprecated since version 3.0.3. "
                     "Use nb_points instead.")
        return self.nb_points

    nr_points = property(_get_nr_points)
class scanstats(Macro):
    """Calculate basic statistics (min/max, COM, FWHM, CEN, ...) of the
    counters of the last scan and store them in the door environment.

    Intended to run as a post-scan hook: it reads the data of the parent
    (scan) macro and the configuration of the ActiveMntGrp.
    """
    env = ("ActiveMntGrp", )

    param_def = [
        ["channel",
         [["channel", Type.ExpChannel, None, ""], {"min": 0}],
         None,
         "List of channels for statistics calculations"
         ]
    ]

    def run(self, channel):
        """Select the channels, compute the statistics over the parent
        scan's data, print them and publish them to the door env."""
        parent = self.getParentMacro()
        if not parent:
            self.warning("for now the scanstats macro can only be executed as"
                         " a post-scan hook")
            return
        if not hasattr(parent, "motors"):
            self.warning("scan must involve at least one moveable "
                         "to calculate statistics")
            return
        active_meas_grp = self.getEnv("ActiveMntGrp")
        meas_grp = self.getMeasurementGroup(active_meas_grp)
        calc_channels = []
        enabled_channels = meas_grp.getEnabled()
        if channel:
            stat_channels = [chan.name for chan in channel]
        else:
            stat_channels = [key for key in enabled_channels.keys()]
        for chan in stat_channels:
            enabled = enabled_channels.get(chan)
            if enabled is None:
                self.warning("{} not in {}".format(chan, meas_grp.name))
            else:
                if not enabled and channel:
                    self.warning("{} not enabled".format(chan))
                elif enabled and channel:
                    # channel was given as parameters
                    calc_channels.append(chan)
                elif enabled and meas_grp.getPlotType(chan)[chan] == 1:
                    calc_channels.append(chan)
        if len(calc_channels) == 0:
            # fallback is first enabled channel in meas_grp
            calc_channels.append(next(iter(enabled_channels)))
        # statistics only make sense for scalar channels; drop 1D/2D ones
        scalar_channels = []
        for _, chan in self.getExpChannels().items():
            if chan.type in ("OneDExpChannel", "TwoDExpChannel"):
                continue
            scalar_channels.append(chan.name)
        calc_channels = [ch for ch in calc_channels if ch in scalar_channels]
        if len(calc_channels) == 0:
            self.warning("measurement group must contain at least one "
                         "enabled scalar channel to calculate statistics")
            return
        # statistics are computed against the first scanned moveable
        selected_motor = str(parent.motors[0])
        stats = {}
        col_header = []
        cols = []
        motor_data = []
        channels_data = {}
        for channel_name in calc_channels:
            channels_data[channel_name] = []
        for idx, rc in parent.data.items():
            motor_data.append(rc[selected_motor])
            for channel_name in calc_channels:
                channels_data[channel_name].append(rc[channel_name])
        motor_data = numpy.array(motor_data)
        for channel_name, data in channels_data.items():
            channel_data = numpy.array(data)
            (_min, _max, min_at, max_at, half_max, com, mean, _int,
             fwhm, cen) = self._calcStats(motor_data, channel_data)
            stats[channel_name] = {
                "min": _min,
                "max": _max,
                "minpos": min_at,
                "maxpos": max_at,
                "mean": mean,
                "int": _int,
                "com": com,
                "fwhm": fwhm,
                "cen": cen}
            col_header.append([channel_name])
            cols.append([
                stats[channel_name]["min"],
                stats[channel_name]["max"],
                stats[channel_name]["minpos"],
                stats[channel_name]["maxpos"],
                stats[channel_name]["mean"],
                stats[channel_name]["int"],
                stats[channel_name]["com"],
                stats[channel_name]["fwhm"],
                stats[channel_name]["cen"],
            ])
        self.info("Statistics for movable: {:s}".format(selected_motor))
        table = Table(elem_list=cols, elem_fmt=["%*g"],
                      row_head_str=["MIN", "MAX", "MIN@", "MAX@",
                                    "MEAN", "INT", "COM", "FWHM", "CEN"],
                      col_head_str=col_header, col_head_sep="-")
        out = table.genOutput()
        for line in out:
            self.info(line)
        self.setEnv("{:s}.ScanStats".format(self.getDoorName()),
                    {"Stats": stats,
                     "Motor": selected_motor,
                     "ScanID": self.getEnv("ScanID")})

    @staticmethod
    def _calcStats(x, y):
        """Return (min, max, min@, max@, half_max, com, mean, int, fwhm, cen)
        statistics for signal *y* sampled at positions *x* (1D numpy arrays).

        Fix: the previous ``except ZeroDivisionError`` guards were dead
        code -- dividing NumPy scalars by zero does not raise, it emits a
        RuntimeWarning and yields nan/inf -- so the intended fallbacks of 0
        never applied.  Denominators are now checked explicitly.
        """
        # max and min
        _min = numpy.min(y)
        _max = numpy.max(y)
        min_idx = numpy.argmin(y)
        min_at = x[min_idx]
        max_idx = numpy.argmax(y)
        max_at = x[max_idx]
        # center of mass (com); fall back to 0 for an all-zero signal
        sum_y = numpy.sum(y)
        if sum_y != 0:
            com = numpy.sum(y * x) / sum_y
        else:
            com = 0
        mean = numpy.mean(y)
        _int = sum_y
        # determine if it is a peak- or erf-like function
        half_max = (_max - _min) / 2 + _min
        lower_left = False
        lower_right = False
        if numpy.any(y[0:max_idx] < half_max):
            lower_left = True
        if numpy.any(y[max_idx:] < half_max):
            lower_right = True
        if lower_left and lower_right:
            # it is a peak-like function
            y_data = y
        elif lower_left:
            # it is an erf-like function
            # use the gradient for further calculation
            y_data = numpy.gradient(y)
            # use also the half maximum of the gradient
            half_max = (numpy.max(y_data) - numpy.min(y_data)) \
                / 2 + numpy.min(y_data)
        else:
            # it is an erf-like function
            # use the gradient for further calculation
            y_data = -1 * numpy.gradient(y)
            # use also the half maximum of the gradient
            half_max = (numpy.max(y_data) - numpy.min(y_data)) \
                / 2 + numpy.min(y_data)
        # cen and fwhm
        # this part is adapted from:
        #
        # The PyMca X-Ray Fluorescence Toolkit
        #
        # Copyright (c) 2004-2014 European Synchrotron Radiation Facility
        #
        # This file is part of the PyMca X-ray Fluorescence Toolkit developed
        # at the ESRF by the Software group.
        max_idx_data = numpy.argmax(y_data)
        # lower half-maximum crossing (linear interpolation)
        idx = max_idx_data
        try:
            while y_data[idx] >= half_max:
                idx = idx - 1
            x0 = x[idx]
            x1 = x[idx + 1]
            y0 = y_data[idx]
            y1 = y_data[idx + 1]
            if y1 == y0:
                # flat edge: keep the intended fallback of 0
                lhmx = 0
            else:
                lhmx = (half_max * (x1 - x0) - (y0 * x1)
                        + (y1 * x0)) / (y1 - y0)
        except IndexError:
            lhmx = x[0]
        # upper half-maximum crossing (linear interpolation)
        idx = max_idx_data
        try:
            while y_data[idx] >= half_max:
                idx = idx + 1
            x0 = x[idx - 1]
            x1 = x[idx]
            y0 = y_data[idx - 1]
            y1 = y_data[idx]
            if y1 == y0:
                # flat edge: keep the intended fallback of 0
                uhmx = 0
            else:
                uhmx = (half_max * (x1 - x0) - (y0 * x1)
                        + (y1 * x0)) / (y1 - y0)
        except IndexError:
            uhmx = x[-1]
        fwhm = uhmx - lhmx
        cen = (uhmx + lhmx) / 2
        return (_min, _max, min_at, max_at, half_max, com, mean, _int,
                fwhm, cen)
| true | true |
f71f852be23973714c7a9eb2960199f224ce82d6 | 15,227 | py | Python | train_wide_resnet.py | 1vn/L0_regularization | 3c44e0e3adc80c7167a5bd1439aa1187747392bb | [
"MIT"
] | null | null | null | train_wide_resnet.py | 1vn/L0_regularization | 3c44e0e3adc80c7167a5bd1439aa1187747392bb | [
"MIT"
] | null | null | null | train_wide_resnet.py | 1vn/L0_regularization | 3c44e0e3adc80c7167a5bd1439aa1187747392bb | [
"MIT"
] | null | null | null | import argparse
import os
import shutil
import time
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
from models import L0WideResNet, TDWideResNet
from dataloaders import cifar10, cifar100
from utils import save_checkpoint, AverageMeter, accuracy
from torch.optim import lr_scheduler
# ---------------------------------------------------------------------------
# Command-line interface
# ---------------------------------------------------------------------------
parser = argparse.ArgumentParser(description="PyTorch WideResNet Training")
# optimization schedule
parser.add_argument("--epochs", default=200, type=int, help="number of total epochs to run")
parser.add_argument(
    "--start-epoch", default=0, type=int, help="manual epoch number (useful on restarts)"
)
parser.add_argument(
    "-b", "--batch-size", default=128, type=int, help="mini-batch size (default: 128)"
)
parser.add_argument(
    "--lr", "--learning-rate", default=0.1, type=float, help="initial learning rate"
)
parser.add_argument("--momentum", default=0.9, type=float, help="momentum")
parser.add_argument(
    "--weight-decay", "--wd", default=0.0005, type=float, help="weight decay (default: 5e-4)"
)
parser.add_argument(
    "--print-freq", "-p", default=100, type=int, help="print frequency (default: 100)"
)
# network architecture
parser.add_argument(
    "--depth", default=28, type=int, help="total depth of the network (default: 28)"
)
parser.add_argument(
    "--width", default=10, type=int, help="total width of the network (default: 10)"
)
parser.add_argument(
    "--droprate_init", default=0.3, type=float, help="dropout probability (default: 0.3)"
)
parser.add_argument(
    "--no-augment",
    dest="augment",
    action="store_false",
    help="whether to use standard augmentation (default: True)",
)
parser.add_argument(
    "--no-bottleneck", dest="bottleneck", action="store_false", help="To not use bottleneck block"
)
parser.add_argument(
    "--resume", default="", type=str, help="path to latest checkpoint (default: none)"
)
parser.add_argument("--name", default="L0WideResNet", type=str, help="name of experiment")
# selects between L0WideResNet and TDWideResNet in main()
parser.add_argument("--model", default="L0WideResNet", type=str, help="name of experiment")
parser.add_argument(
    "--no-tensorboard",
    dest="tensorboard",
    action="store_false",
    help="whether to use tensorboard (default: True)",
)
parser.add_argument("--multi_gpu", action="store_true")
# L0 / sparsification hyper-parameters
parser.add_argument("--lamba", type=float, default=0.001, help="Coefficient for the L0 term.")
parser.add_argument("--beta_ema", type=float, default=0.99)
parser.add_argument("--lr_decay_ratio", type=float, default=0.2)
parser.add_argument("--dataset", choices=["c10", "c100"], default="c10")
parser.add_argument("--local_rep", action="store_true")
parser.add_argument("--epoch_drop", nargs="*", type=int, default=(60, 120, 160))
parser.add_argument("--temp", type=float, default=2.0 / 3.0)
# targeted-dropout (TDWideResNet) hyper-parameters
parser.add_argument("--prune", type=bool, default=False)
parser.add_argument("--dropout", type=float, default=0.5)
parser.add_argument("--dropout_botk", type=float, default=0.5)
parser.add_argument("--dropout_type", type=str, default="weight")
parser.set_defaults(bottleneck=True)
parser.set_defaults(augment=True)
parser.set_defaults(tensorboard=True)
# ---------------------------------------------------------------------------
# Module-level training state shared with main()
# ---------------------------------------------------------------------------
# NOTE(review): initialised to 100, which only makes sense if "prec1"
# actually tracks top-1 error (%) rather than accuracy -- confirm against
# the validation code before changing.
best_prec1 = 100
# TensorBoard SummaryWriter; created in main() unless --no-tensorboard
writer = None
# (time, ...) tuples for logging; seeded with a dummy entry -- populated
# elsewhere (not visible in this chunk)
time_acc = [(0, 0, 0)]
# global optimisation step counter
total_steps = 0
# per-step expected FLOPs / expected L0 logs -- populated elsewhere
exp_flops, exp_l0 = [], []
def main():
    """Entry point: parse CLI args, build the model and CIFAR loaders,
    optionally resume from a checkpoint, then either run a pruning sweep
    (--prune) or train for args.epochs epochs, checkpointing on best
    validation error (stored metrics are *errors*, lower is better).
    """
    global args, best_prec1, writer, time_acc, total_steps, exp_flops, exp_l0
    args = parser.parse_args()
    log_dir_net = args.name
    args.name += "_{}_{}".format(args.depth, args.width)
    if args.dataset == "c100":
        args.name += "_c100"
    print("model:", args.name)
    if args.tensorboard:
        # used for logging to TensorBoard
        from tensorboardX import SummaryWriter

        # always start this run with a fresh, empty log directory
        directory = "logs/{}/{}".format(log_dir_net, args.name)
        if os.path.exists(directory):
            shutil.rmtree(directory)
        os.makedirs(directory)
        writer = SummaryWriter(directory)

    # Data loading code
    dataload = cifar10 if args.dataset == "c10" else cifar100
    train_loader, val_loader, num_classes = dataload(
        augment=args.augment, batch_size=args.batch_size
    )

    # create model
    if args.model == "L0WideResNet":
        model = L0WideResNet(
            args.depth,
            num_classes,
            widen_factor=args.width,
            droprate_init=args.droprate_init,
            N=50000,
            beta_ema=args.beta_ema,
            weight_decay=args.weight_decay,
            local_rep=args.local_rep,
            lamba=args.lamba,
            temperature=args.temp,
        )
    if args.model == "TDWideResNet":
        model = TDWideResNet(
            args.depth,
            num_classes,
            widen_factor=args.width,
            droprate_init=args.droprate_init,
            N=50000,
            beta_ema=args.beta_ema,
            weight_decay=args.weight_decay,
            dropout=args.dropout,
            dropout_botk=args.dropout_botk,
            dropout_type=args.dropout_type,
        )
    print(
        "Number of model parameters: {}".format(
            sum(p.data.nelement() for p in model.parameters())
        )
    )

    # for training on multiple GPUs.
    # Use CUDA_VISIBLE_DEVICES=0,1 to specify which GPUs to use
    if args.multi_gpu:
        model = torch.nn.DataParallel(model).cuda()
    elif torch.cuda.is_available():
        model = model.cuda()
    optimizer = torch.optim.SGD(model.parameters(), args.lr, momentum=args.momentum, nesterov=True)

    # optionally resume from a checkpoint; the --prune sweep below needs the
    # loaded checkpoint to restore weights between prune ratios.
    checkpoint = None
    if args.resume:
        if os.path.isfile(args.resume):
            print("=> loading checkpoint '{}'".format(args.resume))
            checkpoint = torch.load(args.resume)
            args.start_epoch = checkpoint["epoch"]
            best_prec1 = checkpoint["best_prec1"]
            model.load_state_dict(checkpoint["state_dict"])
            optimizer.load_state_dict(checkpoint["optimizer"])
            total_steps = checkpoint["total_steps"]
            time_acc = checkpoint["time_acc"]
            exp_flops = checkpoint["exp_flops"]
            exp_l0 = checkpoint["exp_l0"]
            if args.model == "L0WideResNet" and checkpoint["beta_ema"] > 0:
                # restore the exponential moving average of the parameters
                if not args.multi_gpu:
                    model.beta_ema = checkpoint["beta_ema"]
                    model.avg_param = checkpoint["avg_params"]
                    model.steps_ema = checkpoint["steps_ema"]
                else:
                    model.module.beta_ema = checkpoint["beta_ema"]
                    model.module.avg_param = checkpoint["avg_params"]
                    model.module.steps_ema = checkpoint["steps_ema"]
            print("=> loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint["epoch"]))
        else:
            print("=> no checkpoint found at '{}'".format(args.resume))
            total_steps, exp_flops, exp_l0 = 0, [], []
    cudnn.benchmark = True

    loglike = nn.CrossEntropyLoss()
    if torch.cuda.is_available():
        loglike = loglike.cuda()

    # define loss function (criterion) and optimizer
    def loss_function(output, target_var, model):
        loss = loglike(output, target_var)
        total_loss = loss
        if torch.cuda.is_available():
            total_loss = total_loss.cuda()
        return total_loss

    lr_schedule = lr_scheduler.MultiStepLR(
        optimizer, milestones=args.epoch_drop, gamma=args.lr_decay_ratio
    )

    if args.prune:
        # Bugfix: the sweep restores weights from `checkpoint` after each
        # prune ratio; previously a missing/invalid --resume crashed with a
        # NameError here. Fail early with a clear message instead.
        if checkpoint is None:
            raise RuntimeError("--prune requires a valid --resume checkpoint")
        for i in range(10):
            botk = i * 0.1
            model.prune(botk)
            prec1 = validate(val_loader, model, loss_function, 1)
            model.load_state_dict(checkpoint["state_dict"])
            print(botk, 100 - prec1)
        return

    for epoch in range(args.start_epoch, args.epochs):
        time_glob = time.time()

        # train for one epoch, then evaluate on the validation set
        prec1_tr = train(train_loader, model, loss_function, optimizer, lr_schedule, epoch)
        prec1 = validate(val_loader, model, loss_function, epoch)
        time_ep = time.time() - time_glob
        time_acc.append((time_ep + time_acc[-1][0], prec1_tr, prec1))

        # remember best prec@1 and save checkpoint (values are errors: lower wins)
        is_best = prec1 < best_prec1
        best_prec1 = min(prec1, best_prec1)
        state = {
            "epoch": epoch + 1,
            "state_dict": model.state_dict(),
            "best_prec1": best_prec1,
            "curr_prec1": prec1,
            "optimizer": optimizer.state_dict(),
            "total_steps": total_steps,
            "time_acc": time_acc,
            "exp_flops": exp_flops,
            "exp_l0": exp_l0,
        }
        if args.model == "L0WideResNet":
            if not args.multi_gpu:
                state["beta_ema"] = model.beta_ema
                if model.beta_ema > 0:
                    state["avg_params"] = model.avg_param
                    state["steps_ema"] = model.steps_ema
            else:
                state["beta_ema"] = model.module.beta_ema
                if model.module.beta_ema > 0:
                    state["avg_params"] = model.module.avg_param
                    state["steps_ema"] = model.module.steps_ema
        if args.model == "TDWideResNet":
            state["dropout"] = args.dropout
            state["dropout_botk"] = args.dropout_botk
        save_checkpoint(state, is_best, args.name)
    print("Best error: ", best_prec1)
    if args.tensorboard:
        writer.close()
def train(train_loader, model, criterion, optimizer, lr_schedule, epoch):
    """Train `model` for one epoch on the training set.

    Args:
        train_loader: iterable of (input, target) mini-batches.
        model: network to train (optionally wrapped in DataParallel).
        criterion: callable (output, target, model) -> scalar loss.
        optimizer: optimizer updating the model parameters.
        lr_schedule: MultiStepLR scheduler, stepped once per epoch.
        epoch (int): current epoch index (used for scheduling/logging).

    Returns:
        float: average top-1 *error* (100 - accuracy) over the epoch.
    """
    global total_steps, exp_flops, exp_l0, args, writer
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()

    # switch to train mode
    model.train()
    lr_schedule.step(epoch=epoch)
    if writer is not None:
        writer.add_scalar("learning_rate", optimizer.param_groups[0]["lr"], epoch)

    end = time.time()
    for i, (input_, target) in enumerate(train_loader):
        data_time.update(time.time() - end)
        total_steps += 1
        if torch.cuda.is_available():
            # Bugfix: `async=True` is a SyntaxError on Python >= 3.7 (async is
            # a keyword); PyTorch 0.4 renamed the kwarg to `non_blocking`.
            target = target.cuda(non_blocking=True)
            input_ = input_.cuda()
        # torch.autograd.Variable is a no-op since PyTorch 0.4 (this file
        # already relies on tensor.item()), so tensors are used directly.
        input_var = input_
        target_var = target

        # compute output
        output = model(input_var)
        loss = criterion(output, target_var, model)

        # measure accuracy and record loss
        prec1 = accuracy(output.data, target, topk=(1,))[0]
        losses.update(loss.item(), input_.size(0))
        top1.update(100 - prec1.item(), input_.size(0))

        # compute gradient and do SGD step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        if args.model == "L0WideResNet":
            # clamp the L0 gate parameters to their valid range
            layers = model.layers if not args.multi_gpu else model.module.layers
            for k, layer in enumerate(layers):
                layer.constrain_parameters()

            e_fl, e_l0 = (
                model.get_exp_flops_l0() if not args.multi_gpu else model.module.get_exp_flops_l0()
            )
            exp_flops.append(e_fl)
            exp_l0.append(e_l0)
            if writer is not None:
                writer.add_scalar("stats_comp/exp_flops", e_fl, total_steps)
                writer.add_scalar("stats_comp/exp_l0", e_l0, total_steps)

            if not args.multi_gpu:
                if model.beta_ema > 0.0:
                    model.update_ema()
            else:
                if model.module.beta_ema > 0.0:
                    model.module.update_ema()

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        if i % args.print_freq == 0:
            print(
                " Epoch: [{0}][{1}/{2}]\t"
                "Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t"
                "Data {data_time.val:.3f} ({data_time.avg:.3f})\t"
                "Loss {loss.val:.4f} ({loss.avg:.4f})\t"
                "Err@1 {top1.val:.3f} ({top1.avg:.3f})".format(
                    epoch,
                    i,
                    len(train_loader),
                    batch_time=batch_time,
                    data_time=data_time,
                    loss=losses,
                    top1=top1,
                )
            )

    # log to TensorBoard
    if writer is not None:
        writer.add_scalar("train/loss", losses.avg, epoch)
        writer.add_scalar("train/err", top1.avg, epoch)
    return top1.avg
def validate(val_loader, model, criterion, epoch):
    """Perform validation on the validation set.

    For L0WideResNet models with an EMA, the averaged parameters are
    temporarily swapped in for evaluation and the live parameters are
    restored afterwards.

    Returns:
        float: average top-1 *error* (100 - accuracy) on the validation set.
    """
    global args, writer
    batch_time = AverageMeter()
    losses = AverageMeter()
    top1 = AverageMeter()

    # switch to evaluate mode
    model.eval()
    if not args.multi_gpu:
        old_params = model.get_params()
        if model.beta_ema > 0 and args.model == "L0WideResNet":
            model.load_ema_params()
    else:
        old_params = model.module.get_params()
        if args.model == "L0WideResNet" and model.module.beta_ema > 0:
            model.module.load_ema_params()

    end = time.time()
    # Bugfixes: `Variable(..., volatile=True)` was removed in PyTorch 0.4 —
    # torch.no_grad() is the supported way to disable autograd for inference —
    # and `async=True` is a SyntaxError on Python >= 3.7 (`non_blocking` now).
    with torch.no_grad():
        for i, (input_, target) in enumerate(val_loader):
            if torch.cuda.is_available():
                target = target.cuda(non_blocking=True)
                input_ = input_.cuda()

            # compute output
            output = model(input_)
            loss = criterion(output, target, model)

            # measure accuracy and record loss
            prec1 = accuracy(output.data, target, topk=(1,))[0]
            losses.update(loss.item(), input_.size(0))
            top1.update(100 - prec1.item(), input_.size(0))

            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()

    # restore the live (non-EMA) parameters; for non-L0 models with
    # beta_ema > 0 this re-loads the current params, which is a no-op.
    if not args.multi_gpu:
        if model.beta_ema > 0:
            model.load_params(old_params)
    else:
        if model.module.beta_ema > 0:
            model.module.load_params(old_params)

    # log to TensorBoard
    if writer is not None:
        writer.add_scalar("val/loss", losses.avg, epoch)
        writer.add_scalar("val/err", top1.avg, epoch)
        layers = model.layers if not args.multi_gpu else model.module.layers
        for k, layer in enumerate(layers):
            if hasattr(layer, "qz_loga"):
                mode_z = layer.sample_z(1, sample=0).view(-1)
                writer.add_histogram("mode_z/layer{}".format(k), mode_z.cpu().data.numpy(), epoch)
    return top1.avg
# Standard script entry point: run training only when executed directly.
if __name__ == "__main__":
    main()
| 35.744131 | 99 | 0.605044 | import argparse
import os
import shutil
import time
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torch.nn.functional as F
from models import L0WideResNet, TDWideResNet
from dataloaders import cifar10, cifar100
from utils import save_checkpoint, AverageMeter, accuracy
from torch.optim import lr_scheduler
parser = argparse.ArgumentParser(description="PyTorch WideResNet Training")
parser.add_argument("--epochs", default=200, type=int, help="number of total epochs to run")
parser.add_argument(
"--start-epoch", default=0, type=int, help="manual epoch number (useful on restarts)"
)
parser.add_argument(
"-b", "--batch-size", default=128, type=int, help="mini-batch size (default: 128)"
)
parser.add_argument(
"--lr", "--learning-rate", default=0.1, type=float, help="initial learning rate"
)
parser.add_argument("--momentum", default=0.9, type=float, help="momentum")
parser.add_argument(
"--weight-decay", "--wd", default=0.0005, type=float, help="weight decay (default: 5e-4)"
)
parser.add_argument(
"--print-freq", "-p", default=100, type=int, help="print frequency (default: 100)"
)
parser.add_argument(
"--depth", default=28, type=int, help="total depth of the network (default: 28)"
)
parser.add_argument(
"--width", default=10, type=int, help="total width of the network (default: 10)"
)
parser.add_argument(
"--droprate_init", default=0.3, type=float, help="dropout probability (default: 0.3)"
)
parser.add_argument(
"--no-augment",
dest="augment",
action="store_false",
help="whether to use standard augmentation (default: True)",
)
parser.add_argument(
"--no-bottleneck", dest="bottleneck", action="store_false", help="To not use bottleneck block"
)
parser.add_argument(
"--resume", default="", type=str, help="path to latest checkpoint (default: none)"
)
parser.add_argument("--name", default="L0WideResNet", type=str, help="name of experiment")
parser.add_argument("--model", default="L0WideResNet", type=str, help="name of experiment")
parser.add_argument(
"--no-tensorboard",
dest="tensorboard",
action="store_false",
help="whether to use tensorboard (default: True)",
)
parser.add_argument("--multi_gpu", action="store_true")
parser.add_argument("--lamba", type=float, default=0.001, help="Coefficient for the L0 term.")
parser.add_argument("--beta_ema", type=float, default=0.99)
parser.add_argument("--lr_decay_ratio", type=float, default=0.2)
parser.add_argument("--dataset", choices=["c10", "c100"], default="c10")
parser.add_argument("--local_rep", action="store_true")
parser.add_argument("--epoch_drop", nargs="*", type=int, default=(60, 120, 160))
parser.add_argument("--temp", type=float, default=2.0 / 3.0)
parser.add_argument("--prune", type=bool, default=False)
parser.add_argument("--dropout", type=float, default=0.5)
parser.add_argument("--dropout_botk", type=float, default=0.5)
parser.add_argument("--dropout_type", type=str, default="weight")
parser.set_defaults(bottleneck=True)
parser.set_defaults(augment=True)
parser.set_defaults(tensorboard=True)
best_prec1 = 100
writer = None
time_acc = [(0, 0, 0)]
total_steps = 0
exp_flops, exp_l0 = [], []
def main():
global args, best_prec1, writer, time_acc, total_steps, exp_flops, exp_l0
args = parser.parse_args()
log_dir_net = args.name
args.name += "_{}_{}".format(args.depth, args.width)
if args.dataset == "c100":
args.name += "_c100"
print("model:", args.name)
if args.tensorboard:
from tensorboardX import SummaryWriter
directory = "logs/{}/{}".format(log_dir_net, args.name)
if os.path.exists(directory):
shutil.rmtree(directory)
os.makedirs(directory)
else:
os.makedirs(directory)
writer = SummaryWriter(directory)
dataload = cifar10 if args.dataset == "c10" else cifar100
train_loader, val_loader, num_classes = dataload(
augment=args.augment, batch_size=args.batch_size
)
if args.model == "L0WideResNet":
model = L0WideResNet(
args.depth,
num_classes,
widen_factor=args.width,
droprate_init=args.droprate_init,
N=50000,
beta_ema=args.beta_ema,
weight_decay=args.weight_decay,
local_rep=args.local_rep,
lamba=args.lamba,
temperature=args.temp,
)
if args.model == "TDWideResNet":
model = TDWideResNet(
args.depth,
num_classes,
widen_factor=args.width,
droprate_init=args.droprate_init,
N=50000,
beta_ema=args.beta_ema,
weight_decay=args.weight_decay,
dropout=args.dropout,
dropout_botk=args.dropout_botk,
dropout_type=args.dropout_type,
)
print(
"Number of model parameters: {}".format(
sum([p.data.nelement() for p in model.parameters()])
)
)
if args.multi_gpu:
model = torch.nn.DataParallel(model).cuda()
else:
if torch.cuda.is_available():
model = model.cuda()
optimizer = torch.optim.SGD(model.parameters(), args.lr, momentum=args.momentum, nesterov=True)
if args.resume:
if os.path.isfile(args.resume):
print("=> loading checkpoint '{}'".format(args.resume))
checkpoint = torch.load(args.resume)
args.start_epoch = checkpoint["epoch"]
best_prec1 = checkpoint["best_prec1"]
model.load_state_dict(checkpoint["state_dict"])
optimizer.load_state_dict(checkpoint["optimizer"])
total_steps = checkpoint["total_steps"]
time_acc = checkpoint["time_acc"]
exp_flops = checkpoint["exp_flops"]
exp_l0 = checkpoint["exp_l0"]
if args.model == "L0WideResNet" and checkpoint["beta_ema"] > 0:
if not args.multi_gpu:
model.beta_ema = checkpoint["beta_ema"]
model.avg_param = checkpoint["avg_params"]
model.steps_ema = checkpoint["steps_ema"]
else:
model.module.beta_ema = checkpoint["beta_ema"]
model.module.avg_param = checkpoint["avg_params"]
model.module.steps_ema = checkpoint["steps_ema"]
print("=> loaded checkpoint '{}' (epoch {})".format(args.resume, checkpoint["epoch"]))
else:
print("=> no checkpoint found at '{}'".format(args.resume))
total_steps, exp_flops, exp_l0 = 0, [], []
cudnn.benchmark = True
loglike = nn.CrossEntropyLoss()
if torch.cuda.is_available():
loglike = loglike.cuda()
def loss_function(output, target_var, model):
loss = loglike(output, target_var)
total_loss = loss
if torch.cuda.is_available():
total_loss = total_loss.cuda()
return total_loss
lr_schedule = lr_scheduler.MultiStepLR(
optimizer, milestones=args.epoch_drop, gamma=args.lr_decay_ratio
)
if args.prune:
for i in range(10):
botk = i * 0.1
model.prune(botk)
prec1 = validate(val_loader, model, loss_function, 1)
model.load_state_dict(checkpoint["state_dict"])
print(botk, 100 - prec1)
return
for epoch in range(args.start_epoch, args.epochs):
time_glob = time.time()
prec1_tr = train(train_loader, model, loss_function, optimizer, lr_schedule, epoch)
prec1 = validate(val_loader, model, loss_function, epoch)
time_ep = time.time() - time_glob
time_acc.append((time_ep + time_acc[-1][0], prec1_tr, prec1))
is_best = prec1 < best_prec1
best_prec1 = min(prec1, best_prec1)
state = {
"epoch": epoch + 1,
"state_dict": model.state_dict(),
"best_prec1": best_prec1,
"curr_prec1": prec1,
"optimizer": optimizer.state_dict(),
"total_steps": total_steps,
"time_acc": time_acc,
"exp_flops": exp_flops,
"exp_l0": exp_l0,
}
if args.model == "L0WideResNet":
if not args.multi_gpu:
state["beta_ema"] = model.beta_ema
if model.beta_ema > 0:
state["avg_params"] = model.avg_param
state["steps_ema"] = model.steps_ema
else:
state["beta_ema"] = model.module.beta_ema
if model.module.beta_ema > 0:
state["avg_params"] = model.module.avg_param
state["steps_ema"] = model.module.steps_ema
if args.model == "TDWideResNet":
state["dropout"] = args.dropout
state["dropout_botk"] = args.dropout_botk
save_checkpoint(state, is_best, args.name)
print("Best error: ", best_prec1)
if args.tensorboard:
writer.close()
def train(train_loader, model, criterion, optimizer, lr_schedule, epoch):
"""Train for one epoch on the training set"""
global total_steps, exp_flops, exp_l0, args, writer
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
model.train()
lr_schedule.step(epoch=epoch)
if writer is not None:
writer.add_scalar("learning_rate", optimizer.param_groups[0]["lr"], epoch)
end = time.time()
for i, (input_, target) in enumerate(train_loader):
data_time.update(time.time() - end)
total_steps += 1
if torch.cuda.is_available():
target = target.cuda(async=True)
input_ = input_.cuda()
input_var = torch.autograd.Variable(input_)
target_var = torch.autograd.Variable(target)
output = model(input_var)
loss = criterion(output, target_var, model)
prec1 = accuracy(output.data, target, topk=(1,))[0]
losses.update(loss.item(), input_.size(0))
top1.update(100 - prec1.item(), input_.size(0))
optimizer.zero_grad()
loss.backward()
optimizer.step()
if args.model == "L0WideResNet":
layers = model.layers if not args.multi_gpu else model.module.layers
for k, layer in enumerate(layers):
layer.constrain_parameters()
e_fl, e_l0 = (
model.get_exp_flops_l0() if not args.multi_gpu else model.module.get_exp_flops_l0()
)
exp_flops.append(e_fl)
exp_l0.append(e_l0)
if writer is not None:
writer.add_scalar("stats_comp/exp_flops", e_fl, total_steps)
writer.add_scalar("stats_comp/exp_l0", e_l0, total_steps)
if not args.multi_gpu:
if model.beta_ema > 0.0:
model.update_ema()
else:
if model.module.beta_ema > 0.0:
model.module.update_ema()
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0:
print(
" Epoch: [{0}][{1}/{2}]\t"
"Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t"
"Data {data_time.val:.3f} ({data_time.avg:.3f})\t"
"Loss {loss.val:.4f} ({loss.avg:.4f})\t"
"Err@1 {top1.val:.3f} ({top1.avg:.3f})".format(
epoch,
i,
len(train_loader),
batch_time=batch_time,
data_time=data_time,
loss=losses,
top1=top1,
)
)
if writer is not None:
writer.add_scalar("train/loss", losses.avg, epoch)
writer.add_scalar("train/err", top1.avg, epoch)
return top1.avg
def validate(val_loader, model, criterion, epoch):
"""Perform validation on the validation set"""
global args, writer
batch_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
model.eval()
if not args.multi_gpu:
old_params = model.get_params()
if model.beta_ema > 0 and args.model == "L0WideResNet":
model.load_ema_params()
else:
old_params = model.module.get_params()
if args.model == "L0WideResNet" and model.module.beta_ema > 0:
model.module.load_ema_params()
end = time.time()
for i, (input_, target) in enumerate(val_loader):
if torch.cuda.is_available():
target = target.cuda(async=True)
input_ = input_.cuda()
input_var = torch.autograd.Variable(input_, volatile=True)
target_var = torch.autograd.Variable(target, volatile=True)
output = model(input_var)
loss = criterion(output, target_var, model)
prec1 = accuracy(output.data, target, topk=(1,))[0]
losses.update(loss.data.item(), input_.size(0))
top1.update(100 - prec1.item(), input_.size(0))
batch_time.update(time.time() - end)
end = time.time()
if not args.multi_gpu:
if model.beta_ema > 0:
model.load_params(old_params)
else:
if model.module.beta_ema > 0:
model.module.load_params(old_params)
if writer is not None:
writer.add_scalar("val/loss", losses.avg, epoch)
writer.add_scalar("val/err", top1.avg, epoch)
layers = model.layers if not args.multi_gpu else model.module.layers
for k, layer in enumerate(layers):
if hasattr(layer, "qz_loga"):
mode_z = layer.sample_z(1, sample=0).view(-1)
writer.add_histogram("mode_z/layer{}".format(k), mode_z.cpu().data.numpy(), epoch)
return top1.avg
if __name__ == "__main__":
main()
| false | true |
f71f8633b734353bac2000dd7387efb6ae942340 | 2,457 | py | Python | cli/polyaxon/utils/cache.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | null | null | null | cli/polyaxon/utils/cache.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | 1 | 2022-01-24T11:26:47.000Z | 2022-03-18T23:17:58.000Z | cli/polyaxon/utils/cache.py | polyaxon/cli | 3543c0220a8a7c06fc9573cd2a740f8ae4930641 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
#
# Copyright 2018-2022 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.managers.project import ProjectConfigManager
from polyaxon.utils.formatting import Printer
CACHE_ERROR = (
"Found an invalid project config or project config cache, "
"if you are using Polyaxon CLI please run: "
"`polyaxon config purge --cache-only`"
)
def get_local_project(is_cli: bool = False):
    """Load the locally cached project config.

    On a corrupt/invalid cache: exits with an error message when running
    as the CLI, otherwise raises PolyaxonSchemaError.
    """
    try:
        config = ProjectConfigManager.get_config()
    except Exception:  # noqa
        if not is_cli:
            raise PolyaxonSchemaError(CACHE_ERROR)
        Printer.print_error(CACHE_ERROR, sys_exit=True)
    else:
        return config
def _is_same_project(owner=None, project=None):
    """Tell whether (owner, project) refers to the locally cached project.

    Returns True/False when the project name matches the cached one, and
    None otherwise (callers rely on truthiness only). When only one side
    declares an owner, the owner check is skipped.
    """
    cached = get_local_project(is_cli=True)
    if not project or project != cached.name:
        return None
    if owner and cached.owner:
        return owner == cached.owner
    return True
def _cache_project(config, owner=None, project=None):
    """Persist a project config: locally when the current directory's
    project matches (owner, project), globally otherwise."""
    locally_initialized = (
        ProjectConfigManager.is_initialized()
        and ProjectConfigManager.is_locally_initialized()
    )
    if locally_initialized and _is_same_project(owner, project):
        ProjectConfigManager.set_config(config)
        return
    ProjectConfigManager.set_config(
        config, visibility=ProjectConfigManager.VISIBILITY_GLOBAL
    )
def cache(config_manager, config, owner=None, project=None):
    """Cache `config` through `config_manager`, scoped to the current project.

    Project configs are fully handled by `_cache_project`; other config
    kinds are cached only when an initialized project matching
    (owner, project) exists, with local/global visibility mirroring how
    the project itself was initialized.
    """
    if config_manager == ProjectConfigManager:
        _cache_project(config=config, project=project, owner=owner)
        # Bugfix: without this early return the project config was written a
        # second time below, possibly with a different visibility.
        return

    # Set caching only if we have an initialized project
    if not ProjectConfigManager.is_initialized():
        return

    if not _is_same_project(owner, project):
        return

    visibility = (
        ProjectConfigManager.VISIBILITY_LOCAL
        if ProjectConfigManager.is_locally_initialized()
        else ProjectConfigManager.VISIBILITY_GLOBAL
    )
    config_manager.set_config(config, visibility=visibility)
| 33.202703 | 84 | 0.728938 |
from polyaxon.exceptions import PolyaxonSchemaError
from polyaxon.managers.project import ProjectConfigManager
from polyaxon.utils.formatting import Printer
CACHE_ERROR = (
"Found an invalid project config or project config cache, "
"if you are using Polyaxon CLI please run: "
"`polyaxon config purge --cache-only`"
)
def get_local_project(is_cli: bool = False):
try:
return ProjectConfigManager.get_config()
except Exception:
if is_cli:
Printer.print_error(CACHE_ERROR, sys_exit=True)
else:
raise PolyaxonSchemaError(CACHE_ERROR)
def _is_same_project(owner=None, project=None):
local_project = get_local_project(is_cli=True)
if project and project == local_project.name:
return not all([owner, local_project.owner]) or owner == local_project.owner
def _cache_project(config, owner=None, project=None):
if (
ProjectConfigManager.is_initialized()
and ProjectConfigManager.is_locally_initialized()
):
if _is_same_project(owner, project):
ProjectConfigManager.set_config(config)
return
ProjectConfigManager.set_config(
config, visibility=ProjectConfigManager.VISIBILITY_GLOBAL
)
def cache(config_manager, config, owner=None, project=None):
if config_manager == ProjectConfigManager:
_cache_project(config=config, project=project, owner=owner)
if not ProjectConfigManager.is_initialized():
return
if not _is_same_project(owner, project):
return
visibility = (
ProjectConfigManager.VISIBILITY_LOCAL
if ProjectConfigManager.is_locally_initialized()
else ProjectConfigManager.VISIBILITY_GLOBAL
)
config_manager.set_config(config, visibility=visibility)
| true | true |
f71f864e03a6b1e01179e730c110c5a2c2ec95e7 | 1,308 | py | Python | Mac/Modules/ibcarbon/IBCarbonscan.py | cemeyer/tauthon | 2c3328c5272cffa2a544542217181c5828afa7ed | [
"PSF-2.0"
] | 2,293 | 2015-01-02T12:46:10.000Z | 2022-03-29T09:45:43.000Z | python/src/Mac/Modules/ibcarbon/IBCarbonscan.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 315 | 2015-05-31T11:55:46.000Z | 2022-01-12T08:36:37.000Z | python/src/Mac/Modules/ibcarbon/IBCarbonscan.py | weiqiangzheng/sl4a | d3c17dca978cbeee545e12ea240a9dbf2a6999e9 | [
"Apache-2.0"
] | 1,033 | 2015-01-04T07:48:40.000Z | 2022-03-24T09:34:37.000Z | # IBCarbonscan.py
import sys
from bgenlocations import TOOLBOXDIR, BGENDIR
sys.path.append(BGENDIR)
from scantools import Scanner_OSX
def main():
    """Scan IBCarbonRuntime.h and regenerate the IBCarbon bindings.

    NOTE: Python 2 source (print statements, execfile) — part of the
    classic MacPython toolbox scanner scripts.
    """
    print "---Scanning IBCarbonRuntime.h---"
    input = ["IBCarbonRuntime.h"]
    output = "IBCarbongen.py"
    defsoutput = TOOLBOXDIR + "IBCarbonRuntime.py"
    scanner = IBCarbon_Scanner(input, output, defsoutput)
    scanner.scan()
    scanner.close()
    print "=== Testing definitions output code ==="
    # sanity-check the generated constants module by executing it
    execfile(defsoutput, {}, {})
    print "--done scanning, importing--"
    # importing the support module verifies the generated glue still loads
    import IBCarbonsupport
    print "done"
class IBCarbon_Scanner(Scanner_OSX):
    """bgen scanner specialization for the IBCarbon (Interface Builder) API."""

    def destination(self, type, name, arglist):
        """Classify a scanned C function.

        Calls whose first argument is an input IBNibRef become methods on
        the nib-reference object; everything else is a plain function.
        """
        classname = "IBCarbonFunction"
        listname = "functions"
        if arglist:
            t, n, m = arglist[0]
            if t == "IBNibRef" and m == "InMode":
                classname = "IBCarbonMethod"
                listname = "methods"
        return classname, listname
    def makeblacklistnames(self):
        # Names that must not get auto-generated wrappers.
        return [
            "DisposeNibReference",  # taken care of by destructor
            "CreateNibReferenceWithCFBundle",  ## need to wrap CFBundle.h properly first
            ]
    def makerepairinstructions(self):
        # No argument-repair rules are needed for this API.
        return []
# Run the scanner only when executed as a script (not on import).
if __name__ == "__main__":
    main()
| 27.25 | 93 | 0.612385 |
import sys
from bgenlocations import TOOLBOXDIR, BGENDIR
sys.path.append(BGENDIR)
from scantools import Scanner_OSX
def main():
print "---Scanning IBCarbonRuntime.h---"
input = ["IBCarbonRuntime.h"]
output = "IBCarbongen.py"
defsoutput = TOOLBOXDIR + "IBCarbonRuntime.py"
scanner = IBCarbon_Scanner(input, output, defsoutput)
scanner.scan()
scanner.close()
print "=== Testing definitions output code ==="
execfile(defsoutput, {}, {})
print "--done scanning, importing--"
import IBCarbonsupport
print "done"
class IBCarbon_Scanner(Scanner_OSX):
def destination(self, type, name, arglist):
classname = "IBCarbonFunction"
listname = "functions"
if arglist:
t, n, m = arglist[0]
if t == "IBNibRef" and m == "InMode":
classname = "IBCarbonMethod"
listname = "methods"
return classname, listname
def makeblacklistnames(self):
return [
"DisposeNibReference",
"CreateNibReferenceWithCFBundle", structions(self):
return []
if __name__ == "__main__":
main()
| false | true |
f71f8692d84797110282e3423509cce733cecedd | 13,447 | py | Python | containers.py | Fy-Network/fysql | 9a5910601e9aa13479c9fbd05eb64e958e90dea2 | [
"MIT"
] | 1 | 2016-06-17T08:48:52.000Z | 2016-06-17T08:48:52.000Z | containers.py | Fy-/fysql | 9a5910601e9aa13479c9fbd05eb64e958e90dea2 | [
"MIT"
] | 1 | 2016-06-17T18:06:41.000Z | 2016-06-17T18:06:41.000Z | containers.py | Fy-Network/fysql | 9a5910601e9aa13479c9fbd05eb64e958e90dea2 | [
"MIT"
] | 2 | 2018-02-11T02:14:11.000Z | 2020-01-07T05:40:34.000Z | # -*- coding: utf-8 -*-
"""
fysql.containers
~~~~~~~~~~~~~~~~
:copyright: (c) 2016 by Gasquez Florian
:license: MIT, see LICENSE for more details.
"""
from __future__ import unicode_literals
from functools import wraps
import copy
import hashlib
from .entities import SQLEntity, SQLJoin, SQLCondition, SQLColumn
from .columns import FKeyColumn, PKeyColumn, IntegerColumn
from .static import Tables
'''
class ContainerWalkerType(type):
_instances = {}
def __new__(cls, *args, **kwargs):
if not args[2]:
return super(ContainerWalker, cls).__new__(cls, *args, **kwargs)
key = hashlib.md5(args[0].encode('utf-8')).hexdigest()
if key not in ContainerWalkerType._instances.keys():
ContainerWalkerType._instances[key] = super(ContainerWalker, cls).__new__(cls, *args, **kwargs)
return ContainerWalkerType._instances[key]
'''
class ContainerWalker(object):
    """Walk a (possibly nested) list of SQLEntity / EntityContainer items
    and render them into a single SQL string.

    Attributes:
        _sql (str|bool): cached rendered SQL; False until prepare() runs.
        entities (list): items to render, in order.
        separator (str): string joined between top-level items.
    """
    def __init__(self, entities, separator, executable, *args, **kwargs):
        self._sql = False
        self.entities = entities
        self.separator = separator

    def prepare(self):
        """Flatten the entity tree into string fragments.

        Nested EntityContainers are rendered recursively with their own
        separators. Caches the joined result in self._sql and returns the
        list of fragments (used by the recursive calls).
        """
        sql = []
        for entity in self.entities:
            if isinstance(entity, EntityContainer):
                sql.append(
                    entity.separator.join(
                        map(str, entity.walker.prepare())
                    )
                )
            else:
                sql.append(str(entity))

        self._sql = self.separator.join(map(str, sql)).strip()
        return sql

    @property
    def sql(self):
        """Rendered SQL string; computed lazily on first access."""
        if self._sql is False:
            self.prepare()
        return self._sql

    @staticmethod
    def _sql_entity(value):
        # Bugfix: the original `'{0}{1}'.format(str(value))` had two
        # placeholders but one argument, raising IndexError on every call.
        return str(value)
class ResultContainer(object):
    """Map a SQL SELECT result set onto Table instances.

    Each cursor row becomes an instance of `table` whose `_data` dict is
    keyed by the result alias (`<table>_<column>`).

    Attributes:
        table: fysql Table subclass the rows belong to.
        cursor: DB-API cursor holding an executed SELECT.
        sql2py (dict): column index -> result alias string.
        result (list): parsed Table instances, in row order.
    """
    def __init__(self, table, cursor):
        self.table = table
        self.cursor = cursor
        self.sql2py = {}
        self.result = []

        if self.cursor.description is not None:
            for i in range(len(self.cursor.description)):
                desc = self.cursor.description[i][0]
                # some drivers return column names as bytes
                if isinstance(desc, bytes):
                    desc = desc.decode('utf-8')
                self.sql2py[i] = desc

            self.parse()

    def parse(self):
        """Fetch all rows, convert each one, then close the cursor.

        Todo:
            * Allow cursor.fetchone()? (memory issue)
        """
        rows = self.cursor.fetchall()
        for row in rows:
            self.parse_row(row)

        self.cursor.close()

    def parse_row(self, row):
        """Build one Table instance from a raw row and append it to result."""
        item = self.table()
        for k, f in self.sql2py.items():
            tables = Tables.tables
            # aliases look like '<table>_<column>': table id before the
            # first underscore, column id after it
            id_table = f.split('_')[0]
            id_column = f.split('_', 1)[1]

            # column comes from a joined table: resolve its real name
            # through this table's backrefs
            if id_table != self.table._db_table:
                id_table = self.table._backrefs[id_table]

            # let the column type convert the raw SQL value when it can
            if '_py' in dir(tables[id_table]._columns[id_column]):
                item._data[f] = tables[id_table]._columns[id_column]._py(row[k])
            else:
                item._data[f] = row[k]

        item.__load__()
        self.result.append(item)
class EntityContainer(object):
    """Ordered collection of SQL entities rendered with a common separator.

    Attributes:
        entities (list): SQLEntity / EntityContainer items, in order.
        separator (str): string placed between rendered items.
        executable (bool): whether this container renders a runnable query.
    """
    def __init__(self, separator=' '):
        self._walker = False
        self.entities = []
        self.separator = separator
        self.executable = False

    def __add__(self, entity):
        """Append an entity; returns self so `container += entity` works."""
        self.entities.append(entity)
        return self

    def __len__(self):
        return len(self.entities)

    @property
    def walker(self):
        """Lazily create and memoize the ContainerWalker for this container."""
        if self._walker:
            return self._walker
        self._walker = ContainerWalker(self.entities, self.separator, self.executable)
        return self._walker
class EntityExecutableContainer(EntityContainer):
    """Entity container whose rendered SQL can be run against a database.

    Subclasses build their entity list in __init__ and call execute().
    """
    def __init__(self, table):
        super(EntityExecutableContainer, self).__init__()
        self.table = table
        self.executable = True

    @property
    def sql(self):
        """Full SQL string produced by walking the entity tree."""
        return self.walker.sql

    def execute(self, commit=False):
        """Run the rendered SQL on the table's database connection."""
        query = self.sql
        return self.table._database.execute(query, commit=commit)
class DropContainer(EntityExecutableContainer):
    """DROP TABLE SQL query.

    NOTE: constructing an instance immediately executes
    `DROP TABLE IF EXISTS <table>` — any stored data is lost.
    """
    def __init__(self, table):
        super(DropContainer, self).__init__(table)
        self += SQLEntity('DROP TABLE IF EXISTS {0};'.format(self.table._sql_entity))
        self.execute()
class CreateTableContainer(EntityExecutableContainer):
    """CREATE TABLE SQL query (MySQL/InnoDB dialect).

    Builds a `CREATE TABLE IF NOT EXISTS` statement from the Table's
    column definitions and executes it immediately. NOTE: the existing
    table is dropped first (via DropContainer), so any stored data is
    lost when this runs.
    """
    def __init__(self, table):
        super(CreateTableContainer, self).__init__(table)
        self += SQLEntity('CREATE TABLE IF NOT EXISTS {0} ('.format(self.table._sql_entity))
        # column definitions and index clauses, joined with ', '
        args_create = EntityContainer(separator=', ')
        indexes = EntityContainer(separator=', ')
        indexes += SQLEntity('PRIMARY KEY ({0})'.format(self.table._pkey.sql_entities['name']))
        for key, column in self.table._columns.items():
            # one container per column, e.g. "name VARCHAR(255) NOT NULL"
            column_create = EntityContainer(separator=' ')
            column_create += column.sql_entities['name']
            if column.sql_type_size is not None:
                column_create += SQLEntity('{0}({1})'.format(column.sql_type, column.sql_type_size))
            else:
                column_create += SQLEntity(column.sql_type)
            # key columns are stored as UNSIGNED integers
            if isinstance(column, FKeyColumn) or isinstance(column, PKeyColumn):
                column_create += SQLEntity('UNSIGNED')
            # indexed unique columns get a UNIQUE INDEX below instead of
            # a column-level UNIQUE constraint
            if column.unique and not column.index:
                column_create += SQLEntity('UNIQUE')
            if column.null is False:
                column_create += SQLEntity('NOT NULL')
            else:
                column_create += SQLEntity('NULL')
            # if column.default:
            #    column_create += SQLEntity('DEFAULT {0}'.format(column.escape(column.default)))
            if column.pkey and isinstance(column, IntegerColumn):
                column_create += SQLEntity('AUTO_INCREMENT')
            args_create += column_create
            if column.index:
                unique = '' if not column.unique else 'UNIQUE'
                indexes += SQLEntity('{0} INDEX {1} ({2})'.format(unique, column.sql_entities['index'], column.sql_entities['name']))
        args_create += indexes
        self += args_create
        self += SQLEntity(') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;')
        DropContainer(self.table)
        self.execute()
class InsertContainer(EntityExecutableContainer):
"""Table.insert(table_instance)"""
def __init__(self, table, instance):
super(InsertContainer, self).__init__(table)
self.filled = []
self.instance = instance
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=', ')
columns_values = EntityContainer(separator=', ')
for key, column in self.table._columns.items():
value = getattr(self.instance, key)
print (key +':'+ value)
if value:
if column.pkey is True:
self.pkey_id = value
columns_names += column.sql_entities['name']
columns_values += column.escape(getattr(self.instance, key))
for k, v in self.table._defaults.items():
if not value and key == k:
columns_names += self.table._columns[k].sql_entities['name']
columns_values += column.escape(v)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class CreateContainer(EntityExecutableContainer):
"""INSERT INTO SQL query. Used for Table.create()"""
def __init__(self, table, **kwargs):
super(CreateContainer, self).__init__(table)
self.filled = []
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=',')
columns_values = EntityContainer(separator=',')
for attr, value in kwargs.items():
if attr in self.table._columns.keys():
columns_names += self.table._columns[attr].sql_entities['name']
columns_values += self.table._columns[attr].escape(value)
if self.table._columns[attr].pkey is True:
self.pkey_id = value
self.filled.append(attr)
for key, column in self.table._defaults.items():
if key not in self.filled:
columns_names += self.table._columns[key].sql_entities['name']
columns_values += self.table._columns[key].escape(self.table._columns[key].default)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class SaveContainer(EntityExecutableContainer):
"""UPDATE SQL Query. Used for TableInstance.save()"""
def __init__(self, table, instance):
super(SaveContainer, self).__init__(table)
self += SQLEntity('UPDATE')
self += self.table._sql_entity
self += SQLEntity('SET')
columns = EntityContainer(separator=',')
to_update = []
for key, column in self.table._columns.items():
columns += SQLEntity('{0}={1}'.format(
column,
column.escape(getattr(instance, key))
)
)
if isinstance(column, FKeyColumn):
to_update.append(getattr(instance, column.reference))
self += columns
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
for item in to_update:
if item:
item.save()
class RemoveContainer(EntityExecutableContainer):
"""DELETE SQL Query. Used for TableInstance.remove()"""
def __init__(self, table, instance):
super(RemoveContainer, self).__init__(table)
self += SQLEntity('DELETE FROM')
self += self.table._sql_entity
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
def _generative(func):
"""Chainable method"""
@wraps(func)
def decorator(self, *args, **kwargs):
func(self, *args, **kwargs)
return self
return decorator
class ConditionableExecutableContainer(EntityExecutableContainer):
"""Conditionable query, with where, limit, group, having..."""
def __init__(self, table, *args, **kwargs):
super(ConditionableExecutableContainer, self).__init__(table)
self._where = False
self._group = False
self._order = False
def clone(self):
return copy.deepcopy(self)
@_generative
def where(self, *conditions):
if self._where is False:
self += SQLEntity('WHERE')
self._where = True
else:
self += SQLEntity('AND')
size = len(conditions) - 1
i = 0
if size == 0:
if isinstance(conditions[0], SQLCondition):
self += conditions[0]
else:
self += SQLEntity(conditions[0])
else:
for condition in conditions:
if isinstance(condition, SQLCondition):
self += SQLEntity('(')
self += condition
self += SQLEntity(')')
if i < size:
self += SQLEntity('AND')
i += 1
@_generative
def order_by(self, column, order='DESC'):
if self._order is False:
self += SQLEntity('ORDER BY')
self._order = True
else:
self += SQLEntity(',')
if isinstance(column, str):
self += SQLEntity(column)
else:
self += column
self += SQLEntity(order)
@_generative
def group_by(self, group_by):
if self._group is False:
self += SQLEntity('GROUP BY')
self._group = True
else:
self += SQLEntity(',')
if isinstance(group_by, str):
self += SQLEntity(group_by)
def limit(self, limit, position=0):
self += SQLEntity('LIMIT {0},{1}'.format(position, limit))
if limit == 1:
return self.execute(unique=True)
return self.execute()
def one(self):
return self.limit(1)
def all(self):
return self.execute()
class SelectContainer(ConditionableExecutableContainer):
"""SELECT SQL Query."""
def __init__(self, table, *args, **kwargs):
super(SelectContainer, self).__init__(table)
self.kwargs = kwargs
self.args = args
self.is_count = kwargs.get('is_count') or False
self.selected = []
self.add_from = kwargs.get('add_from') or False
self.executable = True
# add selected columns
if self.is_count:
columns = SQLEntity('COUNT(*)')
else:
columns = EntityContainer(separator=',')
for column in self.table._columns.values() if not args else args:
columns += column.sql_entities['selection']
self.selected.append(hash(column))
# add selected tables
tables = EntityContainer(separator=',')
tables += self.table._sql_entity
if self.add_from:
tables += SQLEntity(self.add_from)
# add joins
joins = EntityContainer()
for foreign in reversed(self.table._foreigns):
if hash(foreign['column']) in self.selected or self.is_count:
join = 'INNER' if foreign['column'].required else 'LEFT'
joins += SQLJoin(join, foreign['table']._sql_entity, foreign['left_on'], foreign['right_on'])
if not self.is_count:
for key, column in foreign['table']._columns.items():
columns += SQLColumn(
column.sql_column,
column.table._db_table,
'{0}_{1}'.format(foreign['column'].reference, column.sql_column)
)
self += SQLEntity('SELECT')
self += columns
self += SQLEntity('FROM')
self += tables
if len(joins) != 0:
self += joins
def execute(self, unique=False):
cursor = self.table._database.execute(self.sql)
if self.is_count:
return cursor.fetchone()[0]
if unique:
try:
return ResultContainer(self.table, cursor).result[0]
except IndexError:
return False
return ResultContainer(self.table, cursor).result
def count(self):
self.entities[1] = SQLEntity('COUNT(*)')
self.is_count = True
return self.execute()
| 25.809981 | 121 | 0.692348 |
from __future__ import unicode_literals
from functools import wraps
import copy
import hashlib
from .entities import SQLEntity, SQLJoin, SQLCondition, SQLColumn
from .columns import FKeyColumn, PKeyColumn, IntegerColumn
from .static import Tables
class ContainerWalker(object):
def __init__(self, entities, separator, executable, *args, **kwargs):
self._sql = False
self.entities = entities
self.separator = separator
def prepare(self):
sql = []
for entity in self.entities:
if isinstance(entity, EntityContainer):
sql.append(
entity.separator.join(
map(str, entity.walker.prepare())
)
)
else:
sql.append(str(entity))
self._sql = self.separator.join(map(str, sql)).strip()
return sql
@property
def sql(self):
if self._sql is False:
self.prepare()
return self._sql
@staticmethod
def _sql_entity(value):
return '{0}{1}'.format(str(value))
class ResultContainer(object):
def __init__(self, table, cursor):
self.table = table
self.cursor = cursor
self.sql2py = {}
self.result = []
if self.cursor.description is not None:
for i in range(len(self.cursor.description)):
desc = self.cursor.description[i][0]
if isinstance(desc, bytes):
desc = desc.decode('utf-8')
self.sql2py[i] = desc
self.parse()
def parse(self):
rows = self.cursor.fetchall()
for row in rows:
self.parse_row(row)
self.cursor.close()
def parse_row(self, row):
item = self.table()
for k, f in self.sql2py.items():
tables = Tables.tables
id_table = f.split('_')[0]
id_column = f.split('_', 1)[1]
if id_table != self.table._db_table:
id_table = self.table._backrefs[id_table]
if '_py' in dir(tables[id_table]._columns[id_column]):
item._data[f] = tables[id_table]._columns[id_column]._py(row[k])
else:
item._data[f] = row[k]
item.__load__()
self.result.append(item)
class EntityContainer(object):
def __init__(self, separator=' '):
self._walker = False
self.entities = []
self.separator = separator
self.executable = False
def __add__(self, entity):
self.entities.append(entity)
return self
def __len__(self):
return len(self.entities)
@property
def walker(self):
if not self._walker:
self._walker = ContainerWalker(self.entities, self.separator, self.executable)
return self._walker
class EntityExecutableContainer(EntityContainer):
def __init__(self, table):
super(EntityExecutableContainer, self).__init__()
self.table = table
self.executable = True
@property
def sql(self):
return self.walker.sql
def execute(self, commit=False):
return self.table._database.execute(self.sql, commit=commit)
class DropContainer(EntityExecutableContainer):
def __init__(self, table):
super(DropContainer, self).__init__(table)
self += SQLEntity('DROP TABLE IF EXISTS {0};'.format(self.table._sql_entity))
self.execute()
class CreateTableContainer(EntityExecutableContainer):
def __init__(self, table):
super(CreateTableContainer, self).__init__(table)
self += SQLEntity('CREATE TABLE IF NOT EXISTS {0} ('.format(self.table._sql_entity))
args_create = EntityContainer(separator=', ')
indexes = EntityContainer(separator=', ')
indexes += SQLEntity('PRIMARY KEY ({0})'.format(self.table._pkey.sql_entities['name']))
for key, column in self.table._columns.items():
column_create = EntityContainer(separator=' ')
column_create += column.sql_entities['name']
if column.sql_type_size is not None:
column_create += SQLEntity('{0}({1})'.format(column.sql_type, column.sql_type_size))
else:
column_create += SQLEntity(column.sql_type)
if isinstance(column, FKeyColumn) or isinstance(column, PKeyColumn):
column_create += SQLEntity('UNSIGNED')
if column.unique and not column.index:
column_create += SQLEntity('UNIQUE')
if column.null is False:
column_create += SQLEntity('NOT NULL')
else:
column_create += SQLEntity('NULL')
if column.pkey and isinstance(column, IntegerColumn):
column_create += SQLEntity('AUTO_INCREMENT')
args_create += column_create
if column.index:
unique = '' if not column.unique else 'UNIQUE'
indexes += SQLEntity('{0} INDEX {1} ({2})'.format(unique, column.sql_entities['index'], column.sql_entities['name']))
args_create += indexes
self += args_create
self += SQLEntity(') ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;')
DropContainer(self.table)
self.execute()
class InsertContainer(EntityExecutableContainer):
def __init__(self, table, instance):
super(InsertContainer, self).__init__(table)
self.filled = []
self.instance = instance
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=', ')
columns_values = EntityContainer(separator=', ')
for key, column in self.table._columns.items():
value = getattr(self.instance, key)
print (key +':'+ value)
if value:
if column.pkey is True:
self.pkey_id = value
columns_names += column.sql_entities['name']
columns_values += column.escape(getattr(self.instance, key))
for k, v in self.table._defaults.items():
if not value and key == k:
columns_names += self.table._columns[k].sql_entities['name']
columns_values += column.escape(v)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class CreateContainer(EntityExecutableContainer):
def __init__(self, table, **kwargs):
super(CreateContainer, self).__init__(table)
self.filled = []
self.pkey_id = False
self += SQLEntity('INSERT INTO')
self += self.table._sql_entity
self += SQLEntity('(')
columns_names = EntityContainer(separator=',')
columns_values = EntityContainer(separator=',')
for attr, value in kwargs.items():
if attr in self.table._columns.keys():
columns_names += self.table._columns[attr].sql_entities['name']
columns_values += self.table._columns[attr].escape(value)
if self.table._columns[attr].pkey is True:
self.pkey_id = value
self.filled.append(attr)
for key, column in self.table._defaults.items():
if key not in self.filled:
columns_names += self.table._columns[key].sql_entities['name']
columns_values += self.table._columns[key].escape(self.table._columns[key].default)
self += columns_names
self += SQLEntity(')')
self += SQLEntity('VALUES (')
self += columns_values
self += SQLEntity(');')
def execute(self):
cursor = self.table._database.execute(self.sql)
if self.pkey_id is False:
self.pkey_id = self.table._database.insert_id(cursor)
self.table._database.commit()
return self.table.get(self.table._pkey == self.pkey_id)
class SaveContainer(EntityExecutableContainer):
def __init__(self, table, instance):
super(SaveContainer, self).__init__(table)
self += SQLEntity('UPDATE')
self += self.table._sql_entity
self += SQLEntity('SET')
columns = EntityContainer(separator=',')
to_update = []
for key, column in self.table._columns.items():
columns += SQLEntity('{0}={1}'.format(
column,
column.escape(getattr(instance, key))
)
)
if isinstance(column, FKeyColumn):
to_update.append(getattr(instance, column.reference))
self += columns
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
for item in to_update:
if item:
item.save()
class RemoveContainer(EntityExecutableContainer):
def __init__(self, table, instance):
super(RemoveContainer, self).__init__(table)
self += SQLEntity('DELETE FROM')
self += self.table._sql_entity
self += SQLEntity('WHERE {0}={1} LIMIT 1'.format(
self.table._pkey,
self.table._pkey.escape(getattr(instance, self.table._pkey.name))
))
self.execute(commit=True)
def _generative(func):
@wraps(func)
def decorator(self, *args, **kwargs):
func(self, *args, **kwargs)
return self
return decorator
class ConditionableExecutableContainer(EntityExecutableContainer):
def __init__(self, table, *args, **kwargs):
super(ConditionableExecutableContainer, self).__init__(table)
self._where = False
self._group = False
self._order = False
def clone(self):
return copy.deepcopy(self)
@_generative
def where(self, *conditions):
if self._where is False:
self += SQLEntity('WHERE')
self._where = True
else:
self += SQLEntity('AND')
size = len(conditions) - 1
i = 0
if size == 0:
if isinstance(conditions[0], SQLCondition):
self += conditions[0]
else:
self += SQLEntity(conditions[0])
else:
for condition in conditions:
if isinstance(condition, SQLCondition):
self += SQLEntity('(')
self += condition
self += SQLEntity(')')
if i < size:
self += SQLEntity('AND')
i += 1
@_generative
def order_by(self, column, order='DESC'):
if self._order is False:
self += SQLEntity('ORDER BY')
self._order = True
else:
self += SQLEntity(',')
if isinstance(column, str):
self += SQLEntity(column)
else:
self += column
self += SQLEntity(order)
@_generative
def group_by(self, group_by):
if self._group is False:
self += SQLEntity('GROUP BY')
self._group = True
else:
self += SQLEntity(',')
if isinstance(group_by, str):
self += SQLEntity(group_by)
def limit(self, limit, position=0):
self += SQLEntity('LIMIT {0},{1}'.format(position, limit))
if limit == 1:
return self.execute(unique=True)
return self.execute()
def one(self):
return self.limit(1)
def all(self):
return self.execute()
class SelectContainer(ConditionableExecutableContainer):
def __init__(self, table, *args, **kwargs):
super(SelectContainer, self).__init__(table)
self.kwargs = kwargs
self.args = args
self.is_count = kwargs.get('is_count') or False
self.selected = []
self.add_from = kwargs.get('add_from') or False
self.executable = True
if self.is_count:
columns = SQLEntity('COUNT(*)')
else:
columns = EntityContainer(separator=',')
for column in self.table._columns.values() if not args else args:
columns += column.sql_entities['selection']
self.selected.append(hash(column))
tables = EntityContainer(separator=',')
tables += self.table._sql_entity
if self.add_from:
tables += SQLEntity(self.add_from)
joins = EntityContainer()
for foreign in reversed(self.table._foreigns):
if hash(foreign['column']) in self.selected or self.is_count:
join = 'INNER' if foreign['column'].required else 'LEFT'
joins += SQLJoin(join, foreign['table']._sql_entity, foreign['left_on'], foreign['right_on'])
if not self.is_count:
for key, column in foreign['table']._columns.items():
columns += SQLColumn(
column.sql_column,
column.table._db_table,
'{0}_{1}'.format(foreign['column'].reference, column.sql_column)
)
self += SQLEntity('SELECT')
self += columns
self += SQLEntity('FROM')
self += tables
if len(joins) != 0:
self += joins
def execute(self, unique=False):
cursor = self.table._database.execute(self.sql)
if self.is_count:
return cursor.fetchone()[0]
if unique:
try:
return ResultContainer(self.table, cursor).result[0]
except IndexError:
return False
return ResultContainer(self.table, cursor).result
def count(self):
self.entities[1] = SQLEntity('COUNT(*)')
self.is_count = True
return self.execute()
| true | true |
f71f86944f4a3f67142dcc0a2330fcdd6e0e21be | 8,966 | py | Python | lib/kubernetes/client/models/v1_resource_attributes.py | splunkenizer/splunk_as_a_service_app | 97c4aaf927d2171bf131126cf9b70489ac75bc5a | [
"Apache-2.0"
] | 7 | 2019-12-21T00:14:14.000Z | 2021-03-11T14:51:37.000Z | lib/kubernetes/client/models/v1_resource_attributes.py | splunkenizer/splunk_as_a_service_app | 97c4aaf927d2171bf131126cf9b70489ac75bc5a | [
"Apache-2.0"
] | 29 | 2019-10-09T11:16:21.000Z | 2020-06-23T09:32:09.000Z | lib/kubernetes/client/models/v1_resource_attributes.py | splunkenizer/splunk_as_a_service_app | 97c4aaf927d2171bf131126cf9b70489ac75bc5a | [
"Apache-2.0"
] | 1 | 2021-05-07T10:13:31.000Z | 2021-05-07T10:13:31.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1ResourceAttributes(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'group': 'str',
'name': 'str',
'namespace': 'str',
'resource': 'str',
'subresource': 'str',
'verb': 'str',
'version': 'str'
}
attribute_map = {
'group': 'group',
'name': 'name',
'namespace': 'namespace',
'resource': 'resource',
'subresource': 'subresource',
'verb': 'verb',
'version': 'version'
}
def __init__(self, group=None, name=None, namespace=None, resource=None, subresource=None, verb=None, version=None):
"""
V1ResourceAttributes - a model defined in Swagger
"""
self._group = None
self._name = None
self._namespace = None
self._resource = None
self._subresource = None
self._verb = None
self._version = None
self.discriminator = None
if group is not None:
self.group = group
if name is not None:
self.name = name
if namespace is not None:
self.namespace = namespace
if resource is not None:
self.resource = resource
if subresource is not None:
self.subresource = subresource
if verb is not None:
self.verb = verb
if version is not None:
self.version = version
@property
def group(self):
"""
Gets the group of this V1ResourceAttributes.
Group is the API Group of the Resource. \"*\" means all.
:return: The group of this V1ResourceAttributes.
:rtype: str
"""
return self._group
@group.setter
def group(self, group):
"""
Sets the group of this V1ResourceAttributes.
Group is the API Group of the Resource. \"*\" means all.
:param group: The group of this V1ResourceAttributes.
:type: str
"""
self._group = group
@property
def name(self):
"""
Gets the name of this V1ResourceAttributes.
Name is the name of the resource being requested for a \"get\" or deleted for a \"delete\". \"\" (empty) means all.
:return: The name of this V1ResourceAttributes.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this V1ResourceAttributes.
Name is the name of the resource being requested for a \"get\" or deleted for a \"delete\". \"\" (empty) means all.
:param name: The name of this V1ResourceAttributes.
:type: str
"""
self._name = name
@property
def namespace(self):
"""
Gets the namespace of this V1ResourceAttributes.
Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces \"\" (empty) is defaulted for LocalSubjectAccessReviews \"\" (empty) is empty for cluster-scoped resources \"\" (empty) means \"all\" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview
:return: The namespace of this V1ResourceAttributes.
:rtype: str
"""
return self._namespace
@namespace.setter
def namespace(self, namespace):
"""
Sets the namespace of this V1ResourceAttributes.
Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces \"\" (empty) is defaulted for LocalSubjectAccessReviews \"\" (empty) is empty for cluster-scoped resources \"\" (empty) means \"all\" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview
:param namespace: The namespace of this V1ResourceAttributes.
:type: str
"""
self._namespace = namespace
@property
def resource(self):
"""
Gets the resource of this V1ResourceAttributes.
Resource is one of the existing resource types. \"*\" means all.
:return: The resource of this V1ResourceAttributes.
:rtype: str
"""
return self._resource
@resource.setter
def resource(self, resource):
"""
Sets the resource of this V1ResourceAttributes.
Resource is one of the existing resource types. \"*\" means all.
:param resource: The resource of this V1ResourceAttributes.
:type: str
"""
self._resource = resource
@property
def subresource(self):
"""
Gets the subresource of this V1ResourceAttributes.
Subresource is one of the existing resource types. \"\" means none.
:return: The subresource of this V1ResourceAttributes.
:rtype: str
"""
return self._subresource
@subresource.setter
def subresource(self, subresource):
"""
Sets the subresource of this V1ResourceAttributes.
Subresource is one of the existing resource types. \"\" means none.
:param subresource: The subresource of this V1ResourceAttributes.
:type: str
"""
self._subresource = subresource
@property
def verb(self):
"""
Gets the verb of this V1ResourceAttributes.
Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. \"*\" means all.
:return: The verb of this V1ResourceAttributes.
:rtype: str
"""
return self._verb
@verb.setter
def verb(self, verb):
"""
Sets the verb of this V1ResourceAttributes.
Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. \"*\" means all.
:param verb: The verb of this V1ResourceAttributes.
:type: str
"""
self._verb = verb
@property
def version(self):
"""
Gets the version of this V1ResourceAttributes.
Version is the API Version of the Resource. \"*\" means all.
:return: The version of this V1ResourceAttributes.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this V1ResourceAttributes.
Version is the API Version of the Resource. \"*\" means all.
:param version: The version of this V1ResourceAttributes.
:type: str
"""
self._version = version
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1ResourceAttributes):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
| 30.39322 | 361 | 0.565135 |
from pprint import pformat
from six import iteritems
import re
class V1ResourceAttributes(object):
swagger_types = {
'group': 'str',
'name': 'str',
'namespace': 'str',
'resource': 'str',
'subresource': 'str',
'verb': 'str',
'version': 'str'
}
attribute_map = {
'group': 'group',
'name': 'name',
'namespace': 'namespace',
'resource': 'resource',
'subresource': 'subresource',
'verb': 'verb',
'version': 'version'
}
def __init__(self, group=None, name=None, namespace=None, resource=None, subresource=None, verb=None, version=None):
self._group = None
self._name = None
self._namespace = None
self._resource = None
self._subresource = None
self._verb = None
self._version = None
self.discriminator = None
if group is not None:
self.group = group
if name is not None:
self.name = name
if namespace is not None:
self.namespace = namespace
if resource is not None:
self.resource = resource
if subresource is not None:
self.subresource = subresource
if verb is not None:
self.verb = verb
if version is not None:
self.version = version
@property
def group(self):
return self._group
@group.setter
def group(self, group):
self._group = group
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._name = name
@property
def namespace(self):
return self._namespace
@namespace.setter
def namespace(self, namespace):
self._namespace = namespace
@property
def resource(self):
return self._resource
@resource.setter
def resource(self, resource):
self._resource = resource
@property
def subresource(self):
return self._subresource
@subresource.setter
def subresource(self, subresource):
self._subresource = subresource
@property
def verb(self):
return self._verb
@verb.setter
def verb(self, verb):
self._verb = verb
@property
def version(self):
return self._version
@version.setter
def version(self, version):
self._version = version
def to_dict(self):
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, V1ResourceAttributes):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f71f86e48de8074e6e823ee832ad036d915afdae | 16,478 | py | Python | figures/kCSD_properties/targeted_basis.py | rdarie/kCSD-python | 5b9e1b1dce2ff95c0d981c2c4015b7a75199de9a | [
"BSD-3-Clause"
] | 11 | 2017-11-06T21:24:18.000Z | 2022-02-07T21:17:13.000Z | figures/kCSD_properties/targeted_basis.py | aeladly91/kCSD-python | 4dd0015e9c5598e7eceeeb25668e696e495b2026 | [
"BSD-3-Clause"
] | 105 | 2017-12-13T12:49:54.000Z | 2022-03-19T12:25:51.000Z | figures/kCSD_properties/targeted_basis.py | aeladly91/kCSD-python | 4dd0015e9c5598e7eceeeb25668e696e495b2026 | [
"BSD-3-Clause"
] | 27 | 2017-06-08T07:32:32.000Z | 2022-02-07T21:17:15.000Z | """
@author: mkowalska
"""
import os
from os.path import expanduser
import numpy as np
import matplotlib.pyplot as plt
import datetime
import time
from kcsd import ValidateKCSD, ValidateKCSD1D, SpectralStructure, KCSD1D
__abs_file__ = os.path.abspath(__file__)  # absolute path of this script
home = expanduser('~')  # user's home directory
DAY = datetime.datetime.now()
DAY = DAY.strftime('%Y%m%d')  # run date formatted as YYYYMMDD
TIMESTR = time.strftime("%H%M%S")  # run start time formatted as HHMMSS
# Per-run output directory for figures/results, unique by date and time.
SAVE_PATH = home + "/kCSD_results/" + DAY + '/' + TIMESTR
def makemydir(directory):
    """
    Create a directory (including parents) if it does not exist yet and
    make it the current working directory.

    Parameters
    ----------
    directory: string
        Path of the directory to create and switch into.

    Returns
    -------
    None
    """
    # exist_ok=True only tolerates an already-existing directory; genuine
    # failures (e.g. permission errors) still raise, unlike the previous
    # blanket ``except OSError: pass`` which hid them until os.chdir failed.
    os.makedirs(directory, exist_ok=True)
    os.chdir(directory)
def save_source_code(save_path, timestr):
    """
    Save a copy of this script's source code, for reproducibility of runs.

    Parameters
    ----------
    save_path: string
        Directory in which the copy is written.
    timestr: float
        Timestamp appended to the saved file's name.

    Returns
    -------
    None
    """
    # Read and write through context managers so neither file handle leaks
    # (the original ``open(__file__).read()`` never closed the source file).
    with open(__file__) as src:
        source = src.read()
    with open(save_path + '/source_code_' + str(timestr), 'w') as sf:
        sf.write(source)
def csd_profile(x, seed):
    '''Ground-truth source: a single 1D Gaussian profile.

    Parameters
    ----------
    x: numpy array
        x coordinates at which the true source profile is evaluated.
    seed: list [r, mu]
        r is the source thickness, mu the x position of its peak.

    Returns
    -------
    numpy array
        Gaussian profile normalized so its maximum equals 1.
    '''
    width, center = seed
    stddev = width / 3.0  # three standard deviations span the source width
    gauss = (np.exp(-((x - center)**2) / (2 * stddev**2)) /
             (np.sqrt(2 * np.pi) * stddev))
    return gauss / np.max(gauss)
def targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
                   true_csd_xlims, ele_lims, title, h=0.25, sigma=0.3,
                   csd_res=100, method='cross-validation', Rs=None,
                   lambdas=None):
    '''
    Function investigating kCSD analysis for targeted bases.

    Parameters
    ----------
    val: object of the class ValidateKCSD.
    csd_at: numpy array
        Coordinates of ground truth data.
    true_csd: numpy array
        Values of ground truth data (true_csd).
    ele_pos: numpy array
        Locations of electrodes.
    pots: numpy array
        Potentials measured (calculated) on electrodes.
    n_src: int
        Number of basis sources.
    R: float
        Thickness of the groundtruth source.
    MU: float
        x coordinate of maximum amplitude of groundtruth source.
    true_csd_xlims: list
        Boundaries for ground truth space.
    ele_lims: list
        Boundaries for electrodes placement.
    title: string
        Name of the figure that is to be saved.
    h: float
        Thickness of analyzed cylindrical slice.
        Default: 0.25.
    sigma: float
        Space conductance of the medium.
        Default: 0.3.
    csd_res: int
        Resolution of ground truth.
        Default: 100.
    method: string
        Determines the method of regularization.
        Default: cross-validation.
    Rs: numpy 1D array
        Basis source parameter for crossvalidation.
        Default: None.
    lambdas: numpy 1D array
        Regularization parameter for crossvalidation.
        Default: None.

    Returns
    -------
    obj: object of the class KCSD1D
    k: object of the class ValidateKCSD1D
    '''
    k = ValidateKCSD1D(1, n_src_init=n_src, R_init=0.23,
                       ele_lims=ele_lims, est_xres=0.01,
                       true_csd_xlims=true_csd_xlims, sigma=sigma, h=h,
                       src_type='gauss')
    obj, est_csd = k.do_kcsd(pots, ele_pos, method=method, Rs=Rs,
                             lambdas=lambdas)
    # Compare the reconstruction with the analytic profile on the
    # estimation grid to report an RMS error in the figure title.
    test_csd = csd_profile(obj.estm_x, [R, MU])
    rms = val.calculate_rms(test_csd, est_csd)
    titl = "Lambda: %0.2E; R: %0.2f; RMS_Error: %0.2E;" % (obj.lambd, obj.R,
                                                           rms)
    fig = k.make_plot(csd_at, true_csd, obj, est_csd, ele_pos, pots, titl)
    # SAVE_PATH is absolute, so the original
    # ``os.path.join(SAVE_PATH, SAVE_PATH + '/' + title + '.png')`` collapsed
    # to its second argument anyway; write the intended path directly.
    fig.savefig(os.path.join(SAVE_PATH, title + '.png'))
    plt.close()
    return obj, k
def simulate_data(csd_profile, true_csd_xlims, R, MU, total_ele, ele_lims,
                  h=0.25, sigma=0.3, csd_res=100, noise=0):
    '''
    Generates groundtruth profiles and interpolates potentials.

    Parameters
    ----------
    csd_profile: function
        Function producing the CSD profile.
    true_csd_xlims: list
        Boundaries of the ground truth space.
    R: float
        Thickness of the groundtruth source.
    MU: float
        x coordinate of the maximum amplitude of the groundtruth source.
    total_ele: int
        Number of electrodes.
    ele_lims: list
        Boundaries for electrode placement.
    h: float
        Thickness of the analyzed cylindrical slice.
        Default: 0.25.
    sigma: float
        Space conductance of the medium.
        Default: 0.3.
    csd_res: int
        Resolution of the ground truth.
        Default: 100.
    noise: float
        Level of noise added to the potentials.
        Default: 0.

    Returns
    -------
    csd_at: numpy array
        Coordinates of ground truth data.
    true_csd: numpy array
        Values of ground truth data (true_csd).
    ele_pos: numpy array
        Locations of electrodes.
    pots: numpy array
        Potentials measured (calculated) on electrodes.
    validator: object of the class ValidateKCSD
    '''
    validator = ValidateKCSD(1)
    csd_at = np.linspace(true_csd_xlims[0], true_csd_xlims[1], csd_res)
    true_csd = csd_profile(csd_at, [R, MU])
    ele_pos = validator.generate_electrodes(total_ele=total_ele,
                                            ele_lims=ele_lims)
    pots = validator.calculate_potential(true_csd, csd_at, ele_pos, h, sigma)
    if noise is not None:
        pots = validator.add_noise(pots, 10, level=noise)
    return csd_at, true_csd, ele_pos, pots, validator
def structure_investigation(csd_profile, true_csd_xlims, n_src, R, MU,
                            total_ele, ele_lims, title, h=0.25, sigma=0.3,
                            csd_res=100, method='cross-validation', Rs=None,
                            lambdas=None, noise=0):
    '''
    Runs a single simulate-and-reconstruct experiment and returns the
    resulting kCSD object.

    Parameters
    ----------
    csd_profile: function
        Function to produce csd profile.
    true_csd_xlims: list
        Boundaries for ground truth space.
    n_src: int
        Number of basis sources.
    R: float
        Thickness of the groundtruth source.
    MU: float
        x coordinate of maximum amplitude of groundtruth source.
    total_ele: int
        Number of electrodes.
    ele_lims: list
        Boundaries for electrodes placement.
    title: string
        Name of the figure that is to be saved.
    h: float
        Thickness of analyzed cylindrical slice.
        Default: 0.25.
    sigma: float
        Space conductance of the medium.
        Default: 0.3.
    csd_res: int
        Resolution of ground truth.
        Default: 100.
    method: string
        Determines the method of regularization.
        Default: cross-validation.
    Rs: numpy 1D array
        Basis source parameter for crossvalidation.
        Default: None.
    lambdas: numpy 1D array
        Regularization parameter for crossvalidation.
        Default: None.
    noise: float
        Determines the level of noise in the data.
        Default: 0.

    Returns
    -------
    obj: object of the class KCSD1D
    '''
    # ``simulate_data`` builds its own ValidateKCSD instance; the extra
    # ``ValidateKCSD(1)`` the original constructed here was immediately
    # overwritten, so it has been dropped.
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         true_csd_xlims, R, MU,
                                                         total_ele, ele_lims,
                                                         h=h, sigma=sigma,
                                                         noise=noise)
    # Forward the caller's h/sigma/csd_res instead of the hard-coded
    # literals the original passed (which silently ignored these params).
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
                            true_csd_xlims, ele_lims, title, h=h,
                            sigma=sigma, csd_res=csd_res, method=method,
                            Rs=Rs, lambdas=lambdas)
    return obj
def plot_eigenvalues(eigenvalues, save_path, title):
    '''
    Creates plot of eigenvalues of kernel matrix (k_pot).

    Parameters
    ----------
    eigenvalues: numpy array
        Eigenvalues of k_pot matrix.
    save_path: string
        Directory.
    title: string
        Title of the plot.

    Returns
    -------
    None
    '''
    fig = plt.figure()
    plt.plot(eigenvalues, '--', marker='.')
    plt.title('Eigenvalue decomposition of kernel matrix. ele_lims=basis_lims')
    plt.xlabel('Number of components')
    plt.ylabel('Eigenvalues')
    plt.show()
    # The original joined ``save_path`` with a name that already embedded
    # ``save_path``; for relative directories that duplicated the prefix.
    # Build the file name once and join it with the directory.
    file_name = 'eigenvalues_for_' + title + '.png'
    fig.savefig(os.path.join(save_path, file_name))
    plt.close()
def plot_eigenvectors(eigenvectors, save_path, title):
    """
    Creates plot of eigenvectors of kernel matrix (k_pot).

    Parameters
    ----------
    eigenvectors: numpy array
        Eigenvectors of k_pot matrix.
    save_path: string
        Directory.
    title: string
        Title of the plot.

    Returns
    -------
    None
    """
    fig = plt.figure(figsize=(15, 15))
    plt.suptitle('Eigenvalue decomposition of kernel matrix for different '
                 'number of basis sources')
    # One subplot per eigenvector, laid out in two columns.
    for i in range(eigenvectors.shape[1]):
        plt.subplot(int(eigenvectors.shape[1]/2) + 1, 2, i + 1)
        plt.plot(eigenvectors[:, i].T, '--', marker='.')
        plt.ylabel('Eigenvectors')
        plt.title(r'$v_' + str(i + 1) + '$')
        plt.xlabel('Number of components')
    plt.tight_layout()
    plt.show()
    # The original joined ``save_path`` with a name that already embedded
    # ``save_path``, duplicating the prefix for relative directories.
    file_name = 'eigenvectors_for_' + title + '.png'
    fig.savefig(os.path.join(save_path, file_name))
    plt.close()
def modified_bases(val, pots, ele_pos, n_src, title=None, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=1, R=0.2, MU=0.25,
                   method='cross-validation', Rs=None, lambdas=None):
    '''
    Runs a kCSD reconstruction with the basis restricted to [xmin, xmax].

    Parameters
    ----------
    val: object of the class ValidateKCSD1D
    pots: numpy array
        Potentials measured (calculated) on electrodes.
    ele_pos: numpy array
        Locations of electrodes.
    n_src: int
        Number of basis sources.
    title: string
        Title of the plot.
    h: float
        Thickness of analyzed cylindrical slice.
        Default: 0.25.
    sigma: float
        Space conductance of the medium.
        Default: 0.3.
    gdx: float
        Space increments in the estimation space.
        Default: 0.035.
    ext_x: float
        Length of space extension: xmin-ext_x ... xmax+ext_x.
        Default: 0.
    xmin: float
        Boundaries for CSD estimation space.
    xmax: float
        boundaries for CSD estimation space.
    R: float
        Thickness of the groundtruth source.
        Default: 0.2.
    MU: float
        Central position of Gaussian source
        Default: 0.25.
    method: string
        Determines the method of regularization.
        Default: cross-validation.
    Rs: numpy 1D array
        Basis source parameter for crossvalidation.
        Default: None.
    lambdas: numpy 1D array
        Regularization parameter for crossvalidation.
        Default: None.

    Returns
    -------
    obj_m: object of the class KCSD1D
    '''
    # KCSD1D expects potentials as a column vector (one column per trial).
    pots = pots.reshape((len(ele_pos), 1))
    obj_m = KCSD1D(ele_pos, pots, src_type='gauss', sigma=sigma, h=h, gdx=gdx,
                   n_src_init=n_src, ext_x=ext_x, xmin=xmin, xmax=xmax)
    if method == 'cross-validation':
        obj_m.cross_validate(Rs=Rs, lambdas=lambdas)
    elif method == 'L-curve':
        obj_m.L_curve(Rs=Rs, lambdas=lambdas)
    # NOTE(review): est_csd/test_csd/rms are computed but unused since the
    # plotting code below was commented out — confirm whether
    # ``obj_m.values('CSD')`` has needed side effects before removing them.
    est_csd = obj_m.values('CSD')
    test_csd = csd_profile(obj_m.estm_x, [R, MU])
    rms = val.calculate_rms(test_csd, est_csd)
    # titl = "Lambda: %0.2E; R: %0.2f; RMS_Error: %0.2E;" % (obj_m.lambd,
    #                                                        obj_m.R, rms)
    # fig = k.make_plot(csd_at, true_csd, obj_m, est_csd, ele_pos, pots, titl)
    # save_as = (SAVE_PATH)
    # fig.savefig(os.path.join(SAVE_PATH, save_as + '/' + title + '.png'))
    # plt.close()
    # ss = SpectralStructure(obj_m)
    # eigenvectors, eigenvalues = ss.evd()
    return obj_m
def plot_k_interp_cross_v(k_icross, eigenvectors, save_path, title):
    """
    Creates plot of product of cross kernel vectors and eigenvectors for
    different number of basis sources

    Parameters
    ----------
    k_icross: numpy array
        List of cross kernel matrixes for different number of basis sources.
    eigenvectors: numpy array
        Eigenvectors of k_pot matrix.
    save_path: string
        Directory.
    title: string
        Name of the figure that is to be saved.

    Returns
    -------
    None
    """
    fig = plt.figure(figsize=(15, 15))
    # NOTE(review): the loop runs over eigenvectors.shape[0] while the
    # subplot grid is sized from k_icross.shape[1]; these agree only when
    # the matrices are compatible — confirm the intended dimension.
    for i in range(eigenvectors.shape[0]):
        plt.subplot(int(k_icross.shape[1]/2) + 1, 2, i + 1)
        plt.plot(np.dot(k_icross, eigenvectors[:, i]), '--',
                 marker='.')
        plt.title(r'$\tilde{K}*v_' + str(i + 1) + '$')
        # plt.ylabel('Product K~V')
        plt.xlabel('Number of estimation points')
    fig.tight_layout()
    plt.show()
    # Figures go into a dedicated subdirectory; makemydir also chdirs there.
    save_path = save_path + '/cross_kernel'
    makemydir(save_path)
    save_as = (save_path + '/cross_kernel_eigenvector_product' + title)
    fig.savefig(os.path.join(save_path, save_as+'.png'))
    plt.close()
if __name__ == '__main__':
    makemydir(SAVE_PATH)
    save_source_code(SAVE_PATH, time.strftime("%Y%m%d-%H%M%S"))
    # NOTE(review): CSD_SEED is defined but never used below.
    CSD_SEED = 15
    N_SRC = 64
    ELE_LIMS = [0, 1.]  # range of electrodes space
    TRUE_CSD_XLIMS = [0., 1.]
    TOTAL_ELE = 12
    noise = 0
    method = 'cross-validation'
    Rs = None
    lambdas = None
    # A: Gaussian source centred at 0.25, inside the electrode span [0, 1].
    R = 0.2
    MU = 0.25
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         TRUE_CSD_XLIMS, R, MU,
                                                         TOTAL_ELE, ELE_LIMS,
                                                         noise=noise)
    title = 'A_basis_lims_0_1'
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
                            TRUE_CSD_XLIMS, ELE_LIMS, title, method=method, Rs=Rs,
                            lambdas=lambdas)
    ss = SpectralStructure(obj)
    eigenvectors, eigenvalues = ss.evd()
    plot_eigenvalues(eigenvalues, SAVE_PATH, title)
    plot_eigenvectors(eigenvectors, SAVE_PATH, title)
    plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
    # A.2: same data, basis restricted to [0, 0.5].
    title = 'A_basis_lims_0_0_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # A.2.b: as A.2 with half the basis sources.
    # NOTE(review): N_SRC/2 is a float under Python 3 — confirm KCSD1D
    # accepts a non-integer n_src_init.
    title = 'A_basis_lims_0_0_5_less_sources'
    modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # B: source centred at 1.25, OUTSIDE the electrode span [0, 1].
    TRUE_CSD_XLIMS = [0., 1.5]
    R = 0.2
    MU = 1.25
    csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
                                                         TRUE_CSD_XLIMS, R, MU,
                                                         TOTAL_ELE, ELE_LIMS,
                                                         noise=noise)
    title = 'B_basis_lims_0_1'
    obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
                            TRUE_CSD_XLIMS, ELE_LIMS, title, method=method, Rs=Rs,
                            lambdas=lambdas)
    ss = SpectralStructure(obj)
    eigenvectors, eigenvalues = ss.evd()
    plot_eigenvalues(eigenvalues, SAVE_PATH, title)
    plot_eigenvectors(eigenvectors, SAVE_PATH, title)
    plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
    # B.2: basis targeted at the source region [1, 1.5].
    title = 'B_basis_lims_1_1_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # B.2.b: as B.2 with half the basis sources.
    title = 'B_basis_lims_1_1_5_less_sources'
    modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
    # B.3: basis covering the whole ground-truth space [0, 1.5].
    title = 'B_basis_lims_0_1_5'
    modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
                   gdx=0.01, ext_x=0, xmin=0, xmax=1.5, method=method, Rs=Rs,
                   lambdas=lambdas)
| 31.749518 | 82 | 0.585933 | import os
from os.path import expanduser
import numpy as np
import matplotlib.pyplot as plt
import datetime
import time
from kcsd import ValidateKCSD, ValidateKCSD1D, SpectralStructure, KCSD1D
__abs_file__ = os.path.abspath(__file__)
home = expanduser('~')
DAY = datetime.datetime.now()
DAY = DAY.strftime('%Y%m%d')
TIMESTR = time.strftime("%H%M%S")
SAVE_PATH = home + "/kCSD_results/" + DAY + '/' + TIMESTR
def makemydir(directory):
try:
os.makedirs(directory)
except OSError:
pass
os.chdir(directory)
def save_source_code(save_path, timestr):
with open(save_path + '/source_code_' + str(timestr), 'w') as sf:
sf.write(open(__file__).read())
def csd_profile(x, seed):
r = seed[0]
mu = seed[1]
STDDEV = r/3.0
gauss = (np.exp(-((x - mu)**2)/(2 * STDDEV**2)) /
(np.sqrt(2 * np.pi) * STDDEV)**1)
gauss /= np.max(gauss)
return gauss
def targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
true_csd_xlims, ele_lims, title, h=0.25, sigma=0.3,
csd_res=100, method='cross-validation', Rs=None,
lambdas=None):
k = ValidateKCSD1D(1, n_src_init=n_src, R_init=0.23,
ele_lims=ele_lims, est_xres=0.01,
true_csd_xlims=true_csd_xlims, sigma=sigma, h=h,
src_type='gauss')
obj, est_csd = k.do_kcsd(pots, ele_pos, method=method, Rs=Rs,
lambdas=lambdas)
test_csd = csd_profile(obj.estm_x, [R, MU])
rms = val.calculate_rms(test_csd, est_csd)
titl = "Lambda: %0.2E; R: %0.2f; RMS_Error: %0.2E;" % (obj.lambd, obj.R,
rms)
fig = k.make_plot(csd_at, true_csd, obj, est_csd, ele_pos, pots, titl)
save_as = (SAVE_PATH)
fig.savefig(os.path.join(SAVE_PATH, save_as + '/' + title + '.png'))
plt.close()
return obj, k
def simulate_data(csd_profile, true_csd_xlims, R, MU, total_ele, ele_lims,
h=0.25, sigma=0.3, csd_res=100, noise=0):
val = ValidateKCSD(1)
csd_at = np.linspace(true_csd_xlims[0], true_csd_xlims[1], csd_res)
true_csd = csd_profile(csd_at, [R, MU])
ele_pos = val.generate_electrodes(total_ele=total_ele, ele_lims=ele_lims)
pots = val.calculate_potential(true_csd, csd_at, ele_pos, h, sigma)
if noise is not None:
pots = val.add_noise(pots, 10, level=noise)
return csd_at, true_csd, ele_pos, pots, val
def structure_investigation(csd_profile, true_csd_xlims, n_src, R, MU,
total_ele, ele_lims, title, h=0.25, sigma=0.3,
csd_res=100, method='cross-validation', Rs=None,
lambdas=None, noise=0):
val = ValidateKCSD(1)
csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
true_csd_xlims, R, MU,
total_ele, ele_lims,
h=h, sigma=sigma,
noise=noise)
obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, n_src, R, MU,
true_csd_xlims, ele_lims, title, h=0.25,
sigma=0.3, csd_res=100, method=method, Rs=Rs,
lambdas=lambdas)
return obj
def plot_eigenvalues(eigenvalues, save_path, title):
fig = plt.figure()
plt.plot(eigenvalues, '--', marker='.')
plt.title('Eigenvalue decomposition of kernel matrix. ele_lims=basis_lims')
plt.xlabel('Number of components')
plt.ylabel('Eigenvalues')
plt.show()
save_as = (save_path + '/eigenvalues_for_' + title)
fig.savefig(os.path.join(save_path, save_as+'.png'))
plt.close()
def plot_eigenvectors(eigenvectors, save_path, title):
fig = plt.figure(figsize=(15, 15))
plt.suptitle('Eigenvalue decomposition of kernel matrix for different '
'number of basis sources')
for i in range(eigenvectors.shape[1]):
plt.subplot(int(eigenvectors.shape[1]/2) + 1, 2, i + 1)
plt.plot(eigenvectors[:, i].T, '--', marker='.')
plt.ylabel('Eigenvectors')
plt.title(r'$v_' + str(i + 1) + '$')
plt.xlabel('Number of components')
plt.tight_layout()
plt.show()
save_as = (save_path + '/eigenvectors_for_' + title)
fig.savefig(os.path.join(save_path, save_as+'.png'))
plt.close()
def modified_bases(val, pots, ele_pos, n_src, title=None, h=0.25, sigma=0.3,
gdx=0.01, ext_x=0, xmin=0, xmax=1, R=0.2, MU=0.25,
method='cross-validation', Rs=None, lambdas=None):
pots = pots.reshape((len(ele_pos), 1))
obj_m = KCSD1D(ele_pos, pots, src_type='gauss', sigma=sigma, h=h, gdx=gdx,
n_src_init=n_src, ext_x=ext_x, xmin=xmin, xmax=xmax)
if method == 'cross-validation':
obj_m.cross_validate(Rs=Rs, lambdas=lambdas)
elif method == 'L-curve':
obj_m.L_curve(Rs=Rs, lambdas=lambdas)
est_csd = obj_m.values('CSD')
test_csd = csd_profile(obj_m.estm_x, [R, MU])
rms = val.calculate_rms(test_csd, est_csd)
return obj_m
def plot_k_interp_cross_v(k_icross, eigenvectors, save_path, title):
fig = plt.figure(figsize=(15, 15))
for i in range(eigenvectors.shape[0]):
plt.subplot(int(k_icross.shape[1]/2) + 1, 2, i + 1)
plt.plot(np.dot(k_icross, eigenvectors[:, i]), '--',
marker='.')
plt.title(r'$\tilde{K}*v_' + str(i + 1) + '$')
plt.xlabel('Number of estimation points')
fig.tight_layout()
plt.show()
save_path = save_path + '/cross_kernel'
makemydir(save_path)
save_as = (save_path + '/cross_kernel_eigenvector_product' + title)
fig.savefig(os.path.join(save_path, save_as+'.png'))
plt.close()
if __name__ == '__main__':
makemydir(SAVE_PATH)
save_source_code(SAVE_PATH, time.strftime("%Y%m%d-%H%M%S"))
CSD_SEED = 15
N_SRC = 64
ELE_LIMS = [0, 1.]
TRUE_CSD_XLIMS = [0., 1.]
TOTAL_ELE = 12
noise = 0
method = 'cross-validation'
Rs = None
lambdas = None
R = 0.2
MU = 0.25
csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
TRUE_CSD_XLIMS, R, MU,
TOTAL_ELE, ELE_LIMS,
noise=noise)
title = 'A_basis_lims_0_1'
obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
TRUE_CSD_XLIMS, ELE_LIMS, title, method=method, Rs=Rs,
lambdas=lambdas)
ss = SpectralStructure(obj)
eigenvectors, eigenvalues = ss.evd()
plot_eigenvalues(eigenvalues, SAVE_PATH, title)
plot_eigenvectors(eigenvectors, SAVE_PATH, title)
plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
title = 'A_basis_lims_0_0_5'
modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
lambdas=lambdas)
title = 'A_basis_lims_0_0_5_less_sources'
modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
gdx=0.01, ext_x=0, xmin=0, xmax=0.5, method=method, Rs=Rs,
lambdas=lambdas)
TRUE_CSD_XLIMS = [0., 1.5]
R = 0.2
MU = 1.25
csd_at, true_csd, ele_pos, pots, val = simulate_data(csd_profile,
TRUE_CSD_XLIMS, R, MU,
TOTAL_ELE, ELE_LIMS,
noise=noise)
title = 'B_basis_lims_0_1'
obj, k = targeted_basis(val, csd_at, true_csd, ele_pos, pots, N_SRC, R, MU,
TRUE_CSD_XLIMS, ELE_LIMS, title, method=method, Rs=Rs,
lambdas=lambdas)
ss = SpectralStructure(obj)
eigenvectors, eigenvalues = ss.evd()
plot_eigenvalues(eigenvalues, SAVE_PATH, title)
plot_eigenvectors(eigenvectors, SAVE_PATH, title)
plot_k_interp_cross_v(obj.k_interp_cross, eigenvectors, SAVE_PATH, title)
title = 'B_basis_lims_1_1_5'
modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
lambdas=lambdas)
title = 'B_basis_lims_1_1_5_less_sources'
modified_bases(val, pots, ele_pos, N_SRC/2, title, h=0.25, sigma=0.3,
gdx=0.01, ext_x=0, xmin=1, xmax=1.5, method=method, Rs=Rs,
lambdas=lambdas)
title = 'B_basis_lims_0_1_5'
modified_bases(val, pots, ele_pos, N_SRC, title, h=0.25, sigma=0.3,
gdx=0.01, ext_x=0, xmin=0, xmax=1.5, method=method, Rs=Rs,
lambdas=lambdas)
| true | true |
f71f873815e728bc7fb92f7c3c25537c688114fb | 58,117 | py | Python | src/train_eval.py | chanyh0/PyTorch-StudioGAN | 5a912affc1ec975d97a33a12d1c96d05d4b883f0 | [
"MIT"
] | 75 | 2021-02-25T20:04:53.000Z | 2022-03-12T12:12:58.000Z | src/train_eval.py | chanyh0/PyTorch-StudioGAN | 5a912affc1ec975d97a33a12d1c96d05d4b883f0 | [
"MIT"
] | 1 | 2021-08-08T13:12:27.000Z | 2021-08-08T13:12:27.000Z | src/train_eval.py | chanyh0/PyTorch-StudioGAN | 5a912affc1ec975d97a33a12d1c96d05d4b883f0 | [
"MIT"
] | 7 | 2021-03-02T18:47:45.000Z | 2022-01-26T13:49:25.000Z | # PyTorch StudioGAN: https://github.com/POSTECH-CVLab/PyTorch-StudioGAN
# The MIT License (MIT)
# See license file or visit https://github.com/POSTECH-CVLab/PyTorch-StudioGAN for details
# train_eval.py
import numpy as np
import sys
import glob
from scipy import ndimage
from os.path import join
from PIL import Image
from tqdm import tqdm
from datetime import datetime
from metrics.IS import calculate_incep_score
from metrics.FID import calculate_fid_score
from metrics.F_beta import calculate_f_beta_score
from metrics.Accuracy import calculate_accuracy
from utils.ada import augment
from utils.biggan_utils import interp
from utils.sample import sample_latents, sample_1hot, make_mask, target_class_sampler
from utils.misc import *
from utils.losses import calc_derv4gp, calc_derv4dra, calc_derv, latent_optimise
from utils.losses import Conditional_Contrastive_loss, Proxy_NCA_loss, NT_Xent_loss
from utils.diff_aug import DiffAugment
from utils.cr_diff_aug import CR_DiffAug
import torch
import torch.nn as nn
from torch.nn import DataParallel
import torch.nn.functional as F
import torchvision
from torchvision import transforms
# Filename template for checkpoints saved with their evaluation metrics.
SAVE_FORMAT = 'step={step:0>3}-Inception_mean={Inception_mean:<.4}-Inception_std={Inception_std:<.4}-FID={FID:<.5}.pth'
# Progress-log line template, filled in every ``print_every`` steps.
LOG_FORMAT = (
    "Step: {step:>7} "
    "Progress: {progress:<.1%} "
    "Elapsed: {elapsed} "
    "temperature: {temperature:<.6} "
    "ada_p: {ada_p:<.6} "
    "Discriminator_loss: {dis_loss:<.6} "
    "Generator_loss: {gen_loss:<.6} "
)
def set_temperature(conditional_strategy, tempering_type, start_temperature, end_temperature, step_count, tempering_step, total_step):
    """Return the contrastive-loss temperature for the current step.

    Tempering applies only to the 'ContraGAN' strategy: 'continuous'
    interpolates linearly from ``start_temperature`` to ``end_temperature``
    over ``total_step`` steps, 'discrete' raises it in ``tempering_step``
    equal jumps, and any other type keeps it constant. Every other
    strategy gets the placeholder string 'no'.
    """
    if conditional_strategy != 'ContraGAN':
        return 'no'
    span = end_temperature - start_temperature
    if tempering_type == 'continuous':
        return start_temperature + step_count * span / total_step
    if tempering_type == 'discrete':
        tempering_interval = total_step // (tempering_step + 1)
        return start_temperature + (step_count // tempering_interval) * span / tempering_step
    return start_temperature
class Train_Eval(object):
    def __init__(self, run_name, best_step, dataset_name, eval_type, logger, writer, n_gpus, gen_model, dis_model, inception_model,
                 Gen_copy, Gen_ema, train_dataset, eval_dataset, train_dataloader, eval_dataloader, freeze_layers, conditional_strategy,
                 pos_collected_numerator, z_dim, num_classes, hypersphere_dim, d_spectral_norm, g_spectral_norm, G_optimizer, D_optimizer,
                 batch_size, g_steps_per_iter, d_steps_per_iter, accumulation_steps, total_step, G_loss, D_loss, contrastive_lambda, margin,
                 tempering_type, tempering_step, start_temperature, end_temperature, weight_clipping_for_dis, weight_clipping_bound,
                 gradient_penalty_for_dis, gradient_penalty_lambda, deep_regret_analysis_for_dis, regret_penalty_lambda, cr, cr_lambda, bcr,
                 real_lambda, fake_lambda, zcr, gen_lambda, dis_lambda, sigma_noise, diff_aug, ada, prev_ada_p, ada_target, ada_length, prior,
                 truncated_factor, ema, latent_op, latent_op_rate, latent_op_step, latent_op_step4eval, latent_op_alpha, latent_op_beta,
                 latent_norm_reg_weight, default_device, print_every, save_every, checkpoint_dir, evaluate, mu, sigma, best_fid,
                 best_fid_checkpoint_path, mixed_precision, train_config, model_config, gamma, steps):
        # Store the full training/evaluation configuration verbatim on the
        # instance, then build the auxiliary objects (conditional losses,
        # AMP scaler, per-dataset evaluation sizes) used by train()/eval.
        self.run_name = run_name
        self.best_step = best_step
        self.dataset_name = dataset_name
        self.eval_type = eval_type
        self.logger = logger
        self.writer = writer
        self.n_gpus = n_gpus

        self.gen_model = gen_model
        self.dis_model = dis_model
        self.inception_model = inception_model
        self.Gen_copy = Gen_copy
        self.Gen_ema = Gen_ema

        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.train_dataloader = train_dataloader
        self.eval_dataloader = eval_dataloader

        self.freeze_layers = freeze_layers

        self.conditional_strategy = conditional_strategy
        self.pos_collected_numerator = pos_collected_numerator
        self.z_dim = z_dim
        self.num_classes = num_classes
        self.hypersphere_dim = hypersphere_dim
        self.d_spectral_norm = d_spectral_norm
        self.g_spectral_norm = g_spectral_norm

        self.G_optimizer = G_optimizer
        self.D_optimizer = D_optimizer
        self.batch_size = batch_size
        self.g_steps_per_iter = g_steps_per_iter
        self.d_steps_per_iter = d_steps_per_iter
        self.accumulation_steps = accumulation_steps
        self.total_step = total_step

        self.G_loss = G_loss
        self.D_loss = D_loss
        self.contrastive_lambda = contrastive_lambda
        self.margin = margin
        self.tempering_type = tempering_type
        self.tempering_step = tempering_step
        self.start_temperature = start_temperature
        self.end_temperature = end_temperature
        self.weight_clipping_for_dis = weight_clipping_for_dis
        self.weight_clipping_bound = weight_clipping_bound
        self.gradient_penalty_for_dis = gradient_penalty_for_dis
        self.gradient_penalty_lambda = gradient_penalty_lambda
        self.deep_regret_analysis_for_dis = deep_regret_analysis_for_dis
        self.regret_penalty_lambda = regret_penalty_lambda
        self.cr = cr
        self.cr_lambda = cr_lambda
        self.bcr = bcr
        self.real_lambda = real_lambda
        self.fake_lambda = fake_lambda
        self.zcr = zcr
        self.gen_lambda = gen_lambda
        self.dis_lambda = dis_lambda
        self.sigma_noise = sigma_noise

        self.diff_aug = diff_aug
        self.ada = ada
        self.prev_ada_p = prev_ada_p
        self.ada_target = ada_target
        self.ada_length = ada_length
        self.prior = prior
        self.truncated_factor = truncated_factor
        self.ema = ema
        self.latent_op = latent_op
        self.latent_op_rate = latent_op_rate
        self.latent_op_step = latent_op_step
        self.latent_op_step4eval = latent_op_step4eval
        self.latent_op_alpha = latent_op_alpha
        self.latent_op_beta = latent_op_beta
        self.latent_norm_reg_weight = latent_norm_reg_weight

        self.default_device = default_device
        self.print_every = print_every
        self.save_every = save_every
        self.checkpoint_dir = checkpoint_dir
        self.evaluate = evaluate
        self.mu = mu
        self.sigma = sigma
        self.best_fid = best_fid
        self.best_fid_checkpoint_path = best_fid_checkpoint_path
        self.mixed_precision = mixed_precision
        self.train_config = train_config
        self.model_config = model_config

        # Derived state used throughout training.
        self.start_time = datetime.now()
        self.l2_loss = torch.nn.MSELoss()
        self.ce_loss = torch.nn.CrossEntropyLoss()
        self.policy = "color,translation,cutout"
        self.steps = steps
        self.gamma = gamma

        # NOTE(review): ``sampler`` is computed but never stored or used
        # in this constructor — confirm whether it is needed.
        sampler = define_sampler(self.dataset_name, self.conditional_strategy)

        # Fail fast on inconsistent flag combinations.
        check_flag_1(self.tempering_type, self.pos_collected_numerator, self.conditional_strategy, self.diff_aug, self.ada,
                     self.mixed_precision, self.gradient_penalty_for_dis, self.deep_regret_analysis_for_dis, self.cr, self.bcr, self.zcr)

        # Conditioning-strategy-specific auxiliary loss objects.
        if self.conditional_strategy == 'ContraGAN':
            self.contrastive_criterion = Conditional_Contrastive_loss(self.default_device, self.batch_size, self.pos_collected_numerator)
        elif self.conditional_strategy == 'Proxy_NCA_GAN':
            # Proxy-NCA needs the discriminator's embedding layer, which is
            # wrapped in ``.module`` when the model is DataParallel.
            if isinstance(self.dis_model, DataParallel):
                self.embedding_layer = self.dis_model.module.embedding
            else:
                self.embedding_layer = self.dis_model.embedding
            self.NCA_criterion = Proxy_NCA_loss(self.default_device, self.embedding_layer, self.num_classes, self.batch_size)
        elif self.conditional_strategy == 'NT_Xent_GAN':
            self.NT_Xent_criterion = NT_Xent_loss(self.default_device, self.batch_size)
        else:
            pass

        if self.mixed_precision:
            self.scaler = torch.cuda.amp.GradScaler()

        # Number of images used for IS/FID evaluation, per dataset/split.
        if self.dataset_name in ["imagenet"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name in ["imagenet_less_0.25"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name in ["imagenet_less"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name == "tiny_imagenet":
            self.num_eval = {'train':50000, 'valid':10000}
        elif self.dataset_name == "cifar10":
            self.num_eval = {'train':50000, 'test':10000}
        elif self.dataset_name == "cifar10_less":
            self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
        elif self.dataset_name in ["cifar100_less"]:
            self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
        elif self.dataset_name == "custom":
            num_train_images = len(self.train_dataset.data)
            num_eval_images = len(self.eval_dataset.data)
            self.num_eval = {'train':num_train_images, 'valid':num_eval_images}
        else:
            raise NotImplementedError
################################################################################################################################
def train(self, current_step, total_step):
self.dis_model.train()
self.gen_model.train()
if self.Gen_copy is not None:
self.Gen_copy.train()
self.logger.info('Start training....')
step_count = current_step
train_iter = iter(self.train_dataloader)
if self.ada:
self.ada_augment = torch.tensor([0.0, 0.0], device = self.default_device)
if self.prev_ada_p is not None:
self.ada_aug_p = self.prev_ada_p
else:
self.ada_aug_p = 0.0
self.ada_aug_step = self.ada_target/self.ada_length
else:
self.ada_aug_p = 'No'
while step_count <= total_step:
# ================== TRAIN D ================== #
toggle_grad(self.dis_model, True, freeze_layers=self.freeze_layers)
toggle_grad(self.gen_model, False, freeze_layers=-1)
t = set_temperature(self.conditional_strategy, self.tempering_type, self.start_temperature, self.end_temperature, step_count, self.tempering_step, total_step)
for step_index in range(self.d_steps_per_iter):
self.D_optimizer.zero_grad()
for acml_index in range(self.accumulation_steps):
try:
real_images, real_labels = next(train_iter)
except StopIteration:
train_iter = iter(self.train_dataloader)
real_images, real_labels = next(train_iter)
real_images, real_labels = real_images.to(self.default_device), real_labels.to(self.default_device)
with torch.cuda.amp.autocast() if self.mixed_precision else dummy_context_mgr() as mpc:
if self.diff_aug:
real_images = DiffAugment(real_images, policy=self.policy)
if self.ada:
real_images, _ = augment(real_images, self.ada_aug_p)
if self.zcr:
zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
self.sigma_noise, self.default_device)
else:
zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
None, self.default_device)
if self.latent_op:
zs = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
self.latent_op_step, self.latent_op_rate, self.latent_op_alpha, self.latent_op_beta,
False, self.default_device)
fake_images = self.gen_model(zs, fake_labels)
if self.diff_aug:
fake_images = DiffAugment(fake_images, policy=self.policy)
if self.ada:
fake_images, _ = augment(fake_images, self.ada_aug_p)
if self.conditional_strategy == "ACGAN":
cls_out_real, dis_out_real = self.dis_model(real_images, real_labels)
cls_out_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real = self.dis_model(real_images, real_labels)
dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
real_cls_mask = make_mask(real_labels, self.num_classes, self.default_device)
cls_proxies_real, cls_embed_real, dis_out_real = self.dis_model(real_images, real_labels)
cls_proxies_fake, cls_embed_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == 'ProjGAN_adv':
dis_out_real_prefc = self.dis_model(real_images, real_labels, fc=False)
dis_out_fake_prefc = self.dis_model(fake_images, fake_labels, fc=False)
loss_real = lambda x: torch.mean(F.relu(1. - x))
loss_fake = lambda x: torch.mean(F.relu(1. + x))
dis_out_real_prefc_adv = PGD(dis_out_real_prefc, real_labels, loss_real, self.dis_model, steps=self.steps, gamma=self.gamma)
dis_out_fake_prefc_adv = PGD(dis_out_fake_prefc, fake_labels, loss_real, self.dis_model, steps=self.steps, gamma=self.gamma)
fake_images = fake_images.detach()
dis_out_real_prefc = self.dis_model(real_images, real_labels, fc=False, only_fc=False)
dis_out_fake_prefc = self.dis_model(fake_images, fake_labels, fc=False, only_fc=False)
dis_out_real = self.dis_model(dis_out_real_prefc, real_labels, only_fc=True, fc=True)
dis_out_fake = self.dis_model(dis_out_fake_prefc, fake_labels, only_fc=True, fc=True)
dis_out_real_adv = self.dis_model(dis_out_real_prefc_adv, real_labels, only_fc=True)
dis_out_fake_adv = self.dis_model(dis_out_fake_prefc_adv, fake_labels, only_fc=True)
else:
raise NotImplementedError
#if self.conditional_strategy != 'ProjGAN_adv':
if self.conditional_strategy != 'ProjGAN_adv':
dis_acml_loss = self.D_loss(dis_out_real, dis_out_fake)
else:
dis_acml_loss = (self.D_loss(dis_out_real, dis_out_fake) + self.D_loss(dis_out_real_adv, dis_out_fake_adv)) / 2
if self.conditional_strategy == "ACGAN":
dis_acml_loss += (self.ce_loss(cls_out_real, real_labels) + self.ce_loss(cls_out_fake, fake_labels))
elif self.conditional_strategy == "NT_Xent_GAN":
real_images_aug = CR_DiffAug(real_images)
_, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_acml_loss += self.contrastive_lambda*self.NT_Xent_criterion(cls_embed_real, cls_embed_real_aug, t)
elif self.conditional_strategy == "Proxy_NCA_GAN":
dis_acml_loss += self.contrastive_lambda*self.NCA_criterion(cls_embed_real, cls_proxies_real, real_labels)
elif self.conditional_strategy == "ContraGAN":
dis_acml_loss += self.contrastive_lambda*self.contrastive_criterion(cls_embed_real, cls_proxies_real,
real_cls_mask, real_labels, t, self.margin)
else:
pass
if self.cr:
real_images_aug = CR_DiffAug(real_images)
if self.conditional_strategy == "ACGAN":
cls_out_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_consistency_loss = self.l2_loss(cls_out_real, cls_out_real_aug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
_, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_consistency_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
else:
raise NotImplementedError
consistency_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
consistency_loss += cls_consistency_loss
dis_acml_loss += self.cr_lambda*consistency_loss
if self.bcr:
real_images_aug = CR_DiffAug(real_images)
fake_images_aug = CR_DiffAug(fake_images)
if self.conditional_strategy == "ACGAN":
cls_out_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_out_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_out_real, cls_out_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_out_fake, cls_out_fake_aug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_real_aug, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_proxies_fake_aug, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
else:
raise NotImplementedError
bcr_real_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
bcr_fake_loss = self.l2_loss(dis_out_fake, dis_out_fake_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
bcr_real_loss += cls_bcr_real_loss
bcr_fake_loss += cls_bcr_fake_loss
dis_acml_loss += self.real_lambda*bcr_real_loss + self.fake_lambda*bcr_fake_loss
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
if self.conditional_strategy == "ACGAN":
cls_out_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_out_fake, cls_out_fake_zaug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_fake_zaug, cls_embed_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_zaug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
else:
raise NotImplementedError
zcr_dis_loss = self.l2_loss(dis_out_fake, dis_out_fake_zaug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
zcr_dis_loss += cls_zcr_dis_loss
dis_acml_loss += self.dis_lambda*zcr_dis_loss
if self.gradient_penalty_for_dis:
dis_acml_loss += self.gradient_penalty_lambda*calc_derv4gp(self.dis_model, self.conditional_strategy, real_images,
fake_images, real_labels, self.default_device)
if self.deep_regret_analysis_for_dis:
dis_acml_loss += self.regret_penalty_lambda*calc_derv4dra(self.dis_model, self.conditional_strategy, real_images,
real_labels, self.default_device)
if self.ada:
ada_aug_data = torch.tensor((torch.sign(dis_out_real).sum().item(), dis_out_real.shape[0]), device = self.default_device)
self.ada_augment += ada_aug_data
if self.ada_augment[1] > (self.batch_size*4 - 1):
authen_out_signs, num_outputs = self.ada_augment.tolist()
r_t_stat = authen_out_signs/num_outputs
sign = 1 if r_t_stat > self.ada_target else -1
self.ada_aug_p += sign*self.ada_aug_step*num_outputs
self.ada_aug_p = min(1.0, max(0.0, self.ada_aug_p))
self.ada_augment.mul_(0.0)
dis_acml_loss = dis_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(dis_acml_loss).backward()
else:
dis_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.D_optimizer)
self.scaler.update()
else:
self.D_optimizer.step()
if self.weight_clipping_for_dis:
for p in self.dis_model.parameters():
p.data.clamp_(-self.weight_clipping_bound, self.weight_clipping_bound)
if step_count % self.print_every == 0 and step_count !=0 and self.logger:
if self.d_spectral_norm:
dis_sigmas = calculate_all_sn(self.dis_model)
self.writer.add_scalars('SN_of_dis', dis_sigmas, step_count)
# ================== TRAIN G ================== #
toggle_grad(self.dis_model, False, freeze_layers=-1)
toggle_grad(self.gen_model, True, freeze_layers=-1)
for step_index in range(self.g_steps_per_iter):
self.G_optimizer.zero_grad()
for acml_step in range(self.accumulation_steps):
with torch.cuda.amp.autocast() if self.mixed_precision else dummy_context_mgr() as mpc:
if self.zcr:
zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
self.sigma_noise, self.default_device)
else:
zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
None, self.default_device)
if self.latent_op:
zs, transport_cost = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
self.latent_op_step, self.latent_op_rate, self.latent_op_alpha,
self.latent_op_beta, True, self.default_device)
if not self.conditional_strategy == 'ProjGAN_adv':
fake_images = self.gen_model(zs, fake_labels)
else:
gen_out_prefc, labels_prefc = self.gen_model(zs, fake_labels, only_l1=True)
loss_fake = lambda x: -torch.mean(x)
gen_out_adv = PGD_G(gen_out_prefc, labels_prefc, fake_labels, loss_fake, self.gen_model, self.dis_model, steps=self.steps, gamma=self.gamma)
fake_images = self.gen_model(gen_out_prefc, labels_prefc, l1=False)
fake_images_adv = self.gen_model(gen_out_adv, labels_prefc, l1=False)
if self.diff_aug:
fake_images = DiffAugment(fake_images, policy=self.policy)
if self.ada:
fake_images, _ = augment(fake_images, self.ada_aug_p)
if self.conditional_strategy == "ACGAN":
cls_out_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
fake_cls_mask = make_mask(fake_labels, self.num_classes, self.default_device)
cls_proxies_fake, cls_embed_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == 'ProjGAN_adv':
dis_out_fake = self.dis_model(fake_images, fake_labels)
dis_out_adv = self.dis_model(fake_images_adv, fake_labels)
else:
raise NotImplementedError
gen_acml_loss = self.G_loss(dis_out_fake)
if self.latent_op:
gen_acml_loss += transport_cost*self.latent_norm_reg_weight
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
zcr_gen_loss = -1 * self.l2_loss(fake_images, fake_images_zaug)
gen_acml_loss += self.gen_lambda*zcr_gen_loss
if self.conditional_strategy == "ACGAN":
gen_acml_loss += self.ce_loss(cls_out_fake, fake_labels)
elif self.conditional_strategy == "ContraGAN":
gen_acml_loss += self.contrastive_lambda*self.contrastive_criterion(cls_embed_fake, cls_proxies_fake, fake_cls_mask, fake_labels, t, self.margin)
elif self.conditional_strategy == "Proxy_NCA_GAN":
gen_acml_loss += self.contrastive_lambda*self.NCA_criterion(cls_embed_fake, cls_proxies_fake, fake_labels)
elif self.conditional_strategy == "NT_Xent_GAN":
fake_images_aug = CR_DiffAug(fake_images)
_, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
gen_acml_loss += self.contrastive_lambda*self.NT_Xent_criterion(cls_embed_fake, cls_embed_fake_aug, t)
elif self.conditional_strategy == 'ProjGAN_adv':
gen_acml_loss = (self.G_loss(dis_out_fake) + self.G_loss(dis_out_adv)) / 2
else:
pass
gen_acml_loss = gen_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(gen_acml_loss).backward()
else:
gen_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.G_optimizer)
self.scaler.update()
else:
self.G_optimizer.step()
# if ema is True: we update parameters of the Gen_copy in adaptive way.
if self.ema:
self.Gen_ema.update(step_count)
step_count += 1
if step_count % self.print_every == 0 and self.logger:
log_message = LOG_FORMAT.format(step=step_count,
progress=step_count/total_step,
elapsed=elapsed_time(self.start_time),
temperature=t,
ada_p=self.ada_aug_p,
dis_loss=dis_acml_loss.item(),
gen_loss=gen_acml_loss.item(),
)
self.logger.info(log_message)
if self.g_spectral_norm:
gen_sigmas = calculate_all_sn(self.gen_model)
self.writer.add_scalars('SN_of_gen', gen_sigmas, step_count)
self.writer.add_scalars('Losses', {'discriminator': dis_acml_loss.item(),
'generator': gen_acml_loss.item()}, step_count)
if self.ada:
self.writer.add_scalar('ada_p', self.ada_aug_p, step_count)
if step_count % self.save_every == 0 or step_count == total_step:
if self.evaluate:
is_best = self.evaluation(step_count, False, "N/A")
self.save(step_count, is_best)
else:
self.save(step_count, False)
return step_count-1
################################################################################################################################
################################################################################################################################
def save(self, step, is_best):
    """Checkpoint G, D, and (if present) the EMA generator copy to self.checkpoint_dir.

    Keeps at most one checkpoint per tag ("current"/"best"): the previous file
    with the same tag is removed before the new one is written. A "best"
    checkpoint also refreshes the "current" one.

    Args:
        step: training step stamped into the checkpoint filename.
        is_best: True when this checkpoint achieved the best FID so far.
    """
    when = "best" if is_best is True else "current"
    # Serialize from eval mode so stateful layers are in a consistent state.
    self.dis_model.eval()
    self.gen_model.eval()
    if self.Gen_copy is not None:
        self.Gen_copy.eval()
    # Unwrap DataParallel so saved state_dict keys carry no "module." prefix.
    if isinstance(self.gen_model, DataParallel):
        gen = self.gen_model.module
        dis = self.dis_model.module
        if self.Gen_copy is not None:
            gen_copy = self.Gen_copy.module
    else:
        gen, dis = self.gen_model, self.dis_model
        if self.Gen_copy is not None:
            gen_copy = self.Gen_copy

    # Bundle weights with optimizer state and bookkeeping needed to resume.
    g_states = {'seed': self.train_config['seed'], 'run_name': self.run_name, 'step': step, 'best_step': self.best_step,
                'state_dict': gen.state_dict(), 'optimizer': self.G_optimizer.state_dict(), 'ada_p': self.ada_aug_p}

    d_states = {'seed': self.train_config['seed'], 'run_name': self.run_name, 'step': step, 'best_step': self.best_step,
                'state_dict': dis.state_dict(), 'optimizer': self.D_optimizer.state_dict(), 'ada_p': self.ada_aug_p,
                'best_fid': self.best_fid, 'best_fid_checkpoint_path': self.checkpoint_dir}

    # Drop the stale checkpoint with the same tag before writing the new one.
    if len(glob.glob(join(self.checkpoint_dir,"model=G-{when}-weights-step*.pth".format(when=when)))) >= 1:
        find_and_remove(glob.glob(join(self.checkpoint_dir,"model=G-{when}-weights-step*.pth".format(when=when)))[0])
        find_and_remove(glob.glob(join(self.checkpoint_dir,"model=D-{when}-weights-step*.pth".format(when=when)))[0])

    g_checkpoint_output_path = join(self.checkpoint_dir, "model=G-{when}-weights-step={step}.pth".format(when=when, step=str(step)))
    d_checkpoint_output_path = join(self.checkpoint_dir, "model=D-{when}-weights-step={step}.pth".format(when=when, step=str(step)))

    # A best checkpoint also replaces the "current" checkpoint so the newest
    # weights are always reachable under the current tag.
    if when == "best":
        if len(glob.glob(join(self.checkpoint_dir,"model=G-current-weights-step*.pth".format(when=when)))) >= 1:
            find_and_remove(glob.glob(join(self.checkpoint_dir,"model=G-current-weights-step*.pth".format(when=when)))[0])
            find_and_remove(glob.glob(join(self.checkpoint_dir,"model=D-current-weights-step*.pth".format(when=when)))[0])

        g_checkpoint_output_path_ = join(self.checkpoint_dir, "model=G-current-weights-step={step}.pth".format(when=when, step=str(step)))
        d_checkpoint_output_path_ = join(self.checkpoint_dir, "model=D-current-weights-step={step}.pth".format(when=when, step=str(step)))

        torch.save(g_states, g_checkpoint_output_path_)
        torch.save(d_states, d_checkpoint_output_path_)

    torch.save(g_states, g_checkpoint_output_path)
    torch.save(d_states, d_checkpoint_output_path)

    if self.Gen_copy is not None:
        # The EMA generator is saved weights-only (no optimizer is attached to it).
        g_ema_states = {'state_dict': gen_copy.state_dict()}
        if len(glob.glob(join(self.checkpoint_dir, "model=G_ema-{when}-weights-step*.pth".format(when=when)))) >= 1:
            find_and_remove(glob.glob(join(self.checkpoint_dir, "model=G_ema-{when}-weights-step*.pth".format(when=when)))[0])

        g_ema_checkpoint_output_path = join(self.checkpoint_dir, "model=G_ema-{when}-weights-step={step}.pth".format(when=when, step=str(step)))

        if when == "best":
            if len(glob.glob(join(self.checkpoint_dir,"model=G_ema-current-weights-step*.pth".format(when=when)))) >= 1:
                find_and_remove(glob.glob(join(self.checkpoint_dir,"model=G_ema-current-weights-step*.pth".format(when=when)))[0])

            g_ema_checkpoint_output_path_ = join(self.checkpoint_dir, "model=G_ema-current-weights-step={step}.pth".format(when=when, step=str(step)))
            torch.save(g_ema_states, g_ema_checkpoint_output_path_)

        torch.save(g_ema_states, g_ema_checkpoint_output_path)

    if self.logger:
        self.logger.info("Saved model to {}".format(self.checkpoint_dir))

    # Restore training mode before returning to the training loop.
    self.dis_model.train()
    self.gen_model.train()
    if self.Gen_copy is not None:
        self.Gen_copy.train()
################################################################################################################################
################################################################################################################################
def evaluation(self, step, standing_statistics, standing_step):
    """Evaluate the generator: FID, Inception Score, and F_beta precision/recall.

    Metrics are logged, written to TensorBoard, and the best (lowest) FID seen
    so far is tracked on the instance (self.best_fid / self.best_step).

    Args:
        step: training step the metrics are associated with.
        standing_statistics: whether to use standing (accumulated) BN statistics
            when switching the generator into evaluation mode.
        standing_step: number of accumulation steps for standing statistics.

    Returns:
        bool: True if this evaluation achieved a new best FID.
    """
    # Gradients are only needed when latent optimisation is enabled.
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        self.logger.info("Start Evaluation ({step} Step): {run_name}".format(step=step, run_name=self.run_name))
        is_best = False
        # num_split: IS splits; num_run4PR / num_cluster4PR / beta4PR: F_beta settings.
        num_split, num_run4PR, num_cluster4PR, beta4PR = 1, 10, 20, 8
        self.dis_model.eval()
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        # FID also caches real-data moments on self.m1 / self.s1 for reuse.
        fid_score, self.m1, self.s1 = calculate_fid_score(self.eval_dataloader, generator, self.dis_model, self.inception_model, self.num_eval[self.eval_type],
                                                          self.truncated_factor, self.prior, self.latent_op, self.latent_op_step4eval, self.latent_op_alpha,
                                                          self.latent_op_beta, self.default_device, self.mu, self.sigma, self.run_name)

        kl_score, kl_std = calculate_incep_score(self.eval_dataloader, generator, self.dis_model, self.inception_model, self.num_eval[self.eval_type],
                                                 self.truncated_factor, self.prior, self.latent_op, self.latent_op_step4eval, self.latent_op_alpha,
                                                 self.latent_op_beta, num_split, self.default_device)

        precision, recall, f_beta, f_beta_inv = calculate_f_beta_score(self.eval_dataloader, generator, self.dis_model, self.inception_model, self.num_eval[self.eval_type],
                                                                       num_run4PR, num_cluster4PR, beta4PR, self.truncated_factor, self.prior, self.latent_op,
                                                                       self.latent_op_step4eval, self.latent_op_alpha, self.latent_op_beta, self.default_device)
        PR_Curve = plot_pr_curve(precision, recall, self.run_name, self.logger)

        # Disabled discriminator-accuracy evaluation, kept for reference.
        '''
        if self.D_loss.__name__ != "loss_wgan_dis":
            real_train_acc, fake_acc = calculate_accuracy(self.train_dataloader, generator, self.dis_model, self.D_loss, self.num_eval[self.eval_type],
                                                          self.truncated_factor, self.prior, self.latent_op, self.latent_op_step, self.latent_op_alpha,
                                                          self.latent_op_beta, self.default_device, cr=self.cr, eval_generated_sample=True)

            if self.eval_type == 'train':
                acc_dict = {'real_train': real_train_acc, 'fake': fake_acc}
            else:
                real_eval_acc = calculate_accuracy(self.eval_dataloader, generator, self.dis_model, self.D_loss, self.num_eval[self.eval_type],
                                                   self.truncated_factor, self.prior, self.latent_op, self.latent_op_step, self.latent_op_alpha,
                                                   self. latent_op_beta, self.default_device, cr=self.cr, eval_generated_sample=False)
                acc_dict = {'real_train': real_train_acc, 'real_valid': real_eval_acc, 'fake': fake_acc}

            self.writer.add_scalars('{}/Accuracy'.format(self.prune_round), acc_dict, step)
        '''

        # Lower FID is better; ties refresh the best step.
        if self.best_fid is None:
            self.best_fid, self.best_step, is_best, f_beta_best, f_beta_inv_best = fid_score, step, True, f_beta, f_beta_inv
        else:
            if fid_score <= self.best_fid:
                self.best_fid, self.best_step, is_best, f_beta_best, f_beta_inv_best = fid_score, step, True, f_beta, f_beta_inv

        self.writer.add_scalars('FID score', {'using {type} moments'.format(type=self.eval_type):fid_score}, step)
        self.writer.add_scalars('F_beta score', {'{num} generated images'.format(num=str(self.num_eval[self.eval_type])):f_beta}, step)
        self.writer.add_scalars('F_beta_inv score', {'{num} generated images'.format(num=str(self.num_eval[self.eval_type])):f_beta_inv}, step)
        self.writer.add_scalars('IS score', {'{num} generated images'.format(num=str(self.num_eval[self.eval_type])):kl_score}, step)
        self.writer.add_figure('PR_Curve', PR_Curve, global_step=step)
        self.logger.info('F_{beta} score (Step: {step}, Using {type} images): {F_beta}'.format(beta=beta4PR, step=step, type=self.eval_type, F_beta=f_beta))
        self.logger.info('F_1/{beta} score (Step: {step}, Using {type} images): {F_beta_inv}'.format(beta=beta4PR, step=step, type=self.eval_type, F_beta_inv=f_beta_inv))
        self.logger.info('FID score (Step: {step}, Using {type} moments): {FID}'.format(step=step, type=self.eval_type, FID=fid_score))
        self.logger.info('Inception score (Step: {step}, {num} generated images): {IS}'.format(step=step, num=str(self.num_eval[self.eval_type]), IS=kl_score))
        # NOTE(review): self.train is presumably a "training mode" flag set at
        # construction (the __init__ is outside this chunk) — TODO confirm.
        if self.train:
            self.logger.info('Best FID score (Step: {step}, Using {type} moments): {FID}'.format(step=self.best_step, type=self.eval_type, FID=self.best_fid))

        # Restore training mode for the discriminator and generator.
        self.dis_model.train()
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)

    return is_best
################################################################################################################################
################################################################################################################################
def save_images(self, is_generate, standing_statistics, standing_step, png=True, npz=True):
    """Export images to disk as individual PNGs and/or a single NPZ archive.

    Args:
        is_generate: forwarded to the save helpers; presumably selects between
            sampling the generator and exporting real evaluation images —
            TODO confirm against save_images_png/npz.
        standing_statistics, standing_step: BN standing-statistics settings
            forwarded to change_generator_mode.
        png: write per-image PNG files when True.
        npz: write one NPZ archive when True.
    """
    # Gradients are only needed when latent optimisation is enabled.
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        self.dis_model.eval()
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        if png:
            save_images_png(self.run_name, self.eval_dataloader, self.num_eval[self.eval_type], self.num_classes, generator,
                            self.dis_model, is_generate, self.truncated_factor, self.prior, self.latent_op, self.latent_op_step,
                            self.latent_op_alpha, self.latent_op_beta, self.default_device)
        if npz:
            save_images_npz(self.run_name, self.eval_dataloader, self.num_eval[self.eval_type], self.num_classes, generator,
                            self.dis_model, is_generate, self.truncated_factor, self.prior, self.latent_op, self.latent_op_step,
                            self.latent_op_alpha, self.latent_op_beta, self.default_device)
################################################################################################################################
################################################################################################################################
def run_image_visualization(self, nrow, ncol, standing_statistics, standing_step):
    """Sample one batch from the generator and save it as a single canvas PNG.

    Args:
        nrow: unused in this body; kept for interface symmetry with the other
            visualization helpers.
        ncol: number of canvas columns, forwarded to plot_img_canvas.
        standing_statistics, standing_step: BN standing-statistics settings
            forwarded to change_generator_mode.
    """
    self.logger.info('Start visualizing images....')
    # Gradients are only needed when latent optimisation is enabled.
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        # Unconditional models sample labels freely; conditional ones use a
        # class-ordered sampler so the canvas is grouped by class.
        sampler = "default" if self.conditional_strategy == "no" else "class_order_some"
        if self.zcr:
            zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                                   self.sigma_noise, self.default_device, sampler=sampler)
        else:
            zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                             None, self.default_device, sampler=sampler)

        if self.latent_op:
            zs = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
                                 self.latent_op_step, self.latent_op_rate, self.latent_op_alpha, self.latent_op_beta,
                                 False, self.default_device)

        generated_images = generator(zs, fake_labels, evaluation=True)

        # (x+1)/2 maps the generator's [-1, 1] output range to [0, 1] for plotting.
        plot_img_canvas((generated_images.detach().cpu()+1)/2, "./figures/{run_name}/generated_canvas.png".\
                        format(run_name=self.run_name), self.logger, ncol)

        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
################################################################################################################################
def run_linear_interpolation(self, nrow, ncol, fix_z, fix_y, standing_statistics, standing_step):
    """Save a canvas of images generated along linear interpolations in latent space.

    Per row, either the latent code z or the class embedding y is held fixed
    while the other is interpolated across the row's ncol columns.

    Args:
        nrow: number of rows (independent anchors) in the canvas.
        ncol: number of interpolation steps (columns) per row.
        fix_z: keep z fixed per row; interpolate the class embedding.
        fix_y: keep the class embedding fixed per row; interpolate z.
            fix_z and fix_y must not both be True.
        standing_statistics, standing_step: BN standing-statistics settings
            forwarded to change_generator_mode.
    """
    self.logger.info('Start linear interpolation analysis....')
    # Gradients are only needed when latent optimisation is enabled.
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)
        shared = generator.module.shared if isinstance(generator, DataParallel) else generator.shared
        assert int(fix_z)*int(fix_y) != 1, "unable to switch fix_z and fix_y on together!"

        # Bug fix: `name` was previously assigned only inside the fix_z / fix_y
        # branches, raising a NameError when both flags were False.
        if fix_z:
            name = "fix_z"
        elif fix_y:
            name = "fix_y"
        else:
            name = "fix_none"

        if fix_z:
            zs = torch.randn(nrow, 1, self.z_dim, device=self.default_device)
            zs = zs.repeat(1, ncol, 1).view(-1, self.z_dim)
        else:
            zs = interp(torch.randn(nrow, 1, self.z_dim, device=self.default_device),
                        torch.randn(nrow, 1, self.z_dim, device=self.default_device),
                        ncol - 2).view(-1, self.z_dim)

        if fix_y:
            ys = sample_1hot(nrow, self.num_classes, device=self.default_device)
            ys = shared(ys).view(nrow, 1, -1)
            ys = ys.repeat(1, ncol, 1).view(nrow * (ncol), -1)
        else:
            # Bug fix: pass the device explicitly, matching the fix_y branch
            # above; omitting it put the one-hot labels on sample_1hot's
            # default device instead of self.default_device.
            ys = interp(shared(sample_1hot(nrow, self.num_classes, device=self.default_device)).view(nrow, 1, -1),
                        shared(sample_1hot(nrow, self.num_classes, device=self.default_device)).view(nrow, 1, -1),
                        ncol-2).view(nrow * (ncol), -1)

        interpolated_images = generator(zs, None, shared_label=ys, evaluation=True)

        # (x+1)/2 maps the generator's [-1, 1] output range to [0, 1] for plotting.
        plot_img_canvas((interpolated_images.detach().cpu()+1)/2, "./figures/{run_name}/Interpolated_images_{fix_flag}.png".\
                        format(run_name=self.run_name, fix_flag=name), self.logger, ncol)

        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
################################################################################################################################
def run_nearest_neighbor(self, nrow, ncol, standing_statistics, standing_step):
    """For each class, display the real training images closest to a generated sample.

    One fake anchor image per class is embedded with an ImageNet-pretrained
    ResNet-50 (classification head removed); the ncol-1 real same-class images
    nearest in embedding space (mean squared distance) are placed beside it.
    Every nrow classes a canvas is saved under ./figures/<run_name>/.
    """
    self.logger.info('Start nearest neighbor analysis....')
    # Gradients are only needed when latent optimisation is enabled.
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        # ResNet-50 feature extractor: drop the final FC layer, keep the pooling.
        resnet50_model = torch.hub.load('pytorch/vision:v0.6.0', 'resnet50', pretrained=True)
        resnet50_conv = nn.Sequential(*list(resnet50_model.children())[:-1]).to(self.default_device)
        if self.n_gpus > 1:
            resnet50_conv = DataParallel(resnet50_conv, output_device=self.default_device)
        resnet50_conv.eval()

        for c in tqdm(range(self.num_classes)):
            fake_images, fake_labels = generate_images_for_KNN(self.batch_size, c, generator, self.dis_model, self.truncated_factor, self.prior, self.latent_op,
                                                               self.latent_op_step, self.latent_op_alpha, self.latent_op_beta, self.default_device)
            # Only the first generated image of the batch serves as the anchor.
            fake_image = torch.unsqueeze(fake_images[0], dim=0)
            # (x+1)/2 rescales [-1, 1] images to [0, 1] before embedding.
            fake_anchor_embedding = torch.squeeze(resnet50_conv((fake_image+1)/2))

            # Sampler restricted to real images of class c.
            num_samples, target_sampler = target_class_sampler(self.train_dataset, c)
            train_dataloader = torch.utils.data.DataLoader(self.train_dataset, batch_size=self.batch_size, shuffle=False, sampler=target_sampler,
                                                           num_workers=self.train_config['num_workers'], pin_memory=True)
            train_iter = iter(train_dataloader)
            # Accumulate embedding distances and raw pixels over all full batches
            # of this class (a trailing partial batch is dropped by the // division).
            for batch_idx in range(num_samples//self.batch_size):
                real_images, real_labels = next(train_iter)
                real_images = real_images.to(self.default_device)
                real_embeddings = torch.squeeze(resnet50_conv((real_images+1)/2))
                if batch_idx == 0:
                    distances = torch.square(real_embeddings - fake_anchor_embedding).mean(dim=1).detach().cpu().numpy()
                    holder = real_images.detach().cpu().numpy()
                else:
                    distances = np.concatenate([distances, torch.square(real_embeddings - fake_anchor_embedding).mean(dim=1).detach().cpu().numpy()], axis=0)
                    holder = np.concatenate([holder, real_images.detach().cpu().numpy()], axis=0)

            # Indices of the ncol-1 smallest distances, ordered nearest-first.
            nearest_indices = (-distances).argsort()[-(ncol-1):][::-1]
            if c % nrow == 0:
                # First row: start a new canvas (anchor + its nearest neighbors).
                canvas = np.concatenate([fake_image.detach().cpu().numpy(), holder[nearest_indices]], axis=0)
            elif c % nrow == nrow-1:
                # Last row of the canvas: append it and write the figure to disk.
                row_images = np.concatenate([fake_image.detach().cpu().numpy(), holder[nearest_indices]], axis=0)
                canvas = np.concatenate((canvas, row_images), axis=0)
                plot_img_canvas((torch.from_numpy(canvas)+1)/2, "./figures/{run_name}/Fake_anchor_{ncol}NN_{cls}.png".\
                                format(run_name=self.run_name,ncol=ncol, cls=c), self.logger, ncol)
            else:
                # Middle row: append to the canvas under construction.
                row_images = np.concatenate([fake_image.detach().cpu().numpy(), holder[nearest_indices]], axis=0)
                canvas = np.concatenate((canvas, row_images), axis=0)

        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
################################################################################################################################
def run_frequency_analysis(self, num_images, standing_statistics, standing_step):
    """Plot the averaged log power spectrum of real versus generated images.

    Collects num_images real and fake samples, converts them to grayscale,
    suppresses low frequencies by subtracting a median-filtered copy, and
    averages the shifted log-magnitude FFT over all images before plotting
    both spectra side by side.
    """
    # NOTE(review): message says "linear interpolation" — looks like a
    # copy-paste slip; the runtime string is intentionally left unchanged.
    self.logger.info('Start linear interpolation analysis....')
    # Gradients are only needed when latent optimisation is enabled.
    with torch.no_grad() if self.latent_op is False else dummy_context_mgr() as mpc:
        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=False)

        train_iter = iter(self.train_dataloader)
        # A trailing partial batch is dropped by the integer division.
        num_batches = num_images//self.batch_size
        for i in range(num_batches):
            if self.zcr:
                zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                                       self.sigma_noise, self.default_device)
            else:
                zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
                                                 None, self.default_device)

            if self.latent_op:
                zs = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
                                     self.latent_op_step, self.latent_op_rate, self.latent_op_alpha, self.latent_op_beta,
                                     False, self.default_device)

            real_images, real_labels = next(train_iter)
            fake_images = generator(zs, fake_labels, evaluation=True).detach().cpu().numpy()

            # (x+1)*127.5 maps [-1, 1] floats to uint8 [0, 255].
            real_images = np.asarray((real_images + 1)*127.5, np.uint8)
            fake_images = np.asarray((fake_images + 1)*127.5, np.uint8)

            if i == 0:
                real_array = real_images
                fake_array = fake_images
            else:
                real_array = np.concatenate([real_array, real_images], axis = 0)
                fake_array = np.concatenate([fake_array, fake_images], axis = 0)

        N, C, H, W = np.shape(real_array)
        # ITU-R BT.601 luma weights for RGB -> grayscale conversion.
        real_r, real_g, real_b = real_array[:,0,:,:], real_array[:,1,:,:], real_array[:,2,:,:]
        real_gray = 0.2989 * real_r + 0.5870 * real_g + 0.1140 * real_b

        fake_r, fake_g, fake_b = fake_array[:,0,:,:], fake_array[:,1,:,:], fake_array[:,2,:,:]
        fake_gray = 0.2989 * fake_r + 0.5870 * fake_g + 0.1140 * fake_b

        for j in tqdm(range(N)):
            # Subtracting a median-filtered copy acts as a crude high-pass filter,
            # emphasising the high-frequency content the spectra should compare.
            real_gray_f = np.fft.fft2(real_gray[j] - ndimage.median_filter(real_gray[j], size= H//8))
            fake_gray_f = np.fft.fft2(fake_gray[j] - ndimage.median_filter(fake_gray[j], size=H//8))

            # Shift the zero-frequency component to the spectrum centre.
            real_gray_f_shifted = np.fft.fftshift(real_gray_f)
            fake_gray_f_shifted = np.fft.fftshift(fake_gray_f)

            # Running mean over N images of the dB-style 20*log|F| spectrum.
            if j == 0:
                real_gray_spectrum = 20*np.log(np.abs(real_gray_f_shifted))/N
                fake_gray_spectrum = 20*np.log(np.abs(fake_gray_f_shifted))/N
            else:
                real_gray_spectrum += 20*np.log(np.abs(real_gray_f_shifted))/N
                fake_gray_spectrum += 20*np.log(np.abs(fake_gray_f_shifted))/N

        plot_spectrum_image(real_gray_spectrum, fake_gray_spectrum, self.run_name, self.logger)

        generator = change_generator_mode(self.gen_model, self.Gen_copy, standing_statistics, standing_step, self.prior,
                                          self.batch_size, self.z_dim, self.num_classes, self.default_device, training=True)
################################################################################################################################
def PGD(x, label, loss, model=None, steps=1, gamma=0.1, eps=(1/255), randinit=False, clip=False):
    """Sign-gradient ascent (PGD-style) on a clone of `x` against `model`.

    Each step evaluates model(x_adv, label, only_fc=True), takes the gradient
    of -loss(out) w.r.t. x_adv, and moves x_adv by gamma * sign(grad).
    When `clip` is set, x_adv is projected back into the eps L-inf ball
    around the original `x` via linfball_proj.

    Returns the adversarially perturbed clone (on CUDA).
    """
    adv = x.clone()
    if randinit:
        # Start from uniform noise in (-eps, eps) around x.
        adv += (2.0 * torch.rand(adv.shape).cuda() - 1.0) * eps
    adv = adv.cuda()
    x = x.cuda()
    for _ in range(steps):
        objective = -loss(model(adv, label, only_fc=True))
        grad = torch.autograd.grad(objective, adv, only_inputs=True)[0]
        # In-place ascent step in the sign direction of the gradient.
        adv.data.add_(gamma * torch.sign(grad.data))
        if clip:
            linfball_proj(x, eps, adv, in_place=True)
    return adv
def PGD_G(x, gen_labels, label, loss, gen_model, dis_model, steps=1, gamma=0.1, eps=(1/255), randinit=False, clip=False):
    """Sign-gradient ascent on intermediate generator activations.

    A clone of `x` is pushed through the tail of the generator
    (gen_model(..., l1=False)) and the discriminator; each step moves the
    clone by gamma * sign(grad of -loss). With `clip`, the clone is
    projected back into the eps L-inf ball around `x`.
    (`randinit` is accepted for signature symmetry with PGD but unused.)

    Returns the adversarially perturbed activations (on CUDA).
    """
    adv = x.clone().cuda()
    x = x.cuda()
    for _ in range(steps):
        images = gen_model(adv, gen_labels, l1=False)
        objective = -loss(dis_model(images, label))
        grad = torch.autograd.grad(objective, adv, only_inputs=True)[0]
        # In-place ascent step in the sign direction of the gradient.
        adv.data.add_(gamma * torch.sign(grad.data))
        if clip:
            linfball_proj(x, eps, adv, in_place=True)
    return adv
import numpy as np
import sys
import glob
from scipy import ndimage
from os.path import join
from PIL import Image
from tqdm import tqdm
from datetime import datetime
from metrics.IS import calculate_incep_score
from metrics.FID import calculate_fid_score
from metrics.F_beta import calculate_f_beta_score
from metrics.Accuracy import calculate_accuracy
from utils.ada import augment
from utils.biggan_utils import interp
from utils.sample import sample_latents, sample_1hot, make_mask, target_class_sampler
from utils.misc import *
from utils.losses import calc_derv4gp, calc_derv4dra, calc_derv, latent_optimise
from utils.losses import Conditional_Contrastive_loss, Proxy_NCA_loss, NT_Xent_loss
from utils.diff_aug import DiffAugment
from utils.cr_diff_aug import CR_DiffAug
import torch
import torch.nn as nn
from torch.nn import DataParallel
import torch.nn.functional as F
import torchvision
from torchvision import transforms
SAVE_FORMAT = 'step={step:0>3}-Inception_mean={Inception_mean:<.4}-Inception_std={Inception_std:<.4}-FID={FID:<.5}.pth'
LOG_FORMAT = (
"Step: {step:>7} "
"Progress: {progress:<.1%} "
"Elapsed: {elapsed} "
"temperature: {temperature:<.6} "
"ada_p: {ada_p:<.6} "
"Discriminator_loss: {dis_loss:<.6} "
"Generator_loss: {gen_loss:<.6} "
)
def set_temperature(conditional_strategy, tempering_type, start_temperature, end_temperature, step_count, tempering_step, total_step):
    """Return the contrastive-loss temperature for the current training step.

    For the 'ContraGAN' strategy the temperature is annealed from
    ``start_temperature`` toward ``end_temperature``: linearly per step
    ('continuous'), in ``tempering_step`` discrete jumps ('discrete'), or
    held constant (any other tempering type).  For every other conditional
    strategy the temperature is meaningless and the placeholder string
    ``'no'`` is returned (it is only interpolated into log messages).
    """
    if conditional_strategy != 'ContraGAN':
        return 'no'

    span = end_temperature - start_temperature
    if tempering_type == 'continuous':
        # Linear ramp over the whole training run.
        return start_temperature + step_count * span / total_step
    if tempering_type == 'discrete':
        # Jump once per interval; tempering_step jumps cover the full span.
        interval = total_step // (tempering_step + 1)
        completed_jumps = step_count // interval
        return start_temperature + completed_jumps * span / tempering_step
    # Unknown tempering type: keep the starting temperature.
    return start_temperature
class Train_Eval(object):
    def __init__(self, run_name, best_step, dataset_name, eval_type, logger, writer, n_gpus, gen_model, dis_model, inception_model,
                 Gen_copy, Gen_ema, train_dataset, eval_dataset, train_dataloader, eval_dataloader, freeze_layers, conditional_strategy,
                 pos_collected_numerator, z_dim, num_classes, hypersphere_dim, d_spectral_norm, g_spectral_norm, G_optimizer, D_optimizer,
                 batch_size, g_steps_per_iter, d_steps_per_iter, accumulation_steps, total_step, G_loss, D_loss, contrastive_lambda, margin,
                 tempering_type, tempering_step, start_temperature, end_temperature, weight_clipping_for_dis, weight_clipping_bound,
                 gradient_penalty_for_dis, gradient_penalty_lambda, deep_regret_analysis_for_dis, regret_penalty_lambda, cr, cr_lambda, bcr,
                 real_lambda, fake_lambda, zcr, gen_lambda, dis_lambda, sigma_noise, diff_aug, ada, prev_ada_p, ada_target, ada_length, prior,
                 truncated_factor, ema, latent_op, latent_op_rate, latent_op_step, latent_op_step4eval, latent_op_alpha, latent_op_beta,
                 latent_norm_reg_weight, default_device, print_every, save_every, checkpoint_dir, evaluate, mu, sigma, best_fid,
                 best_fid_checkpoint_path, mixed_precision, train_config, model_config, gamma, steps):
        """Bundle models, data, losses and every hyper-parameter of a GAN run.

        The long argument list is stored 1:1 on ``self``; the constructor tail
        then derives a few objects from it: the strategy-specific auxiliary
        criterion (contrastive / Proxy-NCA / NT-Xent), the AMP grad scaler
        when mixed precision is on, and the per-dataset evaluation-set sizes.
        Raises ``NotImplementedError`` for an unrecognized ``dataset_name``.
        """
        # --- run bookkeeping / logging ---
        self.run_name = run_name
        self.best_step = best_step
        self.dataset_name = dataset_name
        self.eval_type = eval_type
        self.logger = logger
        self.writer = writer
        self.n_gpus = n_gpus

        # --- models (generator, discriminator, FID backbone, EMA copies) ---
        self.gen_model = gen_model
        self.dis_model = dis_model
        self.inception_model = inception_model
        self.Gen_copy = Gen_copy
        self.Gen_ema = Gen_ema

        # --- datasets / loaders ---
        self.train_dataset = train_dataset
        self.eval_dataset = eval_dataset
        self.train_dataloader = train_dataloader
        self.eval_dataloader = eval_dataloader

        # --- conditioning strategy and architecture knobs ---
        self.freeze_layers = freeze_layers
        self.conditional_strategy = conditional_strategy
        self.pos_collected_numerator = pos_collected_numerator
        self.z_dim = z_dim
        self.num_classes = num_classes
        self.hypersphere_dim = hypersphere_dim
        self.d_spectral_norm = d_spectral_norm
        self.g_spectral_norm = g_spectral_norm

        # --- optimization schedule ---
        self.G_optimizer = G_optimizer
        self.D_optimizer = D_optimizer
        self.batch_size = batch_size
        self.g_steps_per_iter = g_steps_per_iter
        self.d_steps_per_iter = d_steps_per_iter
        self.accumulation_steps = accumulation_steps
        self.total_step = total_step

        # --- adversarial losses and auxiliary-loss weights ---
        self.G_loss = G_loss
        self.D_loss = D_loss
        self.contrastive_lambda = contrastive_lambda
        self.margin = margin

        # --- temperature tempering for the contrastive loss ---
        self.tempering_type = tempering_type
        self.tempering_step = tempering_step
        self.start_temperature = start_temperature
        self.end_temperature = end_temperature

        # --- discriminator regularizers (clipping, GP, DRA, CR/bCR/zCR) ---
        self.weight_clipping_for_dis = weight_clipping_for_dis
        self.weight_clipping_bound = weight_clipping_bound
        self.gradient_penalty_for_dis = gradient_penalty_for_dis
        self.gradient_penalty_lambda = gradient_penalty_lambda
        self.deep_regret_analysis_for_dis = deep_regret_analysis_for_dis
        self.regret_penalty_lambda = regret_penalty_lambda
        self.cr = cr
        self.cr_lambda = cr_lambda
        self.bcr = bcr
        self.real_lambda = real_lambda
        self.fake_lambda = fake_lambda
        self.zcr = zcr
        self.gen_lambda = gen_lambda
        self.dis_lambda = dis_lambda
        self.sigma_noise = sigma_noise

        # --- data augmentation (DiffAugment / adaptive discriminator aug) ---
        self.diff_aug = diff_aug
        self.ada = ada
        self.prev_ada_p = prev_ada_p
        self.ada_target = ada_target
        self.ada_length = ada_length

        # --- latent sampling and latent optimisation ---
        self.prior = prior
        self.truncated_factor = truncated_factor
        self.ema = ema
        self.latent_op = latent_op
        self.latent_op_rate = latent_op_rate
        self.latent_op_step = latent_op_step
        self.latent_op_step4eval = latent_op_step4eval
        self.latent_op_alpha = latent_op_alpha
        self.latent_op_beta = latent_op_beta
        self.latent_norm_reg_weight = latent_norm_reg_weight

        # --- devices, checkpointing, evaluation statistics ---
        self.default_device = default_device
        self.print_every = print_every
        self.save_every = save_every
        self.checkpoint_dir = checkpoint_dir
        self.evaluate = evaluate
        self.mu = mu
        self.sigma = sigma
        self.best_fid = best_fid
        self.best_fid_checkpoint_path = best_fid_checkpoint_path
        self.mixed_precision = mixed_precision
        self.train_config = train_config
        self.model_config = model_config

        # --- derived objects ---
        self.start_time = datetime.now()
        self.l2_loss = torch.nn.MSELoss()
        self.ce_loss = torch.nn.CrossEntropyLoss()
        # DiffAugment policy string (see utils.diff_aug.DiffAugment).
        self.policy = "color,translation,cutout"
        # PGD attack hyper-parameters used by the adversarial strategies.
        self.steps = steps
        self.gamma = gamma

        # NOTE(review): `sampler` is never used below — either define_sampler
        # has side effects or this is dead code; confirm before removing.
        sampler = define_sampler(self.dataset_name, self.conditional_strategy)

        # Validate mutually-exclusive / unsupported flag combinations early.
        check_flag_1(self.tempering_type, self.pos_collected_numerator, self.conditional_strategy, self.diff_aug, self.ada,
                     self.mixed_precision, self.gradient_penalty_for_dis, self.deep_regret_analysis_for_dis, self.cr, self.bcr, self.zcr)

        # Strategy-specific auxiliary criterion (only one is ever built).
        if self.conditional_strategy == 'ContraGAN':
            self.contrastive_criterion = Conditional_Contrastive_loss(self.default_device, self.batch_size, self.pos_collected_numerator)
        elif self.conditional_strategy == 'Proxy_NCA_GAN':
            # The discriminator's class-embedding table feeds the NCA loss;
            # unwrap DataParallel to reach the underlying module.
            if isinstance(self.dis_model, DataParallel):
                self.embedding_layer = self.dis_model.module.embedding
            else:
                self.embedding_layer = self.dis_model.embedding
            self.NCA_criterion = Proxy_NCA_loss(self.default_device, self.embedding_layer, self.num_classes, self.batch_size)
        elif self.conditional_strategy == 'NT_Xent_GAN':
            self.NT_Xent_criterion = NT_Xent_loss(self.default_device, self.batch_size)
        else:
            pass

        if self.mixed_precision:
            # AMP loss scaler; used in the train loop's backward/step calls.
            self.scaler = torch.cuda.amp.GradScaler()

        # Number of samples to draw per split when computing IS/FID/F-beta.
        if self.dataset_name in ["imagenet"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name in ["imagenet_less_0.25"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name in ["imagenet_less"]:
            self.num_eval = {'train':50000, 'valid':50000}
        elif self.dataset_name == "tiny_imagenet":
            self.num_eval = {'train':50000, 'valid':10000}
        elif self.dataset_name == "cifar10":
            self.num_eval = {'train':50000, 'test':10000}
        elif self.dataset_name == "cifar10_less":
            self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
        elif self.dataset_name in ["cifar100_less"]:
            self.num_eval = {'train':len(self.train_dataset.data), 'valid':len(self.eval_dataset.data), 'test':len(self.eval_dataset.data)}
        elif self.dataset_name == "custom":
            num_train_images = len(self.train_dataset.data)
            num_eval_images = len(self.eval_dataset.data)
            self.num_eval = {'train':num_train_images, 'valid':num_eval_images}
        else:
            raise NotImplementedError
_aug = self.dis_model(real_images_aug, real_labels)
cls_consistency_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
else:
raise NotImplementedError
consistency_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
consistency_loss += cls_consistency_loss
dis_acml_loss += self.cr_lambda*consistency_loss
if self.bcr:
real_images_aug = CR_DiffAug(real_images)
fake_images_aug = CR_DiffAug(fake_images)
if self.conditional_strategy == "ACGAN":
cls_out_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_out_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_out_real, cls_out_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_out_fake, cls_out_fake_aug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_real_aug, cls_embed_real_aug, dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
cls_proxies_fake_aug, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
cls_bcr_real_loss = self.l2_loss(cls_embed_real, cls_embed_real_aug)
cls_bcr_fake_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_aug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_real_aug = self.dis_model(real_images_aug, real_labels)
dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
else:
raise NotImplementedError
bcr_real_loss = self.l2_loss(dis_out_real, dis_out_real_aug)
bcr_fake_loss = self.l2_loss(dis_out_fake, dis_out_fake_aug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
bcr_real_loss += cls_bcr_real_loss
bcr_fake_loss += cls_bcr_fake_loss
dis_acml_loss += self.real_lambda*bcr_real_loss + self.fake_lambda*bcr_fake_loss
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
if self.conditional_strategy == "ACGAN":
cls_out_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_out_fake, cls_out_fake_zaug)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
elif self.conditional_strategy in ["ContraGAN", "Proxy_NCA_GAN", "NT_Xent_GAN"]:
cls_proxies_fake_zaug, cls_embed_fake_zaug, dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
cls_zcr_dis_loss = self.l2_loss(cls_embed_fake, cls_embed_fake_zaug)
elif self.conditional_strategy == "ProjGAN_adv":
dis_out_fake_zaug = self.dis_model(fake_images_zaug, fake_labels)
else:
raise NotImplementedError
zcr_dis_loss = self.l2_loss(dis_out_fake, dis_out_fake_zaug)
if self.conditional_strategy in ["ACGAN", "NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
zcr_dis_loss += cls_zcr_dis_loss
dis_acml_loss += self.dis_lambda*zcr_dis_loss
if self.gradient_penalty_for_dis:
dis_acml_loss += self.gradient_penalty_lambda*calc_derv4gp(self.dis_model, self.conditional_strategy, real_images,
fake_images, real_labels, self.default_device)
if self.deep_regret_analysis_for_dis:
dis_acml_loss += self.regret_penalty_lambda*calc_derv4dra(self.dis_model, self.conditional_strategy, real_images,
real_labels, self.default_device)
if self.ada:
ada_aug_data = torch.tensor((torch.sign(dis_out_real).sum().item(), dis_out_real.shape[0]), device = self.default_device)
self.ada_augment += ada_aug_data
if self.ada_augment[1] > (self.batch_size*4 - 1):
authen_out_signs, num_outputs = self.ada_augment.tolist()
r_t_stat = authen_out_signs/num_outputs
sign = 1 if r_t_stat > self.ada_target else -1
self.ada_aug_p += sign*self.ada_aug_step*num_outputs
self.ada_aug_p = min(1.0, max(0.0, self.ada_aug_p))
self.ada_augment.mul_(0.0)
dis_acml_loss = dis_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(dis_acml_loss).backward()
else:
dis_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.D_optimizer)
self.scaler.update()
else:
self.D_optimizer.step()
if self.weight_clipping_for_dis:
for p in self.dis_model.parameters():
p.data.clamp_(-self.weight_clipping_bound, self.weight_clipping_bound)
if step_count % self.print_every == 0 and step_count !=0 and self.logger:
if self.d_spectral_norm:
dis_sigmas = calculate_all_sn(self.dis_model)
self.writer.add_scalars('SN_of_dis', dis_sigmas, step_count)
toggle_grad(self.dis_model, False, freeze_layers=-1)
toggle_grad(self.gen_model, True, freeze_layers=-1)
for step_index in range(self.g_steps_per_iter):
self.G_optimizer.zero_grad()
for acml_step in range(self.accumulation_steps):
with torch.cuda.amp.autocast() if self.mixed_precision else dummy_context_mgr() as mpc:
if self.zcr:
zs, fake_labels, zs_t = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
self.sigma_noise, self.default_device)
else:
zs, fake_labels = sample_latents(self.prior, self.batch_size, self.z_dim, 1, self.num_classes,
None, self.default_device)
if self.latent_op:
zs, transport_cost = latent_optimise(zs, fake_labels, self.gen_model, self.dis_model, self.conditional_strategy,
self.latent_op_step, self.latent_op_rate, self.latent_op_alpha,
self.latent_op_beta, True, self.default_device)
if not self.conditional_strategy == 'ProjGAN_adv':
fake_images = self.gen_model(zs, fake_labels)
else:
gen_out_prefc, labels_prefc = self.gen_model(zs, fake_labels, only_l1=True)
loss_fake = lambda x: -torch.mean(x)
gen_out_adv = PGD_G(gen_out_prefc, labels_prefc, fake_labels, loss_fake, self.gen_model, self.dis_model, steps=self.steps, gamma=self.gamma)
fake_images = self.gen_model(gen_out_prefc, labels_prefc, l1=False)
fake_images_adv = self.gen_model(gen_out_adv, labels_prefc, l1=False)
if self.diff_aug:
fake_images = DiffAugment(fake_images, policy=self.policy)
if self.ada:
fake_images, _ = augment(fake_images, self.ada_aug_p)
if self.conditional_strategy == "ACGAN":
cls_out_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == "ProjGAN" or self.conditional_strategy == "no":
dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy in ["NT_Xent_GAN", "Proxy_NCA_GAN", "ContraGAN"]:
fake_cls_mask = make_mask(fake_labels, self.num_classes, self.default_device)
cls_proxies_fake, cls_embed_fake, dis_out_fake = self.dis_model(fake_images, fake_labels)
elif self.conditional_strategy == 'ProjGAN_adv':
dis_out_fake = self.dis_model(fake_images, fake_labels)
dis_out_adv = self.dis_model(fake_images_adv, fake_labels)
else:
raise NotImplementedError
gen_acml_loss = self.G_loss(dis_out_fake)
if self.latent_op:
gen_acml_loss += transport_cost*self.latent_norm_reg_weight
if self.zcr:
fake_images_zaug = self.gen_model(zs_t, fake_labels)
zcr_gen_loss = -1 * self.l2_loss(fake_images, fake_images_zaug)
gen_acml_loss += self.gen_lambda*zcr_gen_loss
if self.conditional_strategy == "ACGAN":
gen_acml_loss += self.ce_loss(cls_out_fake, fake_labels)
elif self.conditional_strategy == "ContraGAN":
gen_acml_loss += self.contrastive_lambda*self.contrastive_criterion(cls_embed_fake, cls_proxies_fake, fake_cls_mask, fake_labels, t, self.margin)
elif self.conditional_strategy == "Proxy_NCA_GAN":
gen_acml_loss += self.contrastive_lambda*self.NCA_criterion(cls_embed_fake, cls_proxies_fake, fake_labels)
elif self.conditional_strategy == "NT_Xent_GAN":
fake_images_aug = CR_DiffAug(fake_images)
_, cls_embed_fake_aug, dis_out_fake_aug = self.dis_model(fake_images_aug, fake_labels)
gen_acml_loss += self.contrastive_lambda*self.NT_Xent_criterion(cls_embed_fake, cls_embed_fake_aug, t)
elif self.conditional_strategy == 'ProjGAN_adv':
gen_acml_loss = (self.G_loss(dis_out_fake) + self.G_loss(dis_out_adv)) / 2
else:
pass
gen_acml_loss = gen_acml_loss/self.accumulation_steps
if self.mixed_precision:
self.scaler.scale(gen_acml_loss).backward()
else:
gen_acml_loss.backward()
if self.mixed_precision:
self.scaler.step(self.G_optimizer)
self.scaler.update()
else:
self.G_optimizer.step()
if self.ema:
self.Gen_ema.update(step_count)
step_count += 1
if step_count % self.print_every == 0 and self.logger:
log_message = LOG_FORMAT.format(step=step_count,
progress=step_count/total_step,
elapsed=elapsed_time(self.start_time),
temperature=t,
ada_p=self.ada_aug_p,
dis_loss=dis_acml_loss.item(),
gen_loss=gen_acml_loss.item(),
)
self.logger.info(log_message)
if self.g_spectral_norm:
gen_sigmas = calculate_all_sn(self.gen_model)
self.writer.add_scalars('SN_of_gen', gen_sigmas, step_count)
self.writer.add_scalars('Losses', {'discriminator': dis_acml_loss.item(),
'generator': gen_acml_loss.item()}, step_count)
if self.ada:
self.writer.add_scalar('ada_p', self.ada_aug_p, step_count)
if step_count % self.save_every == 0 or step_count == total_step:
if self.evaluate:
is_best = self.evaluation(step_count, False, "N/A")
self.save(step_count, is_best)
else:
self.save(step_count, False)
return step_count-1
| true | true |
f71f885cac4c2f109c64f495a43df8973c10dbfa | 2,038 | py | Python | benchmark/points/edge_cnn_ke.py | KuangenZhang/pytorch_geometric | 0bfc79a5eaccfcd16a82395e8578a90c5e44759f | [
"MIT"
] | 1 | 2021-09-14T15:55:56.000Z | 2021-09-14T15:55:56.000Z | benchmark/points/edge_cnn_ke.py | KuangenZhang/pytorch_geometric | 0bfc79a5eaccfcd16a82395e8578a90c5e44759f | [
"MIT"
] | null | null | null | benchmark/points/edge_cnn_ke.py | KuangenZhang/pytorch_geometric | 0bfc79a5eaccfcd16a82395e8578a90c5e44759f | [
"MIT"
] | null | null | null | import argparse
import torch
import torch.nn.functional as F
from torch.nn import Sequential as Seq, Linear as Lin, ReLU, LeakyReLU
from torch_geometric.nn import DynamicEdgeConv, global_max_pool
from datasets import get_dataset
from train_eval import run
# Hyper-parameter CLI flags for the point-cloud benchmark runner.
parser = argparse.ArgumentParser()
for _flag, _type, _default in (
        ('--epochs', int, 200),
        ('--batch_size', int, 24),
        ('--lr', float, 0.001),
        ('--lr_decay_factor', float, 0.5),
        ('--lr_decay_step_size', int, 50),
        ('--weight_decay', float, 0)):
    parser.add_argument(_flag, type=_type, default=_default)
args = parser.parse_args()
class Net(torch.nn.Module):
    """DGCNN-style point-cloud classifier.

    Two DynamicEdgeConv stages (k=20 nearest neighbours, max aggregation)
    followed by a global max-pool and a fully-connected head that emits
    class log-probabilities.
    """

    def __init__(self, num_classes):
        super(Net, self).__init__()

        def _edge_mlp(channels):
            # Chain Lin -> LeakyReLU(0.2) for each consecutive channel pair.
            layers = []
            for c_in, c_out in zip(channels[:-1], channels[1:]):
                layers += [Lin(c_in, c_out), LeakyReLU(negative_slope=0.2)]
            return Seq(*layers)

        # First conv's MLP takes 6 inputs (consistent with concatenated
        # [x_i, x_j - x_i] edge features for 3-D point positions).
        self.conv1 = DynamicEdgeConv(_edge_mlp([6, 64, 64, 64]), k=20, aggr='max')
        # Second conv sees 64-dim features, so edge features are 128-dim.
        self.conv2 = DynamicEdgeConv(_edge_mlp([128, 128, 128, 256]), k=20, aggr='max')
        self.lin0 = Lin(256, 512)
        self.lin1 = Lin(512, 256)
        self.lin2 = Lin(256, 256)
        self.lin3 = Lin(256, num_classes)

    def forward(self, pos, batch):
        """Return per-graph class log-probabilities for points `pos` grouped by `batch`."""
        feat = self.conv2(self.conv1(pos, batch), batch)
        # Per-point features -> one 512-dim vector per graph.
        feat = global_max_pool(F.relu(self.lin0(feat)), batch)
        feat = F.relu(self.lin1(feat))
        feat = F.relu(self.lin2(feat))
        feat = F.dropout(feat, p=0.5, training=self.training)
        return F.log_softmax(self.lin3(feat), dim=-1)
# Build the dataset and model, then launch the shared train/eval loop with
# the CLI hyper-parameters parsed above.
train_dataset, test_dataset = get_dataset(num_points=1024)
model = Net(train_dataset.num_classes)
run(train_dataset, test_dataset, model, args.epochs, args.batch_size, args.lr,
    args.lr_decay_factor, args.lr_decay_step_size, args.weight_decay)
| 33.966667 | 78 | 0.663395 | import argparse
import torch
import torch.nn.functional as F
from torch.nn import Sequential as Seq, Linear as Lin, ReLU, LeakyReLU
from torch_geometric.nn import DynamicEdgeConv, global_max_pool
from datasets import get_dataset
from train_eval import run
# Hyper-parameter CLI flags for the point-cloud benchmark runner.
parser = argparse.ArgumentParser()
parser.add_argument('--epochs', type=int, default=200)
parser.add_argument('--batch_size', type=int, default=24)
parser.add_argument('--lr', type=float, default=0.001)
parser.add_argument('--lr_decay_factor', type=float, default=0.5)
parser.add_argument('--lr_decay_step_size', type=int, default=50)
parser.add_argument('--weight_decay', type=float, default=0)
args = parser.parse_args()
class Net(torch.nn.Module):
    """DGCNN-style point-cloud classifier.

    Two DynamicEdgeConv stages (k=20 nearest neighbours, max aggregation)
    followed by a global max-pool and a fully-connected head emitting class
    log-probabilities.
    """
    def __init__(self, num_classes):
        super(Net, self).__init__()
        # Edge MLP of the first conv; its 6 inputs are consistent with
        # concatenated [x_i, x_j - x_i] features for 3-D point positions.
        nn = Seq(Lin(6, 64), LeakyReLU(negative_slope=0.2),
                 Lin(64, 64), LeakyReLU(negative_slope=0.2),
                 Lin(64, 64), LeakyReLU(negative_slope=0.2))
        self.conv1 = DynamicEdgeConv(nn, k=20, aggr='max')
        # Second conv sees 64-dim features, hence 128-dim edge features.
        nn = Seq(
            Lin(128, 128), LeakyReLU(negative_slope=0.2),
            Lin(128, 128), LeakyReLU(negative_slope=0.2),
            Lin(128, 256), LeakyReLU(negative_slope=0.2))
        self.conv2 = DynamicEdgeConv(nn, k=20, aggr='max')
        self.lin0 = Lin(256, 512)
        self.lin1 = Lin(512, 256)
        self.lin2 = Lin(256, 256)
        self.lin3 = Lin(256, num_classes)
    def forward(self, pos, batch):
        """Return per-graph class log-probabilities for points `pos` grouped by `batch`."""
        x = self.conv1(pos, batch)
        x = self.conv2(x, batch)
        x = F.relu(self.lin0(x))
        # Per-point features -> one vector per graph.
        x = global_max_pool(x, batch)
        x = F.relu(self.lin1(x))
        x = F.relu(self.lin2(x))
        x = F.dropout(x, p=0.5, training=self.training)
        x = self.lin3(x)
        return F.log_softmax(x, dim=-1)
# Build the dataset and model, then launch the shared train/eval loop with
# the CLI hyper-parameters parsed above.
train_dataset, test_dataset = get_dataset(num_points=1024)
model = Net(train_dataset.num_classes)
run(train_dataset, test_dataset, model, args.epochs, args.batch_size, args.lr,
    args.lr_decay_factor, args.lr_decay_step_size, args.weight_decay)
| true | true |
f71f895df82d2833eb823b4a18567f17d274743e | 1,316 | py | Python | torch2trt/converters/grid_sample.py | huliang2016/torch2trt_dynamic | aa55f354a742d26272eae93934d0cff7cd946cbf | [
"MIT"
] | null | null | null | torch2trt/converters/grid_sample.py | huliang2016/torch2trt_dynamic | aa55f354a742d26272eae93934d0cff7cd946cbf | [
"MIT"
] | null | null | null | torch2trt/converters/grid_sample.py | huliang2016/torch2trt_dynamic | aa55f354a742d26272eae93934d0cff7cd946cbf | [
"MIT"
] | null | null | null | from torch2trt.torch2trt import *
from torch2trt.plugins import *
@tensorrt_converter('torch.nn.functional.grid_sample')
def convert_grid_sample(ctx):
    """Convert `torch.nn.functional.grid_sample` into a TensorRT plugin layer.

    Reads the intercepted call's arguments from ``ctx``, translates the torch
    string options into the plugin's enum/int codes, and attaches the plugin
    output tensor to the torch output via ``_trt``.
    """
    sample_input = ctx.method_args[0]
    grid = get_arg(ctx, 'grid', pos=1, default=None)
    mode = get_arg(ctx, 'mode', pos=2, default='bilinear')
    padding_mode = get_arg(ctx, 'padding_mode', pos=3, default='zeros')
    align_corners = get_arg(ctx, 'align_corners', pos=4, default=False)
    output = ctx.method_return

    input_trt = trt_(ctx.network, sample_input)
    grid_trt = trt_(ctx.network, grid)

    # Torch interpolation mode -> TensorRT resize mode (others pass through).
    if mode == 'bilinear':
        mode = trt.ResizeMode.LINEAR
    elif mode == 'nearest':
        mode = trt.ResizeMode.NEAREST

    # Torch padding mode -> plugin int code (unknown strings pass through).
    padding_codes = {'zeros': 0, 'border': 1, 'reflection': 2}
    if padding_mode in padding_codes:
        padding_mode = padding_codes[padding_mode]

    plugin = create_gridsample_plugin("torch_gridsample_" + str(id(sample_input)),
                                      mode=mode,
                                      padding_mode=padding_mode,
                                      align_corners=align_corners)
    layer = ctx.network.add_plugin_v2(inputs=[input_trt, grid_trt],
                                      plugin=plugin)
    output._trt = layer.get_output(0)
from torch2trt.plugins import *
@tensorrt_converter('torch.nn.functional.grid_sample')
def convert_grid_sample(ctx):
    """Convert `torch.nn.functional.grid_sample` into a TensorRT plugin layer.

    Reads the intercepted call's arguments from ``ctx``, translates the torch
    string options into the plugin's enum/int codes, and attaches the plugin
    output tensor to the torch output via ``_trt``.

    NOTE(review): `align_corners` defaults to False here while torch's own
    grid_sample default is None — behaviorally equivalent in torch, but
    confirm the plugin treats them the same.
    """
    input = ctx.method_args[0]
    grid = get_arg(ctx, 'grid', pos=1, default=None)
    mode = get_arg(ctx, 'mode', pos=2, default='bilinear')
    padding_mode = get_arg(ctx, 'padding_mode', pos=3, default='zeros')
    align_corners = get_arg(ctx, 'align_corners', pos=4, default=False)
    output = ctx.method_return
    input_trt = trt_(ctx.network, input)
    grid_trt = trt_(ctx.network, grid)
    # Torch interpolation mode -> TensorRT resize mode (others pass through).
    if mode == 'bilinear':
        mode = trt.ResizeMode.LINEAR
    elif mode == 'nearest':
        mode = trt.ResizeMode.NEAREST
    # Torch padding mode -> plugin int code (unknown strings pass through).
    if padding_mode == 'zeros':
        padding_mode = 0
    elif padding_mode == 'border':
        padding_mode = 1
    elif padding_mode == 'reflection':
        padding_mode = 2
    plugin = create_gridsample_plugin("torch_gridsample_"+str(id(input)),
                                      mode=mode,
                                      padding_mode=padding_mode,
                                      align_corners=align_corners)
    layer = ctx.network.add_plugin_v2(
        inputs=[input_trt, grid_trt], plugin=plugin)
    output._trt = layer.get_output(0)
f71f89ceb3c12643c41afd03550daee9d2e132a8 | 658 | py | Python | main/lynx/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | 46 | 2021-06-10T02:27:32.000Z | 2022-03-27T11:33:24.000Z | main/lynx/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | 58 | 2021-07-03T13:58:20.000Z | 2022-03-13T16:45:35.000Z | main/lynx/template.py | RoastVeg/cports | 803c7f07af341eb32f791b6ec1f237edb2764bd5 | [
"BSD-2-Clause"
] | 6 | 2021-07-04T10:46:40.000Z | 2022-01-09T00:03:59.000Z | pkgname = "lynx"
# Upstream versions itself as "2.9.0dev.10"; _uver keeps that literal string
# for the source URL while pkgver uses the distro's "_pre" pre-release form.
pkgver = "2.9.0_pre10"
_uver = "2.9.0dev.10"
pkgrel = 0
build_style = "gnu_configure"
# Wide-character + IPv6 support, and zlib/bzip2/SSL transport backends.
configure_args = [
    "--enable-widec", "--enable-ipv6", "--with-zlib", "--with-bzlib",
    "--with-ssl"
]
hostmakedepends = ["pkgconf"]
makedepends = [
    "zlib-devel", "libbz2-devel", "ncurses-devel", "openssl-devel"
]
pkgdesc = "Text web browser"
maintainer = "q66 <q66@chimera-linux.org>"
license = "GPL-2.0-or-later"
url = "http://lynx.invisible-island.net"
source = f"http://invisible-mirror.net/archives/{pkgname}/tarballs/{pkgname}{_uver}.tar.bz2"
sha256 = "898ac82bcfcbd4b20ea39afdf66fd659b8773c7549623b0f8802bf392a41a912"
# Cross-compilation is disabled for this package.
options = ["!cross"]
| 31.333333 | 92 | 0.696049 | pkgname = "lynx"
# Upstream versions itself as "2.9.0dev.10"; _uver keeps that literal string
# for the source URL while pkgver uses the distro's "_pre" pre-release form.
pkgver = "2.9.0_pre10"
_uver = "2.9.0dev.10"
pkgrel = 0
build_style = "gnu_configure"
# Wide-character + IPv6 support, and zlib/bzip2/SSL transport backends.
configure_args = [
    "--enable-widec", "--enable-ipv6", "--with-zlib", "--with-bzlib",
    "--with-ssl"
]
hostmakedepends = ["pkgconf"]
makedepends = [
    "zlib-devel", "libbz2-devel", "ncurses-devel", "openssl-devel"
]
pkgdesc = "Text web browser"
maintainer = "q66 <q66@chimera-linux.org>"
license = "GPL-2.0-or-later"
url = "http://lynx.invisible-island.net"
source = f"http://invisible-mirror.net/archives/{pkgname}/tarballs/{pkgname}{_uver}.tar.bz2"
sha256 = "898ac82bcfcbd4b20ea39afdf66fd659b8773c7549623b0f8802bf392a41a912"
# Cross-compilation is disabled for this package.
options = ["!cross"]
| true | true |
f71f8a26e59f7652219836c7aac3c3072c01af66 | 5,150 | py | Python | conf.py | SmartDataProjects/dynamo-docs | dffc4e853ffd8bc1a1eabce8b34c1084412cb562 | [
"MIT"
] | null | null | null | conf.py | SmartDataProjects/dynamo-docs | dffc4e853ffd8bc1a1eabce8b34c1084412cb562 | [
"MIT"
] | null | null | null | conf.py | SmartDataProjects/dynamo-docs | dffc4e853ffd8bc1a1eabce8b34c1084412cb562 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Dynamo documentation build configuration file, created by
# sphinx-quickstart on Tue Jun 5 10:40:30 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Dynamo'
copyright = u'2018, Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'
author = u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'nature'
html_theme = 'classic'
#html_theme = 'agogo'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'rightsidebar': False
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Dynamodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Dynamo.tex', u'Dynamo Documentation',
u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'dynamo', u'Dynamo Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Dynamo', u'Dynamo Documentation',
     # Description field: fixed the duplicated word ("Data Data") in the
     # original autogenerated entry.
     author, 'Dynamo', 'A Dynamic Data Management System',
     'Miscellaneous'),
]
html_sidebars = { '**': ['globaltoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'] }
| 32.389937 | 121 | 0.687379 |
# Sphinx configuration for the Dynamo documentation build.
extensions = ['sphinx.ext.intersphinx']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Dynamo'
copyright = u'2018, Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'
author = u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus'
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0'
# The full version, including alpha/beta/rc tags.
release = u'1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
#html_theme = 'nature'
html_theme = 'classic'
#html_theme = 'agogo'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    'rightsidebar': False
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Dynamodoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Dynamo.tex', u'Dynamo Documentation',
     u'Yutaro Iiyama, Max Goncharov, Benedikt Maier, Daniel Abercrombie, Siddarth Narayanan, Christoph Paus', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'dynamo', u'Dynamo Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'Dynamo', u'Dynamo Documentation',
     author, 'Dynamo', 'A Dynamic Data Management System',  # fixed duplicated "Data Data"
     'Miscellaneous'),
]
# Per-page sidebar layout applied to every generated HTML page ('**').
html_sidebars = { '**': ['globaltoc.html', 'relations.html', 'sourcelink.html', 'searchbox.html'] }
| true | true |
f71f8a38750a1ac3b7381ca20272a675585b6e22 | 414 | py | Python | scanEngine/migrations/0006_auto_20200718_0429.py | Suprita-25/rengine | d6aabb49f27f7ad6039477c16a96213b0d80f81f | [
"MIT"
] | null | null | null | scanEngine/migrations/0006_auto_20200718_0429.py | Suprita-25/rengine | d6aabb49f27f7ad6039477c16a96213b0d80f81f | [
"MIT"
] | null | null | null | scanEngine/migrations/0006_auto_20200718_0429.py | Suprita-25/rengine | d6aabb49f27f7ad6039477c16a96213b0d80f81f | [
"MIT"
] | null | null | null | # Generated by Django 3.0.7 on 2020-07-18 04:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 3.0.7, 2020-07-18 04:29).

    Relaxes ``Wordlist.path`` to allow blank values with a '' default.
    """
    # Must apply after 0005 so the Wordlist model/table already exists.
    dependencies = [
        ('scanEngine', '0005_auto_20200718_0407'),
    ]
    # Alter Wordlist.path: blank allowed, default '', max length 200.
    operations = [
        migrations.AlterField(
            model_name='wordlist',
            name='path',
            field=models.CharField(blank=True, default='', max_length=200),
        ),
    ]
| 21.789474 | 75 | 0.60628 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration relaxing ``Wordlist.path``."""
    # Must apply after 0005 so the Wordlist model/table already exists.
    dependencies = [
        ('scanEngine', '0005_auto_20200718_0407'),
    ]
    # Alter Wordlist.path: blank allowed, default '', max length 200.
    operations = [
        migrations.AlterField(
            model_name='wordlist',
            name='path',
            field=models.CharField(blank=True, default='', max_length=200),
        ),
    ]
| true | true |
f71f8aad34c9f5bcb36563c0f477edba60015bc9 | 1,029 | py | Python | printing.py | shuckc/printerface | 1f3eeca4c4d090c119404fd354eac02a4f68a56b | [
"BSD-3-Clause"
] | 3 | 2017-02-03T18:29:35.000Z | 2020-02-19T14:46:05.000Z | printing.py | TeaEngineering/printerface | 1f3eeca4c4d090c119404fd354eac02a4f68a56b | [
"BSD-3-Clause"
] | 3 | 2015-08-03T12:01:25.000Z | 2015-12-26T13:52:18.000Z | printing.py | TeaEngineering/printerface | 1f3eeca4c4d090c119404fd354eac02a4f68a56b | [
"BSD-3-Clause"
] | 1 | 2016-03-21T13:45:34.000Z | 2016-03-21T13:45:34.000Z | #!/usr/bin/python
import sys
import subprocess
printers = []  # module-level cache of discovered printer queue names


def getPrinters():
    """Return the list of installed printer queue names.

    On non-Linux platforms a single ``'default'`` placeholder is returned.
    On Linux the list is obtained from ``lpstat -a`` on the first call and
    cached in the module-level ``printers`` list; failures to launch
    ``lpstat`` are logged and an empty list is returned.
    """
    global printers
    # Python 3 reports "linux" where Python 2 reported "linux2";
    # startswith() accepts both (the old == "linux2" test made this
    # function dead code on any Python 3 Linux host).
    if not sys.platform.startswith("linux"):
        return ['default']
    if printers:  # already discovered on a previous call
        return printers
    try:
        # universal_newlines=True decodes the output to str, so the
        # string splits below work on Python 3 (communicate() would
        # otherwise return bytes).
        process = subprocess.Popen(["lpstat", "-a"], stdout=subprocess.PIPE,
                                   universal_newlines=True)
        result = process.communicate()[0].strip()
        # Each lpstat line looks like:
        # KONICA_bizhub_192.168.12.10 accepting requests since Sun 16 Dec 2012 ...
        print(result)
        printers = [line.split(' ')[0] for line in result.split('\n')]
        print('[print] printers=%s' % repr(printers))
    except OSError as e:
        # lpstat missing or not executable: log and fall through empty.
        print('[print] %s' % repr(e))
    return printers
def printFile(file, printer):
    """Queue *file* on *printer* via the ``lpr`` command.

    Errors launching ``lpr`` are logged rather than raised; the command's
    captured output is logged on completion.
    """
    cmd = ["lpr", "-P", printer, file]
    print("[print] printer=%s file=%s cmd=%s" % (printer, file, repr(cmd)))
    try:
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        # Keep the real (stdout, stderr) pair: the original code clobbered
        # it with (None, None) before logging, discarding the output.
        results = process.communicate()
    except OSError as e:
        # lpr missing or not executable; record the failure instead of
        # propagating an uncaught exception to the caller.
        results = (None, None)
        print('[print] %s' % repr(e))
    print("[print] printer=%s file=%s cmd=%s result=%s" % (printer, file, repr(cmd), repr(results)))
if __name__ == "__main__":
    # Manual smoke test: enumerate and display the printers on this host.
    print('Installed printers: %s' % repr(getPrinters()))
| 30.264706 | 96 | 0.678328 |
import sys
import subprocess
printers = []  # cached queue names, filled on first successful lookup


def getPrinters():
    """Return cached printer names, querying ``lpstat -a`` on first use."""
    global printers
    if sys.platform != "linux2":
        return ['default']
    if printers:
        return printers
    try:
        proc = subprocess.Popen(["lpstat", "-a"], stdout=subprocess.PIPE)
        output = proc.communicate()[0].strip()
        print(output)
        names = []
        for line in output.split('\n'):
            names.append(line.split(' ')[0])
        printers = names
        print('[print] printers=%s' % repr(printers))
    except OSError as err:
        print('[print] %s' % repr(err))
    return printers
def printFile(file, printer):
    """Send *file* to *printer* through the ``lpr`` command."""
    cmd = ["lpr", "-P", printer, file]
    print("[print] printer=%s file=%s cmd=%s" % (printer, file, repr(cmd)))
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    results = proc.communicate()
    # The captured output is discarded before logging (matches the
    # original behaviour of this function).
    results = (None, None)
    print("[print] printer=%s file=%s cmd=%s result=%s" % (printer, file, repr(cmd), repr(results)))
if __name__ == "__main__":
    # Manual smoke test: enumerate and display the printers on this host.
    print('Installed printers: %s' % repr(getPrinters()))
| true | true |
f71f8acdefea0e50130dc14864f4cd1d3a47060b | 4,453 | py | Python | rpc/client.py | watermelonano/melonbot | 7ac8418020e63e340f1f6df13ad4e85d6c864cda | [
"MIT"
] | null | null | null | rpc/client.py | watermelonano/melonbot | 7ac8418020e63e340f1f6df13ad4e85d6c864cda | [
"MIT"
] | 1 | 2019-12-03T20:13:23.000Z | 2019-12-03T20:13:23.000Z | rpc/client.py | watermelonano/melonbot | 7ac8418020e63e340f1f6df13ad4e85d6c864cda | [
"MIT"
] | null | null | null | import aiohttp
import rapidjson as json
import socket
from config import Config
from typing import List, Tuple
class RPCClient(object):
    """Singleton async JSON-RPC client for the configured node.

    Obtain the shared instance via :meth:`instance`; direct construction
    raises RuntimeError. Node address and wallet id are read from Config
    once, when the singleton is first created.
    """

    _instance = None

    def __init__(self):
        # Disabled on purpose: use the singleton accessor instead.
        raise RuntimeError('Call instance() instead')

    @classmethod
    def instance(cls) -> 'RPCClient':
        """Return the shared client, building the HTTP session on first use."""
        if cls._instance is None:
            cls._instance = cls.__new__(cls)
            cls.node_url = Config.instance().node_url
            cls.node_port = Config.instance().node_port
            cls.wallet_id = Config.instance().wallet
            # '::' in the host means an IPv6 address; match the socket family.
            cls.ipv6 = '::' in cls.node_url
            cls.connector = aiohttp.TCPConnector(
                family=socket.AF_INET6 if cls.ipv6 else socket.AF_INET,
                resolver=aiohttp.AsyncResolver())
            cls.session = aiohttp.ClientSession(
                connector=cls.connector, json_serialize=json.dumps)
        return cls._instance

    @classmethod
    async def close(cls):
        """Close the shared HTTP session and reset the singleton."""
        if hasattr(cls, 'session') and cls.session is not None:
            await cls.session.close()
        if cls._instance is not None:
            cls._instance = None

    async def make_request(self, req_json: dict):
        """POST *req_json* to the node RPC endpoint and return decoded JSON."""
        async with self.session.post(
                "http://{0}:{1}".format(self.node_url, self.node_port),
                json=req_json, timeout=300) as resp:
            return await resp.json()

    async def account_create(self) -> str:
        """Create a new account in the wallet; return its address or None."""
        account_create = {
            'action': 'account_create',
            'wallet': self.wallet_id
        }
        respjson = await self.make_request(account_create)
        if 'account' in respjson:
            return respjson['account']
        return None

    async def account_balance(self, account: str) -> dict:
        """Return the balance response for *account*, or None on error."""
        account_balance = {
            'action': 'account_balance',
            'account': account
        }
        respjson = await self.make_request(account_balance)
        if 'balance' in respjson:
            return respjson
        return None

    async def send(self, id: str, source: str, destination: str, amount: str) -> str:
        """Make transaction, return hash if successful.

        *id* makes the send idempotent on the node side; None is returned
        when the node reports no block.
        """
        send_action = {
            'action': 'send',
            # Use the wallet id cached at singleton creation, consistent
            # with account_create (it is read from Config exactly once).
            'wallet': self.wallet_id,
            'source': source,
            'destination': destination,
            'amount': amount,
            'id': id
        }
        respjson = await self.make_request(send_action)
        if 'block' in respjson:
            return respjson['block']
        return None

    async def pending(self, account: str, count: int = 5) -> List[str]:
        """Return a list of pending block hashes for *account*, or None."""
        pending_action = {
            'action': 'pending',
            'account': account,
            'count': count
        }
        respjson = await self.make_request(pending_action)
        if 'blocks' in respjson:
            return respjson['blocks']
        return None

    async def receive(self, account: str, hash: str) -> str:
        """Receive a block and return hash of receive block if successful."""
        receive_action = {
            'action': 'receive',
            'wallet': self.wallet_id,
            'account': account,
            'block': hash
        }
        respjson = await self.make_request(receive_action)
        if 'block' in respjson:
            return respjson['block']
        return None

    async def account_info(self, account: str) -> dict:
        """Return account info including the representative, or None."""
        info_action = {
            'action': 'account_info',
            'account': account,
            'representative': True
        }
        respjson = await self.make_request(info_action)
        if 'error' not in respjson:
            return respjson
        return None

    async def account_representative_set(self, account: str, rep: str) -> str:
        """Set *rep* as representative for *account*; return the block hash."""
        rep_action = {
            "action": "account_representative_set",
            "wallet": self.wallet_id,
            "account": account,
            "representative": rep
        }
        respjson = await self.make_request(rep_action)
        if 'block' in respjson:
            return respjson['block']
        return None

    async def block_count(self) -> Tuple[int, int]:
        """Return block_count from the node as a (count, unchecked) tuple."""
        count_action = {
            "action": "block_count"
        }
        respjson = await self.make_request(count_action)
        if 'count' in respjson and 'unchecked' in respjson:
            return int(respjson['count']), int(respjson['unchecked'])
        return None, None
return None, None | 35.062992 | 137 | 0.587469 | import aiohttp
import rapidjson as json
import socket
from config import Config
from typing import List, Tuple
class RPCClient(object):
    """Singleton async JSON-RPC client for the configured node.

    Obtain the shared instance via :meth:`instance`; direct construction
    raises RuntimeError.
    """
    # Shared singleton storage.
    _instance = None
    def __init__(self):
        # Constructor is intentionally disabled to enforce the singleton.
        raise RuntimeError('Call instance() instead')
    @classmethod
    def instance(cls) -> 'RPCClient':
        """Return the shared client, building the HTTP session on first use."""
        if cls._instance is None:
            cls._instance = cls.__new__(cls)
            cls.node_url = Config.instance().node_url
            cls.node_port = Config.instance().node_port
            cls.wallet_id = Config.instance().wallet
            # '::' in the host means an IPv6 address; match the socket family.
            cls.ipv6 = '::' in cls.node_url
            cls.connector = aiohttp.TCPConnector(family=socket.AF_INET6 if cls.ipv6 else socket.AF_INET,resolver=aiohttp.AsyncResolver())
            cls.session = aiohttp.ClientSession(connector=cls.connector, json_serialize=json.dumps)
        return cls._instance
    @classmethod
    async def close(cls):
        """Close the shared HTTP session and reset the singleton."""
        if hasattr(cls, 'session') and cls.session is not None:
            await cls.session.close()
        if cls._instance is not None:
            cls._instance = None
    async def make_request(self, req_json: dict):
        """POST *req_json* to the node RPC endpoint and return decoded JSON."""
        async with self.session.post("http://{0}:{1}".format(self.node_url, self.node_port),json=req_json, timeout=300) as resp:
            return await resp.json()
    async def account_create(self) -> str:
        """Create a new account in the wallet; return its address or None."""
        account_create = {
            'action': 'account_create',
            'wallet': self.wallet_id
        }
        respjson = await self.make_request(account_create)
        if 'account' in respjson:
            return respjson['account']
        return None
    async def account_balance(self, account: str) -> dict:
        """Return the balance response for *account*, or None on error."""
        account_balance = {
            'action': 'account_balance',
            'account': account
        }
        respjson = await self.make_request(account_balance)
        if 'balance' in respjson:
            return respjson
        return None
    async def send(self, id: str, source: str, destination: str, amount: str) -> str:
        """Send *amount* from *source* to *destination*; return block hash.

        *id* makes the send idempotent on the node side; None is returned
        when the node reports no block.
        """
        send_action = {
            'action': 'send',
            'wallet': Config.instance().wallet,
            'source': source,
            'destination': destination,
            'amount': amount,
            'id': id
        }
        respjson = await self.make_request(send_action)
        if 'block' in respjson:
            return respjson['block']
        return None
    async def pending(self, account: str, count: int = 5) -> List[str]:
        """Return a list of pending block hashes for *account*, or None."""
        pending_action = {
            'action': 'pending',
            'account': account,
            'count': count
        }
        respjson = await self.make_request(pending_action)
        if 'blocks' in respjson:
            return respjson['blocks']
        return None
    async def receive(self, account: str, hash: str) -> str:
        """Receive pending block *hash* into *account*; return the new block."""
        receive_action = {
            'action': 'receive',
            'wallet': Config.instance().wallet,
            'account': account,
            'block': hash
        }
        respjson = await self.make_request(receive_action)
        if 'block' in respjson:
            return respjson['block']
        return None
    async def account_info(self, account: str) -> dict:
        """Return account info including the representative, or None on error."""
        info_action = {
            'action': 'account_info',
            'account': account,
            'representative': True
        }
        respjson = await self.make_request(info_action)
        if 'error' not in respjson:
            return respjson
        return None
    async def account_representative_set(self, account: str, rep: str) -> str:
        """Set *rep* as representative for *account*; return the block hash."""
        rep_action = {
            "action": "account_representative_set",
            "wallet": Config.instance().wallet,
            "account": account,
            "representative": rep
        }
        respjson = await self.make_request(rep_action)
        if 'block' in respjson:
            return respjson['block']
        return None
    async def block_count(self) -> Tuple[int, int]:
        """Return (count, unchecked) block counts from the node."""
        count_action = {
            "action": "block_count"
        }
        respjson = await self.make_request(count_action)
        if 'count' in respjson and 'unchecked' in respjson:
            return int(respjson['count']), int(respjson['unchecked'])
return None, None | true | true |
f71f8b0bf067a0ad1bfabdb4bd73bb6ce0671e67 | 2,071 | py | Python | ryu/tests/unit/packet/test_openflow.py | MrCocoaCat/ryu | 9e9571991a73380099b7ba7c6f37e0e587080a6a | [
"Apache-2.0"
] | null | null | null | ryu/tests/unit/packet/test_openflow.py | MrCocoaCat/ryu | 9e9571991a73380099b7ba7c6f37e0e587080a6a | [
"Apache-2.0"
] | null | null | null | ryu/tests/unit/packet/test_openflow.py | MrCocoaCat/ryu | 9e9571991a73380099b7ba7c6f37e0e587080a6a | [
"Apache-2.0"
] | null | null | null | # Copyright (C) 2017 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import logging
import os
import sys
import unittest
from nose.tools import eq_
from nose.tools import ok_
from ryu.lib import pcaplib
from ryu.lib.packet import openflow
from ryu.lib.packet import packet
from ryu.utils import binary_str
LOG = logging.getLogger(__name__)
OPENFLOW_DATA_DIR = os.path.join(
os.path.dirname(sys.modules[__name__].__file__),
'../../packet_data/pcap/')
class Test_openflow(unittest.TestCase):
"""
Test case for ryu.lib.packet.openflow.
"""
def test_pcap(self):
files = [
'openflow_flowmod',
'openflow_flowstats_req',
'openflow_invalid_version',
]
for f in files:
# print('*** testing %s ...' % f)
for _, buf in pcaplib.Reader(
open(OPENFLOW_DATA_DIR + f + '.pcap', 'rb')):
# Checks if message can be parsed as expected.
pkt = packet.Packet(buf)
openflow_pkt = pkt.get_protocol(openflow.openflow)
ok_(isinstance(openflow_pkt, openflow.openflow),
'Failed to parse OpenFlow message: %s' % pkt)
# Checks if message can be serialized as expected.
pkt.serialize()
eq_(buf, pkt.data,
"b'%s' != b'%s'" % (binary_str(buf), binary_str(pkt.data)))
| 31.861538 | 80 | 0.627233 |
from __future__ import print_function
import logging
import os
import sys
import unittest
from nose.tools import eq_
from nose.tools import ok_
from ryu.lib import pcaplib
from ryu.lib.packet import openflow
from ryu.lib.packet import packet
from ryu.utils import binary_str
LOG = logging.getLogger(__name__)
OPENFLOW_DATA_DIR = os.path.join(
os.path.dirname(sys.modules[__name__].__file__),
'../../packet_data/pcap/')
class Test_openflow(unittest.TestCase):
    """Round-trip tests for ryu.lib.packet.openflow using pcap fixtures."""
    def test_pcap(self):
        # Each name is a .pcap fixture file under OPENFLOW_DATA_DIR.
        files = [
            'openflow_flowmod',
            'openflow_flowstats_req',
            'openflow_invalid_version',
        ]
        for f in files:
            for _, buf in pcaplib.Reader(
                    open(OPENFLOW_DATA_DIR + f + '.pcap', 'rb')):
                # Parse: every captured frame must decode as OpenFlow.
                pkt = packet.Packet(buf)
                openflow_pkt = pkt.get_protocol(openflow.openflow)
                ok_(isinstance(openflow_pkt, openflow.openflow),
                    'Failed to parse OpenFlow message: %s' % pkt)
                # Serialize: re-encoding must reproduce the original bytes.
                pkt.serialize()
                eq_(buf, pkt.data,
                    "b'%s' != b'%s'" % (binary_str(buf), binary_str(pkt.data)))
| true | true |
f71f8bd977b164017df62a900a494bb42ac54683 | 2,444 | py | Python | tests/test_data_structures/test_linked_list.py | titus-ong/my-python-algorithms | d9eecf2846c0a7dd8978f11fec8e8f52be23f3bc | [
"MIT"
] | null | null | null | tests/test_data_structures/test_linked_list.py | titus-ong/my-python-algorithms | d9eecf2846c0a7dd8978f11fec8e8f52be23f3bc | [
"MIT"
] | null | null | null | tests/test_data_structures/test_linked_list.py | titus-ong/my-python-algorithms | d9eecf2846c0a7dd8978f11fec8e8f52be23f3bc | [
"MIT"
] | null | null | null | import pytest
from my_python_algorithms.data_structures.linked_list import LinkedList, Node
def test_node():
    """A fresh Node stores its value and has no successor."""
    n = Node(1)
    assert 1 == n.value
    assert None is n.next
def test_empty_ll():
    """An empty list has no head."""
    ll = LinkedList()
    assert None is ll.head
def test_ll_with_head():
    """Constructing with a value sets the head."""
    ll = LinkedList(1)
    assert 1 == ll.head.value
def test_append_with_no_head():
    """append() on an empty list creates the head."""
    ll = LinkedList()
    ll.append(1)
    assert 1 == ll.head.value
def test_append():
    """append() links a new node after the head."""
    ll = LinkedList(1)
    ll.append(2)
    assert 2 == ll.head.next.value
def test_indexing_1():
    """ll[0] returns the head value."""
    ll = LinkedList(1)
    assert 1 == ll[0]
def test_indexing_2():
    """ll[1] returns the second value."""
    ll = LinkedList(1)
    ll.append(2)
    assert 2 == ll[1]
def test_indexing_error_1():
    """Indexing an empty list raises IndexError."""
    ll = LinkedList()
    with pytest.raises(IndexError):
        ll[0]
def test_indexing_error_2():
    """Indexing past the end raises IndexError."""
    ll = LinkedList(1)
    ll.append(2)
    with pytest.raises(IndexError):
        ll[2]
def test_index():
    """index() returns the position of a value."""
    ll = LinkedList(1)
    assert 0 == ll.index(1)
def test_index_error_1():
    """index() raises ValueError on an empty list."""
    ll = LinkedList()
    with pytest.raises(ValueError):
        ll.index(1)
def test_index_error_2():
    """index() raises ValueError when the value is absent.

    This was previously a duplicate ``test_index_error_1`` definition,
    which shadowed the test above so pytest never collected it.
    """
    ll = LinkedList(1)
    with pytest.raises(ValueError):
        ll.index(2)
def test_insert_head():
    """insert() at position 0 replaces the head value."""
    ll = LinkedList(1)
    ll.insert(0, "hello")
    assert "hello" == ll[0]
def test_insert_1():
    """insert() in the middle shifts later elements right."""
    ll = LinkedList(1)
    ll.append(2)
    ll.append(3)
    ll.insert(1, "hello")
    assert 1 == ll[0]
    assert "hello" == ll[1]
    assert 2 == ll[2]
    assert 3 == ll[3]
def test_insert_2():
    """insert() at the end behaves like append()."""
    ll = LinkedList(1)
    ll.insert(1, 'hey')
    assert 'hey' == ll[1]
def test_insert_error_1():
    """insert() past the end of an empty list raises IndexError."""
    ll = LinkedList()
    with pytest.raises(IndexError):
        ll.insert(1, 1)
def test_insert_error_2():
    """insert() past the end of a one-element list raises IndexError."""
    ll = LinkedList(1)
    with pytest.raises(IndexError):
        ll.insert(2, 1)
def test_insert_error_3():
    """insert() past the end of a longer list raises IndexError."""
    ll = LinkedList(1)
    ll.append(2)
    ll.append(3)
    with pytest.raises(IndexError):
        ll.insert(4, "hey")
def test_delete_head():
    """delete(0) on a one-element list empties it."""
    ll = LinkedList(1)
    ll.delete(0)
    assert None is ll.head
def test_delete_1():
    """delete(0) promotes the next node to head."""
    ll = LinkedList(1)
    ll.append(2)
    ll.delete(0)
    assert 2 == ll[0]
    with pytest.raises(IndexError):
        ll[1]
def test_delete_error_1():
    """delete() on an empty list raises IndexError."""
    ll = LinkedList()
    with pytest.raises(IndexError):
        ll.delete(0)
def test_delete_error_2():
    """delete() past the end raises IndexError."""
    ll = LinkedList(1)
    ll.append(2)
    with pytest.raises(IndexError):
        ll.delete(3)
| 17.090909 | 77 | 0.614157 | import pytest
from my_python_algorithms.data_structures.linked_list import LinkedList, Node
def test_node():
    """A fresh Node stores its value and has no successor."""
    n = Node(1)
    assert 1 == n.value
    assert None is n.next
def test_empty_ll():
    """An empty list has no head."""
    ll = LinkedList()
    assert None is ll.head
def test_ll_with_head():
    """Constructing with a value sets the head."""
    ll = LinkedList(1)
    assert 1 == ll.head.value
def test_append_with_no_head():
    """append() on an empty list creates the head."""
    ll = LinkedList()
    ll.append(1)
    assert 1 == ll.head.value
def test_append():
    """append() links a new node after the head."""
    ll = LinkedList(1)
    ll.append(2)
    assert 2 == ll.head.next.value
def test_indexing_1():
    """ll[0] returns the head value."""
    ll = LinkedList(1)
    assert 1 == ll[0]
def test_indexing_2():
    """ll[1] returns the second value."""
    ll = LinkedList(1)
    ll.append(2)
    assert 2 == ll[1]
def test_indexing_error_1():
    """Indexing an empty list raises IndexError."""
    ll = LinkedList()
    with pytest.raises(IndexError):
        ll[0]
def test_indexing_error_2():
    """Indexing past the end raises IndexError."""
    ll = LinkedList(1)
    ll.append(2)
    with pytest.raises(IndexError):
        ll[2]
def test_index():
    """index() returns the position of a value."""
    ll = LinkedList(1)
    assert 0 == ll.index(1)
def test_index_error_1():
    """index() raises ValueError on an empty list."""
    ll = LinkedList()
    with pytest.raises(ValueError):
        ll.index(1)
def test_index_error_2():
    """index() raises ValueError when the value is absent.

    This was previously a duplicate ``test_index_error_1`` definition,
    which shadowed the test above so pytest never collected it.
    """
    ll = LinkedList(1)
    with pytest.raises(ValueError):
        ll.index(2)
def test_insert_head():
    """insert() at position 0 replaces the head value."""
    ll = LinkedList(1)
    ll.insert(0, "hello")
    assert "hello" == ll[0]
def test_insert_1():
    """insert() in the middle shifts later elements right."""
    ll = LinkedList(1)
    ll.append(2)
    ll.append(3)
    ll.insert(1, "hello")
    assert 1 == ll[0]
    assert "hello" == ll[1]
    assert 2 == ll[2]
    assert 3 == ll[3]
def test_insert_2():
    """insert() at the end behaves like append()."""
    ll = LinkedList(1)
    ll.insert(1, 'hey')
    assert 'hey' == ll[1]
def test_insert_error_1():
    """insert() past the end of an empty list raises IndexError."""
    ll = LinkedList()
    with pytest.raises(IndexError):
        ll.insert(1, 1)
def test_insert_error_2():
    """insert() past the end of a one-element list raises IndexError."""
    ll = LinkedList(1)
    with pytest.raises(IndexError):
        ll.insert(2, 1)
def test_insert_error_3():
    """insert() past the end of a longer list raises IndexError."""
    ll = LinkedList(1)
    ll.append(2)
    ll.append(3)
    with pytest.raises(IndexError):
        ll.insert(4, "hey")
def test_delete_head():
    """delete(0) on a one-element list empties it."""
    ll = LinkedList(1)
    ll.delete(0)
    assert None is ll.head
def test_delete_1():
    """delete(0) promotes the next node to head."""
    ll = LinkedList(1)
    ll.append(2)
    ll.delete(0)
    assert 2 == ll[0]
    with pytest.raises(IndexError):
        ll[1]
def test_delete_error_1():
    """delete() on an empty list raises IndexError."""
    ll = LinkedList()
    with pytest.raises(IndexError):
        ll.delete(0)
def test_delete_error_2():
    """delete() past the end raises IndexError."""
    ll = LinkedList(1)
    ll.append(2)
    with pytest.raises(IndexError):
        ll.delete(3)
| true | true |
f71f8c4271a58e5975430cb596344aa1a4927d19 | 3,169 | py | Python | homeassistant/components/device_tracker/bluetooth_tracker.py | shire210/home-assistant | 63cd8bbee6f1b74ae9c6c249ac820119a8a573d8 | [
"Apache-2.0"
] | 2 | 2017-02-25T00:27:06.000Z | 2017-02-25T03:09:30.000Z | homeassistant/components/device_tracker/bluetooth_tracker.py | shire210/home-assistant | 63cd8bbee6f1b74ae9c6c249ac820119a8a573d8 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/device_tracker/bluetooth_tracker.py | shire210/home-assistant | 63cd8bbee6f1b74ae9c6c249ac820119a8a573d8 | [
"Apache-2.0"
] | 2 | 2018-06-03T11:14:44.000Z | 2018-11-04T18:18:12.000Z | """Tracking for bluetooth devices."""
import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.components.device_tracker import (
YAML_DEVICES, CONF_TRACK_NEW, CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL,
load_config, PLATFORM_SCHEMA, DEFAULT_TRACK_NEW)
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['pybluez==0.22']
BT_PREFIX = 'BT_'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_TRACK_NEW): cv.boolean
})
def setup_scanner(hass, config, see, discovery_info=None):
    """Setup the Bluetooth Scanner.

    Loads known devices from the device registry, optionally discovers new
    ones, registers them via *see*, and reschedules itself every scan
    interval. Returns True on successful setup.
    """
    # pylint: disable=import-error
    import bluetooth

    def see_device(device):
        """Mark a (mac, name) device tuple as seen."""
        see(mac=BT_PREFIX + device[0], host_name=device[1])

    def discover_devices():
        """Discover bluetooth devices, returning (mac, name) tuples."""
        result = bluetooth.discover_devices(duration=8,
                                            lookup_names=True,
                                            flush_cache=True,
                                            lookup_class=False)
        # Lazy %-style args: the message is only formatted when debug
        # logging is actually enabled.
        _LOGGER.debug("Bluetooth devices discovered = %s", len(result))
        return result

    yaml_path = hass.config.path(YAML_DEVICES)
    devs_to_track = []
    devs_donot_track = []

    # Load all known devices.
    # We just need the devices so set consider_home and home range to 0.
    for device in load_config(yaml_path, hass, 0):
        # Check if device is a valid bluetooth device (BT_ mac prefix).
        if device.mac and device.mac[:3].upper() == BT_PREFIX:
            if device.track:
                devs_to_track.append(device.mac[3:])
            else:
                devs_donot_track.append(device.mac[3:])

    # If track new devices is true discover new devices on startup.
    track_new = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
    if track_new:
        for dev in discover_devices():
            if dev[0] not in devs_to_track and \
                    dev[0] not in devs_donot_track:
                devs_to_track.append(dev[0])
                see_device(dev)

    interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)

    def update_bluetooth(now):
        """Lookup bluetooth devices, update status, and reschedule."""
        try:
            if track_new:
                for dev in discover_devices():
                    if dev[0] not in devs_to_track and \
                            dev[0] not in devs_donot_track:
                        devs_to_track.append(dev[0])
            for mac in devs_to_track:
                _LOGGER.debug("Scanning %s", mac)
                result = bluetooth.lookup_name(mac, timeout=5)
                if not result:
                    # Could not lookup device name; retry next cycle.
                    continue
                see_device((mac, result))
        except bluetooth.BluetoothError:
            _LOGGER.exception('Error looking up bluetooth device!')
        # Reschedule even after errors so scanning keeps running.
        track_point_in_utc_time(
            hass, update_bluetooth, dt_util.utcnow() + interval)

    update_bluetooth(dt_util.utcnow())
    return True
| 34.824176 | 76 | 0.618492 | import logging
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant.components.device_tracker import (
YAML_DEVICES, CONF_TRACK_NEW, CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL,
load_config, PLATFORM_SCHEMA, DEFAULT_TRACK_NEW)
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ['pybluez==0.22']
BT_PREFIX = 'BT_'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_TRACK_NEW): cv.boolean
})
def setup_scanner(hass, config, see, discovery_info=None):
    """Set up the Bluetooth device scanner platform.

    Registers known/new classic-Bluetooth devices with the tracker via
    *see* and reschedules itself every scan interval. Returns True.
    """
    import bluetooth
    def see_device(device):
        # device is a (mac, name) tuple from discovery/name lookup.
        see(mac=BT_PREFIX + device[0], host_name=device[1])
    def discover_devices():
        # Inquiry scan; returns (mac, name) tuples.
        result = bluetooth.discover_devices(duration=8,
                                            lookup_names=True,
                                            flush_cache=True,
                                            lookup_class=False)
        _LOGGER.debug("Bluetooth devices discovered = " + str(len(result)))
        return result
    yaml_path = hass.config.path(YAML_DEVICES)
    devs_to_track = []
    devs_donot_track = []
    # Load known devices; consider_home / home range are irrelevant here,
    # hence the 0.
    for device in load_config(yaml_path, hass, 0):
        # Only macs carrying the BT_ prefix belong to this platform.
        if device.mac and device.mac[:3].upper() == BT_PREFIX:
            if device.track:
                devs_to_track.append(device.mac[3:])
            else:
                devs_donot_track.append(device.mac[3:])
    track_new = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
    if track_new:
        # Discover and register brand-new devices at startup.
        for dev in discover_devices():
            if dev[0] not in devs_to_track and \
                    dev[0] not in devs_donot_track:
                devs_to_track.append(dev[0])
                see_device(dev)
    interval = config.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
    def update_bluetooth(now):
        # Periodic task: look up every tracked device, then reschedule.
        try:
            if track_new:
                for dev in discover_devices():
                    if dev[0] not in devs_to_track and \
                            dev[0] not in devs_donot_track:
                        devs_to_track.append(dev[0])
            for mac in devs_to_track:
                _LOGGER.debug("Scanning " + mac)
                result = bluetooth.lookup_name(mac, timeout=5)
                if not result:
                    # Name lookup failed; try again on the next cycle.
                    continue
                see_device((mac, result))
        except bluetooth.BluetoothError:
            _LOGGER.exception('Error looking up bluetooth device!')
        # Reschedule even after errors so scanning keeps running.
        track_point_in_utc_time(
            hass, update_bluetooth, dt_util.utcnow() + interval)
    update_bluetooth(dt_util.utcnow())
    return True
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.