text
stringlengths 2
999k
|
|---|
"""
This files test the event rsvp functionality
"""
from __future__ import absolute_import
import unittest
import json
from app import app, db
# local imports
from config import app_config
from .helper_methods import Helper
class TestEventsDetails(unittest.TestCase, Helper):
    """This class represents the Events test case"""

    def setUp(self):
        """Set up test variables."""
        self.app = app
        self.app.config.from_object(app_config['testing'])
        # The factory is stored, not a client instance; tests call
        # self.client() to obtain a fresh test client per request.
        self.client = self.app.test_client
        # Payload presumably consumed by Helper.create_event() -- TODO confirm.
        self.new_event = {
            "event": "Barbecue party",
            "location": "nairobi",
            "category": "Food",
            "date": "12/12/2017"
        }
        # binds the app to the current context
        with self.app.app_context():
            # create all tables
            db.create_all()

    def test_rsvp_to_an_event(self):
        """Test API can RSVP to an event (POST request)"""
        access_token = self.get_auth_token()
        resp = self.create_event()
        self.assertEqual(resp.status_code, 201)
        # RSVP to the event created above (id == 1 on a fresh database).
        res = self.client().post(
            'api/events/1/rsvp/',
            headers=dict(Authorization="Bearer " + access_token),)
        self.assertEqual(res.status_code, 201)
        self.assertIn(
            'Thank you for registering to attend this event', str(res.data))

    def test_rsvp_to_an_event_more_than(self):
        """Test API can not rsvp more than once to an event (POST request)"""
        access_token = self.get_auth_token()
        resp = self.create_event()
        self.assertEqual(resp.status_code, 201)
        # First RSVP succeeds with 201.
        res = self.client().post(
            'api/events/1/rsvp/',
            headers=dict(Authorization="Bearer " + access_token),)
        self.assertEqual(res.status_code, 201)
        # A second RSVP by the same user is answered with 202 and a notice.
        new_res = self.client().post(
            'api/events/{}/rsvp/'.format(1),
            headers=dict(Authorization="Bearer " + access_token),)
        self.assertEqual(new_res.status_code, 202)
        self.assertIn("You have already RSVP", str(new_res.data))

    def tearDown(self):
        """teardown all initialized variables."""
        with self.app.app_context():
            # drop all tables
            db.session.remove()
            db.drop_all()
# Allow running this test module directly; test runners also discover it.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Anella 4.0 Orchestrator"""
from __future__ import print_function

from flask import Flask, Blueprint
from flask_restful import Api
# BUG FIX: the module was Python-2 only (`import ConfigParser` and a
# `print` statement are syntax/import errors on Python 3).  This version
# runs on both interpreters without changing any module-level names.
try:
    import configparser  # Python 3
except ImportError:
    import ConfigParser as configparser  # Python 2 fallback
from flask_mongoengine import MongoEngine
from api.service_instance import ServiceInstance
from api.service_instance import ServiceInstanceBilling

# Deployment settings are read once, at import time, from config.cfg.
CONFIG = configparser.RawConfigParser()
CONFIG.read('config.cfg')
HOST = CONFIG.get('flask', 'host')
PREFIX = CONFIG.get('flask', 'prefix')
API_VERSION = CONFIG.get('flask', 'version')
PORT = int(CONFIG.get('flask', 'port'))

APP = Flask(__name__)
API_V2_BP = Blueprint('api_v2', __name__)
API_V2 = Api(API_V2_BP)

# URL layout: '<prefix>/v<version>' is the mount point of the blueprint.
URL_PREFIX = '{prefix}/v{version}'.format(
    prefix=PREFIX,
    version=API_VERSION)
BASE_URL = 'http://localhost:{0}{1}'.format(PORT, URL_PREFIX)

API_V2.add_resource(ServiceInstance,
                    '/service/instance',
                    '/service/instance/<ns_id>',
                    '/service/instance/<ns_id>/state')
API_V2.add_resource(ServiceInstanceBilling,
                    '/service/instance/<ns_id>/billing/<idate>',
                    '/service/instance/<ns_id>/billing/<idate>/<fdate>')
APP.register_blueprint(
    API_V2_BP,
    url_prefix=URL_PREFIX
)

APP.config['MONGODB_SETTINGS'] = {'db': CONFIG.get('mongodb', 'database')}
DB = MongoEngine()
DB.init_app(APP)

if __name__ == "__main__":
    print("Industrial Platform 4.0 Orchestrator")
    APP.run(debug=True, host=HOST, port=PORT, threaded=True)
|
#!/usr/bin/env python
from nodes import Node
import lang_ast
class Goto(Node):
    """Node implementing restart/return control flow for the language."""

    # Single-character token spelling this node -- presumably matched by
    # the parser; TODO confirm.
    char = "r"
    # Presumably the number of stack arguments consumed (none here).
    args = 0

    def prepare(self, stack):
        # Raise GotoStart carrying the current stack; NOTE(review): looks
        # like the interpreter catches this to perform the jump -- confirm.
        raise lang_ast.GotoStart(stack)

    @Node.is_func
    def goto_start(self):
        """Goto the start of the program, keeping the same stack"""
        # Intentionally empty: the jump is driven by the GotoStart
        # exception raised in prepare().
        pass

    @Node.is_func
    def func_return(self):
        """If in a function, return the current stack"""
        pass
|
"""
This module lets you practice the ACCUMULATOR pattern
in its simplest classic forms:
SUMMING: total = total + number
Authors: David Mutchler, Dave Fisher, Vibha Alangar, Mark Hays, Amanda Stouder,
their colleagues and Gerardo Santana.
""" # Done: 1. PUT YOUR NAME IN THE ABOVE LINE.
def main():
    """Run every TEST function defined in this module, in order."""
    for run_test in (run_test_sum_powers, run_test_sum_powers_in_range):
        run_test()
def run_test_sum_powers():
    """Print expected versus actual results for three sum_powers cases."""
    print()
    print('--------------------------------------------------')
    print('Testing the sum_powers function:')
    print('--------------------------------------------------')
    # Each case is (expected, n, p); expectations worked out by hand:
    #   1+4+9+16 = 30;  ...+25 = 55;  ...+36 = 91.
    cases = ((30, 4, 2), (55, 5, 2), (91, 6, 2))
    for case_number, (expected, n, p) in enumerate(cases, 1):
        answer = sum_powers(n, p)
        print('Test {} expected:'.format(case_number), expected)
        print(' actual: ', answer)
def sum_powers(n, p):
    """
    What comes in: A non-negative integer n
      and a number p.
    What goes out: The sum 1**p + 2**p + 3**p + ... + n**p
      for the given numbers n and p. The latter may be any number
      (possibly a floating point number, and possibly negative).
    Side effects: None.
    Examples:
      -- sum_powers(5, -0.3) returns about 3.80826
      -- sum_powers(100, 0.1) returns about 144.45655
    """
    # FIX: the spec docstring used to sit AFTER the def as a dangling,
    # no-op string literal; it now documents the function properly.
    # Also avoids shadowing the builtin `sum`, which now does the
    # accumulation over k = 1..n inclusive (n == 0 correctly yields 0).
    return sum(k ** p for k in range(1, n + 1))
# ------------------------------------------------------------------
# Done: 3. Implement and test this function.
#   Note that you should write its TEST function first (above).
#
#   No fair running the code of sum_powers to GENERATE
#   test cases; that would defeat the purpose of TESTING!
# ------------------------------------------------------------------
def run_test_sum_powers_in_range():
    """ Tests the sum_powers_in_range function. """
    print()
    print('--------------------------------------------------')
    print('Testing the sum_powers_in_range function:')
    print('--------------------------------------------------')
    # FIX: expected values are now computed BY HAND from the spec (the sum
    # runs from m through n INCLUSIVE).  The previous expectations 99 and
    # 405 matched a buggy implementation that looped over range(n - 2) --
    # i.e. they were generated by running the code under test, which the
    # module's own instructions forbid.
    # Test 1: spec example.
    expected = 142.384776
    answer = sum_powers_in_range(3, 100, 0.1)
    print('Test 1 expected:', expected)
    print(' actual: ', answer)
    # Test 2: 2**3 + 3**3 + 4**3 + 5**3 = 8 + 27 + 64 + 125.
    expected = 224
    answer = sum_powers_in_range(2, 5, 3)
    print('Test 2 expected:', expected)
    print(' actual: ', answer)
    # Test 3: 4**3 + 5**3 = 64 + 125.
    expected = 189
    answer = sum_powers_in_range(4, 5, 3)
    print('Test 3 expected:', expected)
    print(' actual: ', answer)
def sum_powers_in_range(m, n, p):
    """
    What comes in: Non-negative integers m and n, with n >= m,
      and a number p.
    What goes out: the sum
         m**p + (m+1)**p + (m+2)**p + ... + n**p
      for the given numbers m, n and p. The latter may be any number
      (possibly a floating point number, and possibly negative).
    Side effects: None.
    Example:
      -- sum_powers_in_range(3, 100, 0.1) returns about 142.384776
    """
    # BUG FIX: the original looped `for k in range(n - 2)` adding (m+k)**p,
    # which produces the correct number of terms only when m == 3 (e.g. it
    # returned 99 instead of 224 for sum_powers_in_range(2, 5, 3)).  The
    # spec requires every integer k from m through n inclusive.  The spec
    # docstring also used to dangle AFTER the def as a no-op statement.
    return sum(k ** p for k in range(m, n + 1))
# ------------------------------------------------------------------
# Done: 5. Implement and test this function.
#   Note that you should write its TEST function first (above).
#
#   No fair running the code of sum_powers_in_range to GENERATE
#   test cases; that would defeat the purpose of TESTING!
# ------------------------------------------------------------------
# ----------------------------------------------------------------------
# Calls main to start the ball rolling.
# NOTE(review): runs unconditionally on import; the course template omits
# the usual ``if __name__ == '__main__'`` guard.
# ----------------------------------------------------------------------
main()
|
import enum
import os
try:
from psycopg2ct.compat import register
except ImportError:
pass
else:
register()
from pytest import fixture, yield_fixture
from sqlalchemy.engine import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.pool import NullPool
from sqlalchemy.schema import Column
from sqlalchemy.sql.expression import text
from sqlalchemy.types import Integer
from sqlalchemy_enum34 import Enum, EnumType
# Declarative base shared by all mapped classes in this test module.
Base = declarative_base()
# Session factory; bound to a per-test connection in the fx_session fixture.
Session = sessionmaker()
class Color(enum.Enum):
    """Sample enum; values are the one-character codes stored by-value."""
    red = 'r'
    green = 'g'
    blue = 'b'
class ColorTable(Base):
    """Mapped table exercising both storage strategies of the Enum type."""

    id = Column(Integer, primary_key=True)
    # Stores Color members by .value ('r', 'g', 'b') -- see the raw-SQL
    # filter "= 'r'" in test_enum_by_value.
    color_by_val = Column(
        Enum(Color, name='color_by_val'),
        nullable=True
    )
    # Stores Color members by .name ('red', 'green', 'blue').
    color_by_name = Column(
        Enum(Color, by_name=True, name='color_by_name'),
        nullable=True
    )
    __tablename__ = 'tb_color'
# Optional whitespace-separated list of extra database URLs to test
# against; an unset variable yields an empty list, exactly as before
# (''.split() == []).
database_urls = os.environ.get('TEST_DATABASE_URLS', '').split()
@fixture(scope='function', params=['sqlite://'] + database_urls)
def fx_engine(request):
    """Engine parametrized over in-memory SQLite plus any configured URLs."""
    eng = create_engine(request.param, poolclass=NullPool)
    # Dispose the engine once the test using it has finished.
    request.addfinalizer(eng.dispose)
    return eng
# FIX: pytest.yield_fixture has been deprecated since pytest 3.0 and was
# removed in pytest 6.2; plain @fixture supports yield-style fixtures.
@fixture
def fx_connection(fx_engine):
    """Yield a connection inside a transaction that is always rolled back.

    The schema is created on the connection, the test runs, and the
    rollback undoes everything so each backend stays pristine.
    """
    connection = fx_engine.connect()
    try:
        transaction = connection.begin()
        try:
            metadata = Base.metadata
            metadata.create_all(bind=connection)
            yield connection
        finally:
            transaction.rollback()
    finally:
        connection.close()
# FIX: pytest.yield_fixture has been deprecated since pytest 3.0 and was
# removed in pytest 6.2; plain @fixture supports yield-style fixtures.
@fixture
def fx_session(fx_connection):
    """Yield an ORM session bound to the rolled-back test connection."""
    session = Session(bind=fx_connection)
    try:
        yield session
    finally:
        session.close()
@fixture
def fx_red(fx_session):
    """A flushed ColorTable row with both enum columns set to Color.red."""
    row = ColorTable(color_by_val=Color.red, color_by_name=Color.red)
    fx_session.add(row)
    fx_session.flush()
    return row
@fixture
def fx_green(fx_session):
    """A flushed ColorTable row with both enum columns set to Color.green."""
    row = ColorTable(color_by_val=Color.green, color_by_name=Color.green)
    fx_session.add(row)
    fx_session.flush()
    return row
@fixture
def fx_blue(fx_session):
    """A flushed ColorTable row with both enum columns set to Color.blue."""
    row = ColorTable(color_by_val=Color.blue, color_by_name=Color.blue)
    fx_session.add(row)
    fx_session.flush()
    return row
@fixture
def fx_null(fx_session):
    """A flushed ColorTable row with both enum columns left as NULL."""
    row = ColorTable(color_by_val=None, color_by_name=None)
    fx_session.add(row)
    fx_session.flush()
    return row
def test_enum_by_value(fx_session, fx_blue, fx_red):
    """By-value column: query via enum member and via the raw stored code."""
    result = fx_session.query(ColorTable) \
        .filter_by(color_by_val=Color.blue) \
        .one()
    assert fx_blue is result
    # FIX: plain-string filters were deprecated in SQLAlchemy 1.0 and
    # removed in 1.4; textual SQL must be wrapped in text().
    result2 = fx_session.query(ColorTable) \
        .filter(text("tb_color.color_by_val = 'r'")) \
        .one()
    assert fx_red is result2
def test_enum_by_name(fx_session, fx_green, fx_blue):
    """By-name column: query via enum member and via the raw stored name."""
    result = fx_session.query(ColorTable) \
        .filter_by(color_by_name=Color.green) \
        .one()
    assert fx_green is result
    # FIX: plain-string filters were deprecated in SQLAlchemy 1.0 and
    # removed in 1.4; textual SQL must be wrapped in text().
    result2 = fx_session.query(ColorTable) \
        .filter(text("tb_color.color_by_name = 'blue'")) \
        .one()
    assert fx_blue is result2
def test_null_by_value(fx_session, fx_null):
    """NULL by-value column matches both None and SQL `is null`."""
    result = fx_session.query(ColorTable) \
        .filter_by(color_by_val=None) \
        .one()
    assert fx_null is result
    # FIX: plain-string filters were deprecated in SQLAlchemy 1.0 and
    # removed in 1.4; textual SQL must be wrapped in text().
    result2 = fx_session.query(ColorTable) \
        .filter(text("tb_color.color_by_val is null")) \
        .one()
    assert fx_null is result2
def test_null_by_name(fx_session, fx_null):
    """NULL by-name column matches both None and SQL `is null`."""
    result = fx_session.query(ColorTable) \
        .filter_by(color_by_name=None) \
        .one()
    assert fx_null is result
    # FIX: plain-string filters were deprecated in SQLAlchemy 1.0 and
    # removed in 1.4; textual SQL must be wrapped in text().
    result2 = fx_session.query(ColorTable) \
        .filter(text("tb_color.color_by_name is null")) \
        .one()
    assert fx_null is result2
def test_enum_is_enum_type():
    """`Enum` is exported as a straight alias of `EnumType`."""
    assert EnumType is Enum
|
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import unittest
import mock
def _create_signing_credentials():
    """Return a Mock specced to satisfy both Credentials and Signing."""
    import google.auth.credentials

    # Combines the two interfaces so isinstance/spec checks pass for both.
    class _SigningCredentials(
            google.auth.credentials.Credentials,
            google.auth.credentials.Signing):
        pass

    credentials = mock.Mock(spec=_SigningCredentials)
    return credentials
class Test_Bucket(unittest.TestCase):
def _make_one(self, client=None, name=None, properties=None):
    """Build a Bucket for tests; a stub client is created when none given."""
    from google.cloud.storage.bucket import Bucket
    if client is None:
        # _Connection/_Client are test doubles presumably defined later in
        # this module (outside this view) -- TODO confirm.
        connection = _Connection()
        client = _Client(connection)
    bucket = Bucket(client, name=name)
    # Seed server-side metadata directly, bypassing any API round trip.
    bucket._properties = properties or {}
    return bucket
def test_ctor(self):
    """Constructor stores name/properties and wires up unloaded ACLs."""
    NAME = 'name'
    properties = {'key': 'value'}
    bucket = self._make_one(name=NAME, properties=properties)
    self.assertEqual(bucket.name, NAME)
    self.assertEqual(bucket._properties, properties)
    # Neither ACL has been fetched yet; both point back at the bucket.
    self.assertFalse(bucket._acl.loaded)
    self.assertIs(bucket._acl.bucket, bucket)
    self.assertFalse(bucket._default_object_acl.loaded)
    self.assertIs(bucket._default_object_acl.bucket, bucket)
def test_blob(self):
    """bucket.blob() forwards name, chunk size and encryption key to Blob."""
    from google.cloud.storage.blob import Blob
    BUCKET_NAME = 'BUCKET_NAME'
    BLOB_NAME = 'BLOB_NAME'
    CHUNK_SIZE = 1024 * 1024
    KEY = b'01234567890123456789012345678901'  # 32 bytes
    bucket = self._make_one(name=BUCKET_NAME)
    blob = bucket.blob(
        BLOB_NAME, chunk_size=CHUNK_SIZE, encryption_key=KEY)
    self.assertIsInstance(blob, Blob)
    # The new blob is tied to this bucket and shares its client.
    self.assertIs(blob.bucket, bucket)
    self.assertIs(blob.client, bucket.client)
    self.assertEqual(blob.name, BLOB_NAME)
    self.assertEqual(blob.chunk_size, CHUNK_SIZE)
    self.assertEqual(blob._encryption_key, KEY)
def test_bucket_name_value(self):
    """Bucket names must not start or end with a slash."""
    bucket_name = 'testing123'
    mixin = self._make_one(name=bucket_name)
    self.assertEqual(mixin.name, bucket_name)
    # Leading slash is rejected at construction time.
    bad_start_bucket_name = '/testing123'
    with self.assertRaises(ValueError):
        self._make_one(name=bad_start_bucket_name)
    # Trailing slash is rejected as well.
    bad_end_bucket_name = 'testing123/'
    with self.assertRaises(ValueError):
        self._make_one(name=bad_end_bucket_name)
def test_exists_miss(self):
    """exists() returns False when the API raises NotFound."""
    from google.cloud.exceptions import NotFound

    class _FakeConnection(object):
        # Records (args, kwargs) of every api_request call, then 404s.
        _called_with = []

        @classmethod
        def api_request(cls, *args, **kwargs):
            cls._called_with.append((args, kwargs))
            raise NotFound(args)

    BUCKET_NAME = 'bucket-name'
    bucket = self._make_one(name=BUCKET_NAME)
    client = _Client(_FakeConnection)
    self.assertFalse(bucket.exists(client=client))
    # The probe requests only the 'name' field of the bucket resource.
    expected_called_kwargs = {
        'method': 'GET',
        'path': bucket.path,
        'query_params': {
            'fields': 'name',
        },
        '_target_object': None,
    }
    expected_cw = [((), expected_called_kwargs)]
    self.assertEqual(_FakeConnection._called_with, expected_cw)
def test_exists_hit(self):
    """exists() returns True when the API request succeeds."""
    class _FakeConnection(object):
        # Records (args, kwargs) of every api_request call, then succeeds.
        _called_with = []

        @classmethod
        def api_request(cls, *args, **kwargs):
            cls._called_with.append((args, kwargs))
            # exists() does not use the return value
            return object()

    BUCKET_NAME = 'bucket-name'
    bucket = self._make_one(name=BUCKET_NAME)
    client = _Client(_FakeConnection)
    self.assertTrue(bucket.exists(client=client))
    # The probe requests only the 'name' field of the bucket resource.
    expected_called_kwargs = {
        'method': 'GET',
        'path': bucket.path,
        'query_params': {
            'fields': 'name',
        },
        '_target_object': None,
    }
    expected_cw = [((), expected_called_kwargs)]
    self.assertEqual(_FakeConnection._called_with, expected_cw)
def test_create_hit(self):
BUCKET_NAME = 'bucket-name'
DATA = {'name': BUCKET_NAME}
connection = _Connection(DATA)
PROJECT = 'PROJECT'
client = _Client(connection, project=PROJECT)
bucket = self._make_one(client=client, name=BUCKET_NAME)
bucket.create()
kw, = connection._requested
self.assertEqual(kw['method'], 'POST')
self.assertEqual(kw['path'], '/b')
self.assertEqual(kw['query_params'], {'project': PROJECT})
self.assertEqual(kw['data'], DATA)
def test_create_w_extra_properties(self):
BUCKET_NAME = 'bucket-name'
PROJECT = 'PROJECT'
CORS = [{
'maxAgeSeconds': 60,
'methods': ['*'],
'origin': ['https://example.com/frontend'],
'responseHeader': ['X-Custom-Header'],
}]
LIFECYCLE_RULES = [{
"action": {"type": "Delete"},
"condition": {"age": 365}
}]
LOCATION = 'eu'
STORAGE_CLASS = 'NEARLINE'
DATA = {
'name': BUCKET_NAME,
'cors': CORS,
'lifecycle': {'rule': LIFECYCLE_RULES},
'location': LOCATION,
'storageClass': STORAGE_CLASS,
'versioning': {'enabled': True},
}
connection = _Connection(DATA)
client = _Client(connection, project=PROJECT)
bucket = self._make_one(client=client, name=BUCKET_NAME)
bucket.cors = CORS
bucket.lifecycle_rules = LIFECYCLE_RULES
bucket.location = LOCATION
bucket.storage_class = STORAGE_CLASS
bucket.versioning_enabled = True
bucket.create()
kw, = connection._requested
self.assertEqual(kw['method'], 'POST')
self.assertEqual(kw['path'], '/b')
self.assertEqual(kw['query_params'], {'project': PROJECT})
self.assertEqual(kw['data'], DATA)
def test_acl_property(self):
from google.cloud.storage.acl import BucketACL
bucket = self._make_one()
acl = bucket.acl
self.assertIsInstance(acl, BucketACL)
self.assertIs(acl, bucket._acl)
def test_default_object_acl_property(self):
from google.cloud.storage.acl import DefaultObjectACL
bucket = self._make_one()
acl = bucket.default_object_acl
self.assertIsInstance(acl, DefaultObjectACL)
self.assertIs(acl, bucket._default_object_acl)
def test_path_no_name(self):
bucket = self._make_one()
self.assertRaises(ValueError, getattr, bucket, 'path')
def test_path_w_name(self):
NAME = 'name'
bucket = self._make_one(name=NAME)
self.assertEqual(bucket.path, '/b/%s' % NAME)
def test_get_blob_miss(self):
NAME = 'name'
NONESUCH = 'nonesuch'
connection = _Connection()
client = _Client(connection)
bucket = self._make_one(name=NAME)
result = bucket.get_blob(NONESUCH, client=client)
self.assertIsNone(result)
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH))
def test_get_blob_hit(self):
NAME = 'name'
BLOB_NAME = 'blob-name'
connection = _Connection({'name': BLOB_NAME})
client = _Client(connection)
bucket = self._make_one(name=NAME)
blob = bucket.get_blob(BLOB_NAME, client=client)
self.assertIs(blob.bucket, bucket)
self.assertEqual(blob.name, BLOB_NAME)
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME))
def test_list_blobs_defaults(self):
NAME = 'name'
connection = _Connection({'items': []})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
iterator = bucket.list_blobs()
blobs = list(iterator)
self.assertEqual(blobs, [])
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], '/b/%s/o' % NAME)
self.assertEqual(kw['query_params'], {'projection': 'noAcl'})
def test_list_blobs_w_all_arguments(self):
NAME = 'name'
MAX_RESULTS = 10
PAGE_TOKEN = 'ABCD'
PREFIX = 'subfolder'
DELIMITER = '/'
VERSIONS = True
PROJECTION = 'full'
FIELDS = 'items/contentLanguage,nextPageToken'
EXPECTED = {
'maxResults': 10,
'pageToken': PAGE_TOKEN,
'prefix': PREFIX,
'delimiter': DELIMITER,
'versions': VERSIONS,
'projection': PROJECTION,
'fields': FIELDS,
}
connection = _Connection({'items': []})
client = _Client(connection)
bucket = self._make_one(name=NAME)
iterator = bucket.list_blobs(
max_results=MAX_RESULTS,
page_token=PAGE_TOKEN,
prefix=PREFIX,
delimiter=DELIMITER,
versions=VERSIONS,
projection=PROJECTION,
fields=FIELDS,
client=client,
)
blobs = list(iterator)
self.assertEqual(blobs, [])
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], '/b/%s/o' % NAME)
self.assertEqual(kw['query_params'], EXPECTED)
def test_list_blobs(self):
NAME = 'name'
connection = _Connection({'items': []})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
iterator = bucket.list_blobs()
blobs = list(iterator)
self.assertEqual(blobs, [])
kw, = connection._requested
self.assertEqual(kw['method'], 'GET')
self.assertEqual(kw['path'], '/b/%s/o' % NAME)
self.assertEqual(kw['query_params'], {'projection': 'noAcl'})
def test_delete_miss(self):
from google.cloud.exceptions import NotFound
NAME = 'name'
connection = _Connection()
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
self.assertRaises(NotFound, bucket.delete)
expected_cw = [{
'method': 'DELETE',
'path': bucket.path,
'_target_object': None,
}]
self.assertEqual(connection._deleted_buckets, expected_cw)
def test_delete_hit(self):
NAME = 'name'
GET_BLOBS_RESP = {'items': []}
connection = _Connection(GET_BLOBS_RESP)
connection._delete_bucket = True
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
result = bucket.delete(force=True)
self.assertIsNone(result)
expected_cw = [{
'method': 'DELETE',
'path': bucket.path,
'_target_object': None,
}]
self.assertEqual(connection._deleted_buckets, expected_cw)
def test_delete_force_delete_blobs(self):
NAME = 'name'
BLOB_NAME1 = 'blob-name1'
BLOB_NAME2 = 'blob-name2'
GET_BLOBS_RESP = {
'items': [
{'name': BLOB_NAME1},
{'name': BLOB_NAME2},
],
}
DELETE_BLOB1_RESP = DELETE_BLOB2_RESP = {}
connection = _Connection(GET_BLOBS_RESP, DELETE_BLOB1_RESP,
DELETE_BLOB2_RESP)
connection._delete_bucket = True
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
result = bucket.delete(force=True)
self.assertIsNone(result)
expected_cw = [{
'method': 'DELETE',
'path': bucket.path,
'_target_object': None,
}]
self.assertEqual(connection._deleted_buckets, expected_cw)
def test_delete_force_miss_blobs(self):
NAME = 'name'
BLOB_NAME = 'blob-name1'
GET_BLOBS_RESP = {'items': [{'name': BLOB_NAME}]}
# Note the connection does not have a response for the blob.
connection = _Connection(GET_BLOBS_RESP)
connection._delete_bucket = True
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
result = bucket.delete(force=True)
self.assertIsNone(result)
expected_cw = [{
'method': 'DELETE',
'path': bucket.path,
'_target_object': None,
}]
self.assertEqual(connection._deleted_buckets, expected_cw)
def test_delete_too_many(self):
NAME = 'name'
BLOB_NAME1 = 'blob-name1'
BLOB_NAME2 = 'blob-name2'
GET_BLOBS_RESP = {
'items': [
{'name': BLOB_NAME1},
{'name': BLOB_NAME2},
],
}
connection = _Connection(GET_BLOBS_RESP)
connection._delete_bucket = True
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
# Make the Bucket refuse to delete with 2 objects.
bucket._MAX_OBJECTS_FOR_ITERATION = 1
self.assertRaises(ValueError, bucket.delete, force=True)
self.assertEqual(connection._deleted_buckets, [])
def test_delete_blob_miss(self):
from google.cloud.exceptions import NotFound
NAME = 'name'
NONESUCH = 'nonesuch'
connection = _Connection()
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
self.assertRaises(NotFound, bucket.delete_blob, NONESUCH)
kw, = connection._requested
self.assertEqual(kw['method'], 'DELETE')
self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, NONESUCH))
def test_delete_blob_hit(self):
NAME = 'name'
BLOB_NAME = 'blob-name'
connection = _Connection({})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
result = bucket.delete_blob(BLOB_NAME)
self.assertIsNone(result)
kw, = connection._requested
self.assertEqual(kw['method'], 'DELETE')
self.assertEqual(kw['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME))
def test_delete_blobs_empty(self):
NAME = 'name'
connection = _Connection()
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
bucket.delete_blobs([])
self.assertEqual(connection._requested, [])
def test_delete_blobs_hit(self):
NAME = 'name'
BLOB_NAME = 'blob-name'
connection = _Connection({})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
bucket.delete_blobs([BLOB_NAME])
kw = connection._requested
self.assertEqual(len(kw), 1)
self.assertEqual(kw[0]['method'], 'DELETE')
self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME))
def test_delete_blobs_miss_no_on_error(self):
from google.cloud.exceptions import NotFound
NAME = 'name'
BLOB_NAME = 'blob-name'
NONESUCH = 'nonesuch'
connection = _Connection({})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
self.assertRaises(NotFound, bucket.delete_blobs, [BLOB_NAME, NONESUCH])
kw = connection._requested
self.assertEqual(len(kw), 2)
self.assertEqual(kw[0]['method'], 'DELETE')
self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME))
self.assertEqual(kw[1]['method'], 'DELETE')
self.assertEqual(kw[1]['path'], '/b/%s/o/%s' % (NAME, NONESUCH))
def test_delete_blobs_miss_w_on_error(self):
NAME = 'name'
BLOB_NAME = 'blob-name'
NONESUCH = 'nonesuch'
connection = _Connection({})
client = _Client(connection)
bucket = self._make_one(client=client, name=NAME)
errors = []
bucket.delete_blobs([BLOB_NAME, NONESUCH], errors.append)
self.assertEqual(errors, [NONESUCH])
kw = connection._requested
self.assertEqual(len(kw), 2)
self.assertEqual(kw[0]['method'], 'DELETE')
self.assertEqual(kw[0]['path'], '/b/%s/o/%s' % (NAME, BLOB_NAME))
self.assertEqual(kw[1]['method'], 'DELETE')
self.assertEqual(kw[1]['path'], '/b/%s/o/%s' % (NAME, NONESUCH))
def test_copy_blobs_wo_name(self):
SOURCE = 'source'
DEST = 'dest'
BLOB_NAME = 'blob-name'
class _Blob(object):
name = BLOB_NAME
path = '/b/%s/o/%s' % (SOURCE, BLOB_NAME)
connection = _Connection({})
client = _Client(connection)
source = self._make_one(client=client, name=SOURCE)
dest = self._make_one(client=client, name=DEST)
blob = _Blob()
new_blob = source.copy_blob(blob, dest)
self.assertIs(new_blob.bucket, dest)
self.assertEqual(new_blob.name, BLOB_NAME)
kw, = connection._requested
COPY_PATH = '/b/%s/o/%s/copyTo/b/%s/o/%s' % (SOURCE, BLOB_NAME,
DEST, BLOB_NAME)
self.assertEqual(kw['method'], 'POST')
self.assertEqual(kw['path'], COPY_PATH)
def test_copy_blobs_preserve_acl(self):
from google.cloud.storage.acl import ObjectACL
SOURCE = 'source'
DEST = 'dest'
BLOB_NAME = 'blob-name'
NEW_NAME = 'new_name'
BLOB_PATH = '/b/%s/o/%s' % (SOURCE, BLOB_NAME)
NEW_BLOB_PATH = '/b/%s/o/%s' % (DEST, NEW_NAME)
COPY_PATH = '/b/%s/o/%s/copyTo/b/%s/o/%s' % (SOURCE, BLOB_NAME,
DEST, NEW_NAME)
class _Blob(object):
name = BLOB_NAME
path = BLOB_PATH
connection = _Connection({}, {})
client = _Client(connection)
source = self._make_one(client=client, name=SOURCE)
dest = self._make_one(client=client, name=DEST)
blob = _Blob()
new_blob = source.copy_blob(blob, dest, NEW_NAME, client=client,
preserve_acl=False)
self.assertIs(new_blob.bucket, dest)
self.assertEqual(new_blob.name, NEW_NAME)
self.assertIsInstance(new_blob.acl, ObjectACL)
kw = connection._requested
self.assertEqual(len(kw), 2)
self.assertEqual(kw[0]['method'], 'POST')
self.assertEqual(kw[0]['path'], COPY_PATH)
self.assertEqual(kw[1]['method'], 'PATCH')
self.assertEqual(kw[1]['path'], NEW_BLOB_PATH)
def test_copy_blobs_w_name(self):
SOURCE = 'source'
DEST = 'dest'
BLOB_NAME = 'blob-name'
NEW_NAME = 'new_name'
class _Blob(object):
name = BLOB_NAME
path = '/b/%s/o/%s' % (SOURCE, BLOB_NAME)
connection = _Connection({})
client = _Client(connection)
source = self._make_one(client=client, name=SOURCE)
dest = self._make_one(client=client, name=DEST)
blob = _Blob()
new_blob = source.copy_blob(blob, dest, NEW_NAME)
self.assertIs(new_blob.bucket, dest)
self.assertEqual(new_blob.name, NEW_NAME)
kw, = connection._requested
COPY_PATH = '/b/%s/o/%s/copyTo/b/%s/o/%s' % (SOURCE, BLOB_NAME,
DEST, NEW_NAME)
self.assertEqual(kw['method'], 'POST')
self.assertEqual(kw['path'], COPY_PATH)
def test_rename_blob(self):
BUCKET_NAME = 'BUCKET_NAME'
BLOB_NAME = 'blob-name'
NEW_BLOB_NAME = 'new-blob-name'
DATA = {'name': NEW_BLOB_NAME}
connection = _Connection(DATA)
client = _Client(connection)
bucket = self._make_one(client=client, name=BUCKET_NAME)
class _Blob(object):
def __init__(self, name, bucket_name):
self.name = name
self.path = '/b/%s/o/%s' % (bucket_name, name)
self._deleted = []
def delete(self, client=None):
self._deleted.append(client)
blob = _Blob(BLOB_NAME, BUCKET_NAME)
renamed_blob = bucket.rename_blob(blob, NEW_BLOB_NAME, client=client)
self.assertIs(renamed_blob.bucket, bucket)
self.assertEqual(renamed_blob.name, NEW_BLOB_NAME)
self.assertEqual(blob._deleted, [client])
def test_etag(self):
ETAG = 'ETAG'
properties = {'etag': ETAG}
bucket = self._make_one(properties=properties)
self.assertEqual(bucket.etag, ETAG)
def test_id(self):
ID = 'ID'
properties = {'id': ID}
bucket = self._make_one(properties=properties)
self.assertEqual(bucket.id, ID)
def test_location_getter(self):
NAME = 'name'
before = {'location': 'AS'}
bucket = self._make_one(name=NAME, properties=before)
self.assertEqual(bucket.location, 'AS')
def test_location_setter(self):
NAME = 'name'
bucket = self._make_one(name=NAME)
self.assertIsNone(bucket.location)
bucket.location = 'AS'
self.assertEqual(bucket.location, 'AS')
self.assertTrue('location' in bucket._changes)
def test_lifecycle_rules_getter(self):
NAME = 'name'
LC_RULE = {'action': {'type': 'Delete'}, 'condition': {'age': 42}}
rules = [LC_RULE]
properties = {'lifecycle': {'rule': rules}}
bucket = self._make_one(name=NAME, properties=properties)
self.assertEqual(bucket.lifecycle_rules, rules)
# Make sure it's a copy
self.assertIsNot(bucket.lifecycle_rules, rules)
def test_lifecycle_rules_setter(self):
NAME = 'name'
LC_RULE = {'action': {'type': 'Delete'}, 'condition': {'age': 42}}
rules = [LC_RULE]
bucket = self._make_one(name=NAME)
self.assertEqual(bucket.lifecycle_rules, [])
bucket.lifecycle_rules = rules
self.assertEqual(bucket.lifecycle_rules, rules)
self.assertTrue('lifecycle' in bucket._changes)
def test_cors_getter(self):
NAME = 'name'
CORS_ENTRY = {
'maxAgeSeconds': 1234,
'method': ['OPTIONS', 'GET'],
'origin': ['127.0.0.1'],
'responseHeader': ['Content-Type'],
}
properties = {'cors': [CORS_ENTRY, {}]}
bucket = self._make_one(name=NAME, properties=properties)
entries = bucket.cors
self.assertEqual(len(entries), 2)
self.assertEqual(entries[0], CORS_ENTRY)
self.assertEqual(entries[1], {})
# Make sure it was a copy, not the same object.
self.assertIsNot(entries[0], CORS_ENTRY)
def test_cors_setter(self):
NAME = 'name'
CORS_ENTRY = {
'maxAgeSeconds': 1234,
'method': ['OPTIONS', 'GET'],
'origin': ['127.0.0.1'],
'responseHeader': ['Content-Type'],
}
bucket = self._make_one(name=NAME)
self.assertEqual(bucket.cors, [])
bucket.cors = [CORS_ENTRY]
self.assertEqual(bucket.cors, [CORS_ENTRY])
self.assertTrue('cors' in bucket._changes)
def test_get_logging_w_prefix(self):
NAME = 'name'
LOG_BUCKET = 'logs'
LOG_PREFIX = 'pfx'
before = {
'logging': {
'logBucket': LOG_BUCKET,
'logObjectPrefix': LOG_PREFIX,
},
}
bucket = self._make_one(name=NAME, properties=before)
info = bucket.get_logging()
self.assertEqual(info['logBucket'], LOG_BUCKET)
self.assertEqual(info['logObjectPrefix'], LOG_PREFIX)
def test_enable_logging_defaults(self):
NAME = 'name'
LOG_BUCKET = 'logs'
before = {'logging': None}
bucket = self._make_one(name=NAME, properties=before)
self.assertIsNone(bucket.get_logging())
bucket.enable_logging(LOG_BUCKET)
info = bucket.get_logging()
self.assertEqual(info['logBucket'], LOG_BUCKET)
self.assertEqual(info['logObjectPrefix'], '')
def test_enable_logging(self):
NAME = 'name'
LOG_BUCKET = 'logs'
LOG_PFX = 'pfx'
before = {'logging': None}
bucket = self._make_one(name=NAME, properties=before)
self.assertIsNone(bucket.get_logging())
bucket.enable_logging(LOG_BUCKET, LOG_PFX)
info = bucket.get_logging()
self.assertEqual(info['logBucket'], LOG_BUCKET)
self.assertEqual(info['logObjectPrefix'], LOG_PFX)
def test_disable_logging(self):
NAME = 'name'
before = {'logging': {'logBucket': 'logs', 'logObjectPrefix': 'pfx'}}
bucket = self._make_one(name=NAME, properties=before)
self.assertIsNotNone(bucket.get_logging())
bucket.disable_logging()
self.assertIsNone(bucket.get_logging())
def test_metageneration(self):
METAGENERATION = 42
properties = {'metageneration': METAGENERATION}
bucket = self._make_one(properties=properties)
self.assertEqual(bucket.metageneration, METAGENERATION)
def test_metageneration_unset(self):
bucket = self._make_one()
self.assertIsNone(bucket.metageneration)
def test_metageneration_string_val(self):
METAGENERATION = 42
properties = {'metageneration': str(METAGENERATION)}
bucket = self._make_one(properties=properties)
self.assertEqual(bucket.metageneration, METAGENERATION)
def test_owner(self):
OWNER = {'entity': 'project-owner-12345', 'entityId': '23456'}
properties = {'owner': OWNER}
bucket = self._make_one(properties=properties)
owner = bucket.owner
self.assertEqual(owner['entity'], 'project-owner-12345')
self.assertEqual(owner['entityId'], '23456')
def test_project_number(self):
    """project_number reflects an integer 'projectNumber' property."""
    bucket = self._make_one(properties={'projectNumber': 12345})
    self.assertEqual(bucket.project_number, 12345)
def test_project_number_unset(self):
    """project_number is None when the property is absent."""
    self.assertIsNone(self._make_one().project_number)
def test_project_number_string_val(self):
    """A string-typed 'projectNumber' is coerced to int by the getter."""
    bucket = self._make_one(properties={'projectNumber': '12345'})
    self.assertEqual(bucket.project_number, 12345)
def test_self_link(self):
    """self_link exposes the 'selfLink' property unchanged."""
    link = 'http://example.com/self/'
    bucket = self._make_one(properties={'selfLink': link})
    self.assertEqual(bucket.self_link, link)
def test_storage_class_getter(self):
    """storage_class exposes the 'storageClass' property unchanged.

    The previous fixture value was a URL copy-pasted from the selfLink
    test; use a realistic storage-class token so the fixture reads
    correctly (the getter is a pass-through either way).
    """
    STORAGE_CLASS = 'NEARLINE'
    properties = {'storageClass': STORAGE_CLASS}
    bucket = self._make_one(properties=properties)
    self.assertEqual(bucket.storage_class, STORAGE_CLASS)
def test_storage_class_setter_invalid(self):
    """Assigning an unknown storage class raises and records no change."""
    bucket = self._make_one(name='name')
    with self.assertRaises(ValueError):
        bucket.storage_class = 'BOGUS'
    self.assertNotIn('storageClass', bucket._changes)
def test_storage_class_setter_STANDARD(self):
    """'STANDARD' is a legal storage class and is tracked as a change."""
    bucket = self._make_one(name='name')
    bucket.storage_class = 'STANDARD'
    self.assertEqual(bucket.storage_class, 'STANDARD')
    self.assertIn('storageClass', bucket._changes)
def test_storage_class_setter_NEARLINE(self):
    """'NEARLINE' is a legal storage class and is tracked as a change."""
    bucket = self._make_one(name='name')
    bucket.storage_class = 'NEARLINE'
    self.assertEqual(bucket.storage_class, 'NEARLINE')
    self.assertIn('storageClass', bucket._changes)
def test_storage_class_setter_COLDLINE(self):
    """'COLDLINE' is a legal storage class and is tracked as a change."""
    bucket = self._make_one(name='name')
    bucket.storage_class = 'COLDLINE'
    self.assertEqual(bucket.storage_class, 'COLDLINE')
    self.assertIn('storageClass', bucket._changes)
def test_storage_class_setter_MULTI_REGIONAL(self):
    """'MULTI_REGIONAL' is a legal storage class and is tracked as a change."""
    bucket = self._make_one(name='name')
    bucket.storage_class = 'MULTI_REGIONAL'
    self.assertEqual(bucket.storage_class, 'MULTI_REGIONAL')
    self.assertIn('storageClass', bucket._changes)
def test_storage_class_setter_REGIONAL(self):
    """'REGIONAL' is a legal storage class and is tracked as a change."""
    bucket = self._make_one(name='name')
    bucket.storage_class = 'REGIONAL'
    self.assertEqual(bucket.storage_class, 'REGIONAL')
    self.assertIn('storageClass', bucket._changes)
def test_storage_class_setter_DURABLE_REDUCED_AVAILABILITY(self):
    """'DURABLE_REDUCED_AVAILABILITY' is accepted and tracked as a change."""
    bucket = self._make_one(name='name')
    bucket.storage_class = 'DURABLE_REDUCED_AVAILABILITY'
    self.assertEqual(bucket.storage_class, 'DURABLE_REDUCED_AVAILABILITY')
    self.assertIn('storageClass', bucket._changes)
def test_time_created(self):
    """time_created parses the RFC3339 'timeCreated' property into a datetime."""
    from google.cloud._helpers import _RFC3339_MICROS
    from google.cloud._helpers import UTC
    timestamp = datetime.datetime(2014, 11, 5, 20, 34, 37, tzinfo=UTC)
    properties = {'timeCreated': timestamp.strftime(_RFC3339_MICROS)}
    bucket = self._make_one(properties=properties)
    self.assertEqual(bucket.time_created, timestamp)
def test_time_created_unset(self):
    """time_created is None when the property is absent."""
    self.assertIsNone(self._make_one().time_created)
def test_versioning_enabled_getter_missing(self):
    """versioning_enabled defaults to False when the property is unset."""
    bucket = self._make_one(name='name')
    # assertEqual (not assertFalse) pins the exact boolean value.
    self.assertEqual(bucket.versioning_enabled, False)
def test_versioning_enabled_getter(self):
    """versioning_enabled reads the nested 'versioning.enabled' flag."""
    initial = {'versioning': {'enabled': True}}
    bucket = self._make_one(name='name', properties=initial)
    self.assertEqual(bucket.versioning_enabled, True)
def test_versioning_enabled_setter(self):
    """Assigning versioning_enabled flips the value reported by the getter."""
    bucket = self._make_one(name='name')
    self.assertFalse(bucket.versioning_enabled)
    bucket.versioning_enabled = True
    self.assertTrue(bucket.versioning_enabled)
def test_configure_website_defaults(self):
    """configure_website() with no args writes None for both website fields."""
    expected = {'website': {'mainPageSuffix': None,
                            'notFoundPage': None}}
    bucket = self._make_one(name='name')
    bucket.configure_website()
    self.assertEqual(bucket._properties, expected)
def test_configure_website(self):
    """configure_website() stores the main-page suffix and the 404 page."""
    expected = {'website': {'mainPageSuffix': 'html',
                            'notFoundPage': '404.html'}}
    bucket = self._make_one(name='name')
    bucket.configure_website('html', '404.html')
    self.assertEqual(bucket._properties, expected)
def test_disable_website(self):
    """disable_website() resets both website fields to None."""
    expected = {'website': {'mainPageSuffix': None,
                            'notFoundPage': None}}
    bucket = self._make_one(name='name')
    bucket.disable_website()
    self.assertEqual(bucket._properties, expected)
def test_get_iam_policy(self):
    """get_iam_policy() GETs '<bucket path>/iam' and parses the response
    into a ``Policy`` with etag, version, and role -> members bindings."""
    from google.cloud.storage.iam import STORAGE_OWNER_ROLE
    from google.cloud.storage.iam import STORAGE_EDITOR_ROLE
    from google.cloud.storage.iam import STORAGE_VIEWER_ROLE
    from google.cloud.iam import Policy
    NAME = 'name'
    PATH = '/b/%s' % (NAME,)
    ETAG = 'DEADBEEF'
    VERSION = 17
    OWNER1 = 'user:phred@example.com'
    OWNER2 = 'group:cloud-logs@google.com'
    EDITOR1 = 'domain:google.com'
    EDITOR2 = 'user:phred@example.com'
    VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com'
    VIEWER2 = 'user:phred@example.com'
    # Canned server response replayed by the stub connection.
    RETURNED = {
        'resourceId': PATH,
        'etag': ETAG,
        'version': VERSION,
        'bindings': [
            {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]},
            {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]},
            {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]},
        ],
    }
    # A Policy maps each role to the *set* of its members.
    EXPECTED = {
        binding['role']: set(binding['members'])
        for binding in RETURNED['bindings']}
    connection = _Connection(RETURNED)
    client = _Client(connection, None)
    bucket = self._make_one(client=client, name=NAME)
    policy = bucket.get_iam_policy()
    self.assertIsInstance(policy, Policy)
    self.assertEqual(policy.etag, RETURNED['etag'])
    self.assertEqual(policy.version, RETURNED['version'])
    self.assertEqual(dict(policy), EXPECTED)
    # Exactly one HTTP request: GET on the bucket's /iam sub-resource.
    kw = connection._requested
    self.assertEqual(len(kw), 1)
    self.assertEqual(kw[0]['method'], 'GET')
    self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,))
def test_set_iam_policy(self):
    """set_iam_policy() PUTs the serialized policy to '<bucket path>/iam'
    and returns the policy echoed back by the server."""
    import operator
    from google.cloud.storage.iam import STORAGE_OWNER_ROLE
    from google.cloud.storage.iam import STORAGE_EDITOR_ROLE
    from google.cloud.storage.iam import STORAGE_VIEWER_ROLE
    from google.cloud.iam import Policy
    NAME = 'name'
    PATH = '/b/%s' % (NAME,)
    ETAG = 'DEADBEEF'
    VERSION = 17
    OWNER1 = 'user:phred@example.com'
    OWNER2 = 'group:cloud-logs@google.com'
    EDITOR1 = 'domain:google.com'
    EDITOR2 = 'user:phred@example.com'
    VIEWER1 = 'serviceAccount:1234-abcdef@service.example.com'
    VIEWER2 = 'user:phred@example.com'
    BINDINGS = [
        {'role': STORAGE_OWNER_ROLE, 'members': [OWNER1, OWNER2]},
        {'role': STORAGE_EDITOR_ROLE, 'members': [EDITOR1, EDITOR2]},
        {'role': STORAGE_VIEWER_ROLE, 'members': [VIEWER1, VIEWER2]},
    ]
    # Canned server response echoing the bindings back.
    RETURNED = {
        'etag': ETAG,
        'version': VERSION,
        'bindings': BINDINGS,
    }
    # Build the client-side policy being sent.
    policy = Policy()
    for binding in BINDINGS:
        policy[binding['role']] = binding['members']
    connection = _Connection(RETURNED)
    client = _Client(connection, None)
    bucket = self._make_one(client=client, name=NAME)
    returned = bucket.set_iam_policy(policy)
    self.assertEqual(returned.etag, ETAG)
    self.assertEqual(returned.version, VERSION)
    self.assertEqual(dict(returned), dict(policy))
    # Exactly one PUT carrying the serialized policy.
    kw = connection._requested
    self.assertEqual(len(kw), 1)
    self.assertEqual(kw[0]['method'], 'PUT')
    self.assertEqual(kw[0]['path'], '%s/iam' % (PATH,))
    sent = kw[0]['data']
    self.assertEqual(sent['resourceId'], PATH)
    self.assertEqual(len(sent['bindings']), len(BINDINGS))
    # Compare bindings role-by-role, ignoring serialization order.
    key = operator.itemgetter('role')
    for found, expected in zip(
            sorted(sent['bindings'], key=key),
            sorted(BINDINGS, key=key)):
        self.assertEqual(found['role'], expected['role'])
        self.assertEqual(
            sorted(found['members']), sorted(expected['members']))
def test_test_iam_permissions(self):
    """test_iam_permissions() GETs /iam/testPermissions and returns
    only the subset of permissions the server says are granted."""
    from google.cloud.storage.iam import STORAGE_OBJECTS_LIST
    from google.cloud.storage.iam import STORAGE_BUCKETS_GET
    from google.cloud.storage.iam import STORAGE_BUCKETS_UPDATE
    name = 'name'
    path = '/b/%s' % (name,)
    requested = [
        STORAGE_OBJECTS_LIST,
        STORAGE_BUCKETS_GET,
        STORAGE_BUCKETS_UPDATE,
    ]
    # The server grants everything except the first permission.
    granted = requested[1:]
    connection = _Connection({'permissions': granted})
    client = _Client(connection, None)
    bucket = self._make_one(client=client, name=name)
    self.assertEqual(bucket.test_iam_permissions(requested), granted)
    kw = connection._requested
    self.assertEqual(len(kw), 1)
    self.assertEqual(kw[0]['method'], 'GET')
    self.assertEqual(kw[0]['path'], '%s/iam/testPermissions' % (path,))
    self.assertEqual(kw[0]['query_params'], {'permissions': requested})
def test_make_public_defaults(self):
    """make_public() PATCHes the bucket ACL to include allUsers:READER,
    leaving the default object ACL untouched."""
    from google.cloud.storage.acl import _ACLEntity
    NAME = 'name'
    permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}]
    after = {'acl': permissive, 'defaultObjectAcl': []}
    connection = _Connection(after)
    client = _Client(connection)
    bucket = self._make_one(client=client, name=NAME)
    # Mark both ACLs as loaded so no reload round-trips are issued.
    bucket.acl.loaded = True
    bucket.default_object_acl.loaded = True
    bucket.make_public()
    self.assertEqual(list(bucket.acl), permissive)
    self.assertEqual(list(bucket.default_object_acl), [])
    # Exactly one PATCH against the bucket resource, full projection.
    kw = connection._requested
    self.assertEqual(len(kw), 1)
    self.assertEqual(kw[0]['method'], 'PATCH')
    self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
    self.assertEqual(kw[0]['data'], {'acl': after['acl']})
    self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
def _make_public_w_future_helper(self, default_object_acl_loaded=True):
    """Shared driver for make_public(future=True).

    When the default object ACL is not yet loaded, an extra GET is
    issued to reload it before the second PATCH.
    """
    from google.cloud.storage.acl import _ACLEntity
    NAME = 'name'
    permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}]
    after1 = {'acl': permissive, 'defaultObjectAcl': []}
    after2 = {'acl': permissive, 'defaultObjectAcl': permissive}
    if default_object_acl_loaded:
        num_requests = 2
        connection = _Connection(after1, after2)
    else:
        num_requests = 3
        # We return the same value for default_object_acl.reload()
        # to consume.
        connection = _Connection(after1, after1, after2)
    client = _Client(connection)
    bucket = self._make_one(client=client, name=NAME)
    bucket.acl.loaded = True
    bucket.default_object_acl.loaded = default_object_acl_loaded
    bucket.make_public(future=True)
    self.assertEqual(list(bucket.acl), permissive)
    self.assertEqual(list(bucket.default_object_acl), permissive)
    kw = connection._requested
    self.assertEqual(len(kw), num_requests)
    # First request: PATCH of the bucket ACL.
    self.assertEqual(kw[0]['method'], 'PATCH')
    self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
    self.assertEqual(kw[0]['data'], {'acl': permissive})
    self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
    if not default_object_acl_loaded:
        # Middle request: reload of the default object ACL.
        self.assertEqual(kw[1]['method'], 'GET')
        self.assertEqual(kw[1]['path'], '/b/%s/defaultObjectAcl' % NAME)
    # Last could be 1 or 2 depending on `default_object_acl_loaded`.
    self.assertEqual(kw[-1]['method'], 'PATCH')
    self.assertEqual(kw[-1]['path'], '/b/%s' % NAME)
    self.assertEqual(kw[-1]['data'], {'defaultObjectAcl': permissive})
    self.assertEqual(kw[-1]['query_params'], {'projection': 'full'})
def test_make_public_w_future(self):
    """future=True with an already-loaded default object ACL: two PATCHes."""
    self._make_public_w_future_helper(default_object_acl_loaded=True)
def test_make_public_w_future_reload_default(self):
    """future=True reloads the default object ACL when it is not loaded."""
    self._make_public_w_future_helper(default_object_acl_loaded=False)
def test_make_public_recursive(self):
    """make_public(recursive=True) also grants READ on each listed blob."""
    from google.cloud.storage.acl import _ACLEntity
    _saved = []

    # Minimal blob stand-in that records ACL mutations into `_saved`.
    class _Blob(object):
        _granted = False

        def __init__(self, bucket, name):
            self._bucket = bucket
            self._name = name

        @property
        def acl(self):
            return self

        # Faux ACL methods
        def all(self):
            return self

        def grant_read(self):
            self._granted = True

        def save(self, client=None):
            _saved.append(
                (self._bucket, self._name, self._granted, client))

    def item_to_blob(self, item):
        return _Blob(self.bucket, item['name'])

    NAME = 'name'
    BLOB_NAME = 'blob-name'
    permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}]
    after = {'acl': permissive, 'defaultObjectAcl': []}
    # Second canned response is the blob listing for the recursion.
    connection = _Connection(after, {'items': [{'name': BLOB_NAME}]})
    client = _Client(connection)
    bucket = self._make_one(client=client, name=NAME)
    bucket.acl.loaded = True
    bucket.default_object_acl.loaded = True
    # Swap in the stub blob factory while the listing is iterated.
    with mock.patch('google.cloud.storage.bucket._item_to_blob',
                    new=item_to_blob):
        bucket.make_public(recursive=True)
    self.assertEqual(list(bucket.acl), permissive)
    self.assertEqual(list(bucket.default_object_acl), [])
    self.assertEqual(_saved, [(bucket, BLOB_NAME, True, None)])
    # Two requests: the ACL PATCH, then the blob listing GET.
    kw = connection._requested
    self.assertEqual(len(kw), 2)
    self.assertEqual(kw[0]['method'], 'PATCH')
    self.assertEqual(kw[0]['path'], '/b/%s' % NAME)
    self.assertEqual(kw[0]['data'], {'acl': permissive})
    self.assertEqual(kw[0]['query_params'], {'projection': 'full'})
    self.assertEqual(kw[1]['method'], 'GET')
    self.assertEqual(kw[1]['path'], '/b/%s/o' % NAME)
    max_results = bucket._MAX_OBJECTS_FOR_ITERATION + 1
    self.assertEqual(kw[1]['query_params'],
                     {'maxResults': max_results, 'projection': 'full'})
def test_make_public_recursive_too_many(self):
    """make_public(recursive=True) refuses when the listing exceeds the cap."""
    from google.cloud.storage.acl import _ACLEntity
    permissive = [{'entity': 'allUsers', 'role': _ACLEntity.READER_ROLE}]
    patched = {'acl': permissive, 'defaultObjectAcl': []}
    listing = {
        'items': [
            {'name': 'blob-name1'},
            {'name': 'blob-name2'},
        ],
    }
    connection = _Connection(patched, listing)
    client = _Client(connection)
    bucket = self._make_one(client=client, name='name')
    bucket.acl.loaded = True
    bucket.default_object_acl.loaded = True
    # Make the Bucket refuse to make_public with 2 objects.
    bucket._MAX_OBJECTS_FOR_ITERATION = 1
    with self.assertRaises(ValueError):
        bucket.make_public(recursive=True)
def test_page_empty_response(self):
    """An empty page yields no blobs and collects no prefixes."""
    from google.cloud.iterator import Page
    client = _Client(_Connection())
    bucket = self._make_one(client=client, name='name')
    iterator = bucket.list_blobs()
    empty_page = Page(iterator, (), None)
    iterator._page = empty_page
    self.assertEqual(list(empty_page), [])
    self.assertEqual(iterator.prefixes, set())
def test_page_non_empty_response(self):
    """A page with one item yields a Blob and records the page's prefixes."""
    import six
    from google.cloud.storage.blob import Blob
    blob_name = 'blob-name'
    payload = {'items': [{'name': blob_name}], 'prefixes': ['foo']}
    client = _Client(_Connection())
    bucket = self._make_one(client=client, name='name')
    iterator = bucket.list_blobs()
    # Short-circuit the API round-trip with a canned payload.
    iterator._get_next_page_response = lambda: payload
    page = six.next(iterator.pages)
    self.assertEqual(page.prefixes, ('foo',))
    self.assertEqual(page.num_items, 1)
    blob = six.next(page)
    self.assertEqual(page.remaining, 0)
    self.assertIsInstance(blob, Blob)
    self.assertEqual(blob.name, blob_name)
    self.assertEqual(iterator.prefixes, set(['foo']))
def test_cumulative_prefixes(self):
    """iterator.prefixes accumulates prefixes across multiple pages."""
    import six
    from google.cloud.storage.blob import Blob
    BLOB_NAME = 'blob-name1'
    # First page: one item, one prefix, and a continuation token.
    response1 = {
        'items': [{'name': BLOB_NAME}],
        'prefixes': ['foo'],
        'nextPageToken': 's39rmf9',
    }
    # Second (final) page: empty, but contributes another prefix.
    response2 = {
        'items': [],
        'prefixes': ['bar'],
    }
    connection = _Connection()
    client = _Client(connection)
    name = 'name'
    bucket = self._make_one(client=client, name=name)
    responses = [response1, response2]

    def dummy_response():
        return responses.pop(0)

    iterator = bucket.list_blobs()
    iterator._get_next_page_response = dummy_response
    # Parse first response.
    pages_iter = iterator.pages
    page1 = six.next(pages_iter)
    self.assertEqual(page1.prefixes, ('foo',))
    self.assertEqual(page1.num_items, 1)
    blob = six.next(page1)
    self.assertEqual(page1.remaining, 0)
    self.assertIsInstance(blob, Blob)
    self.assertEqual(blob.name, BLOB_NAME)
    self.assertEqual(iterator.prefixes, set(['foo']))
    # Parse second response.
    page2 = six.next(pages_iter)
    self.assertEqual(page2.prefixes, ('bar',))
    self.assertEqual(page2.num_items, 0)
    self.assertEqual(iterator.prefixes, set(['foo', 'bar']))
def _test_generate_upload_policy_helper(self, **kwargs):
    """Shared driver: generate an upload policy and verify its fields.

    Returns ``(policy_fields, decoded_policy)`` for further checks.
    """
    import base64
    import json
    credentials = _create_signing_credentials()
    credentials.signer_email = mock.sentinel.signer_email
    credentials.sign_bytes.return_value = b'DEADBEEF'
    connection = _Connection()
    connection.credentials = credentials
    client = _Client(connection)
    name = 'name'
    bucket = self._make_one(client=client, name=name)
    conditions = [
        ['starts-with', '$key', '']]
    policy_fields = bucket.generate_upload_policy(conditions, **kwargs)
    self.assertEqual(policy_fields['bucket'], bucket.name)
    self.assertEqual(
        policy_fields['GoogleAccessId'], mock.sentinel.signer_email)
    # Signature is the base64 of the mocked signing output.
    self.assertEqual(
        policy_fields['signature'],
        base64.b64encode(b'DEADBEEF').decode('utf-8'))
    # The policy document itself is base64-encoded JSON.
    policy = json.loads(
        base64.b64decode(policy_fields['policy']).decode('utf-8'))
    policy_conditions = policy['conditions']
    # Every expected condition must appear somewhere in the policy.
    expected_conditions = [{'bucket': bucket.name}] + conditions
    for expected_condition in expected_conditions:
        for condition in policy_conditions:
            if condition == expected_condition:
                break
        else:  # pragma: NO COVER
            self.fail('Condition {} not found in {}'.format(
                expected_condition, policy_conditions))
    return policy_fields, policy
@mock.patch(
    'google.cloud.storage.bucket._NOW',
    return_value=datetime.datetime(1990, 1, 1))
def test_generate_upload_policy(self, now):
    """With no explicit expiration, the policy expires one hour from 'now'."""
    from google.cloud._helpers import _datetime_to_rfc3339
    _, policy = self._test_generate_upload_policy_helper()
    self.assertEqual(
        policy['expiration'],
        _datetime_to_rfc3339(
            now() + datetime.timedelta(hours=1)))
def test_generate_upload_policy_args(self):
    """An explicit expiration is serialized into the policy document."""
    from google.cloud._helpers import _datetime_to_rfc3339
    expiration = datetime.datetime(1990, 5, 29)
    _, policy = self._test_generate_upload_policy_helper(
        expiration=expiration)
    self.assertEqual(
        policy['expiration'], _datetime_to_rfc3339(expiration))
def test_generate_upload_policy_bad_credentials(self):
    """Credentials lacking a signing method cannot generate a policy."""
    connection = _Connection()
    # A plain object has no sign_bytes/signer_email attributes.
    connection.credentials = object()
    client = _Client(connection)
    bucket = self._make_one(client=client, name='name')
    with self.assertRaises(AttributeError):
        bucket.generate_upload_policy([])
class _Connection(object):
    """Stub connection that records requests and replays canned responses
    in FIFO order."""

    # When False (the default), DELETE on a bucket path raises NotFound.
    _delete_bucket = False

    def __init__(self, *responses):
        self._responses = responses
        self._requested = []
        self._deleted_buckets = []
        self.credentials = None

    @staticmethod
    def _is_bucket_path(path):
        # Now just ensure the path only has /b/ and one more segment.
        return path.startswith('/b/') and path.count('/') == 2

    def api_request(self, **kw):
        """Record the request and pop the next canned response.

        Raises ``NotFound`` when no responses remain, or on a bucket
        DELETE unless ``_delete_bucket`` is set.
        """
        from google.cloud.exceptions import NotFound
        self._requested.append(kw)
        method = kw.get('method')
        path = kw.get('path', '')
        if method == 'DELETE' and self._is_bucket_path(path):
            self._deleted_buckets.append(kw)
            if self._delete_bucket:
                return
            else:
                raise NotFound('miss')
        try:
            # Pop the oldest canned response.
            response, self._responses = self._responses[0], self._responses[1:]
        except IndexError:
            raise NotFound('miss')
        else:
            return response
class _Client(object):
def __init__(self, connection, project=None):
self._connection = connection
self._base_connection = connection
self.project = project
|
"""diff_factory wraps a model's diff and returns a queryable DiffModel."""
import copy
from django.apps import apps
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
from django.utils import timezone
from django.utils.html import format_html
import django_tables2 as tables
from django_tables2.utils import call_with_appropriate
from nautobot_version_control import diff_table_for_model
class DiffListViewFactory:
    """Dynamically generates diff table models for a content type.

    Fixes: the ``get_table_model`` docstring repeated "the underlying",
    and ``make_table_model`` wrapped its body in a pointless
    ``except KeyError as e: raise e`` that re-raised unchanged.
    """

    def __init__(self, content_type):
        self.ct = content_type

    def get_table_model(self):
        """Return the cached diff table model, building it on first use."""
        try:
            return apps.get_model("nautobot_version_control", self.table_model_name)
        except LookupError:
            # Not registered yet -- build and register it now.
            return self.make_table_model()

    def make_table_model(self):
        """Create and register a DiffList table model for this content type."""
        # lookup the list view table for this content type
        # todo: once available, use https://github.com/nautobot/nautobot/issues/747
        model = self.ct.model_class()
        ModelViewTable = diff_table_for_model(model)  # pylint: disable=C0103
        return type(
            self.table_model_name,
            (
                ModelViewTable,
                DiffListViewBase,
            ),
            {
                "__module__": "nautobot_version_control.tables",
                "_declared": timezone.now(),
                "Meta": self._get_table_meta(ModelViewTable),
                "content_type": self.ct,
            },
        )

    def _get_table_meta(self, table):
        """Clone the source table's Meta, injecting diff row styling/ordering."""
        meta = copy.deepcopy(table._meta)
        # add diff styling
        meta.row_attrs = {"class": row_attrs_for_record}
        meta.sequence = ("diff", "...")
        return meta

    @property
    def table_model_name(self):
        """Unique model name for the diff table of this content type."""
        return f"diff_{str(self.ct.app_label)}_{str(self.ct.model)}"
def row_attrs_for_record(record):
    """Return the Bootstrap row class for a diff record.

    Added rows are green, removed rows red, modified rows yellow
    (for both the "to" and "from" sides). Returns "" when the record
    carries no diff. Previously a modified record whose "root" was
    neither "to" nor "from" fell through and implicitly returned
    None (pylint R1710); all paths now return a string.
    """
    if not record.diff:
        return ""
    diff_type = record.diff["diff_type"]
    if diff_type == "added":
        return "bg-success"
    if diff_type == "removed":
        return "bg-danger"
    # diff_type == "modified": both sides get the same warning highlight.
    if record.diff["root"] in ("to", "from"):
        return "bg-warning"
    return ""
class DiffListViewBase(tables.Table):
    """DiffListViewBase base model for a DiffList."""

    # Extra leading column showing the diff type (added/removed/changed).
    diff = tables.Column(verbose_name="Diff Type")

    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        """Wrap each column's renderer so modified cells show before/after."""
        super().__init__(*args, **kwargs)
        for col in self.columns:
            if col.name == "diff":
                continue  # uses `render_diff()`
            col.render = self.wrap_render_func(col.render)

    def render_diff(self, value, record):  # pylint: disable=W0613
        """Custom rendering for the `Diff Type` columns."""
        # Link each diff badge to the detail view for this object's change.
        ct = ContentType.objects.get_for_model(self.Meta.model)  # pylint: disable=E1101
        href = reverse(
            "plugins:nautobot_version_control:diff_detail",
            kwargs={
                "app_label": ct.app_label,
                "model": ct.model,
                "from_commit": record.diff["from_commit"],
                "to_commit": record.diff["to_commit"],
                "pk": record.pk,
            },
        )
        if record.diff["diff_type"] == "added":
            return format_html(
                f"""<a href="{ href }">
                    <span class="label label-success">added</span>
                </a>"""
            )
        elif record.diff["diff_type"] == "removed":
            return format_html(
                f"""<a href="{ href }">
                    <span class="label label-danger">removed</span>
                </a>"""
            )
        else:  # diff_type == "modified"
            # Show how many fields actually changed in the badge text.
            cnt = self.count_diffs(record.diff)
            return format_html(
                f"""<a href="{ href }">
                    <span class="label label-primary">changed ({ cnt })</span>
                </a>"""
            )

    @staticmethod
    def count_diffs(diff):
        """count_diffs counts the numbers of diffs."""
        # Bookkeeping keys that are not field values and must be ignored.
        skip_fields = (
            "root",
            "diff_type",
            "to_commit",
            "to_commit_date",
            "from_commit",
            "from_commit_date",
        )
        cnt = 0
        for k, v in diff.items():
            if k in skip_fields:
                continue
            if k.startswith("to_"):
                # compare to and from values
                from_key = f"from_{k[3:]}"
                if v != diff[from_key]:
                    cnt += 1
        return cnt

    @staticmethod
    def wrap_render_func(fn):
        """Wraps an existing cell rendering function with diff styling."""
        def render_before_after_diff(value, record, column, bound_column, bound_row, table):  # pylint: disable=R0913
            # the previous render function may take any of the
            # following args, so provide them all
            kwargs = {
                "value": value,
                "record": record,
                "column": column,
                "bound_column": bound_column,
                "bound_row": bound_row,
                "table": table,
            }
            try:
                # render the existing column function with best effort.
                cell = call_with_appropriate(fn, kwargs)
            except Exception:
                # In particular, rendering TemplateColumns for deleted rows
                # causes errors. Deleted rows are accessed with "time-travel"
                # queries, but are templates rendered from the current tip of
                # the branch, leading to referential integrity errors.
                return value
            if not record.diff or record.diff["diff_type"] != "modified":
                # only render before/after diff styling
                # for 'modified' rows
                return cell
            before_name = f"from_{bound_column.name}"
            if before_name not in record.diff:
                # can't render diff styling
                return cell
            after_name = f"to_{bound_column.name}"
            if after_name in record.diff and record.diff[after_name] == record.diff[before_name]:
                # no diff
                return cell
            # re-render the cell value with its before value
            kwargs["value"] = record.diff[before_name]
            before_cell = call_with_appropriate(fn, kwargs)
            if before_cell == cell:
                # no change
                return cell
            before_cell = before_cell if before_cell else " — "
            return format_html(
                f"""<div>
                <span class="bg-danger text-danger">
                    <b>{before_cell}</b>
                </span>
                </br>
                <span class="bg-success text-success">
                    <b>{cell}</b>
                </span>
            </div>"""
            )
        return render_before_after_diff
|
from django.urls import path
from . import views

# Route table for the contacts app: auth views plus per-contact CRUD.
# NOTE(review): 'update/<id>', 'delete/<id>' and 'dashboard' omit the
# trailing slash that the other routes use -- confirm this asymmetry is
# intended (APPEND_SLASH behavior differs between the two forms).
urlpatterns = [
    path('', views.login, name='index_login'),
    path('login/', views.login, name='login'),
    path('logout/', views.logout, name='logout'),
    path('register/', views.register, name='register'),
    path('update/<int:contact_id>', views.update_view, name='update'),
    path('delete/<int:contact_id>', views.delete, name='delete'),
    path('dashboard', views.dashboard, name='dashboard'),
]
|
# Owner(s): ["module: dataloader"]
import copy
import http.server
import itertools
import os
import os.path
import pickle
import random
import socketserver
import sys
import tarfile
import tempfile
import threading
import time
import unittest
import warnings
import zipfile
from functools import partial
from typing import (
Any,
Awaitable,
Dict,
Generic,
Iterator,
List,
NamedTuple,
Optional,
Set,
Tuple,
Type,
TypeVar,
Union,
)
from unittest import skipIf
import numpy as np
import torch
import torch.utils.data.backward_compatibility
import torch.utils.data.datapipes as dp
import torch.utils.data.graph
import torch.utils.data.graph_settings
from torch.testing._internal.common_utils import TestCase, run_tests, suppress_warnings
from torch.utils.data import (
DataLoader,
DataChunk,
IterDataPipe,
MapDataPipe,
RandomSampler,
argument_validation,
runtime_validation,
runtime_validation_disabled,
)
from torch.utils.data.graph import traverse
from torch.utils.data.datapipes.utils.decoder import (
basichandlers as decoder_basichandlers,
)
# Optional 'dill' dependency gates the serialization tests below.
try:
    import dill
    # XXX: By default, dill writes the Pickler dispatch table to inject its
    # own logic there. This globally affects the behavior of the standard library
    # pickler for any user who transitively depends on this module!
    # Undo this extension to avoid altering the behavior of the pickler globally.
    dill.extend(use_dill=False)
    HAS_DILL = True
except ImportError:
    HAS_DILL = False
skipIfNoDill = skipIf(not HAS_DILL, "no dill")
# Optional 'pandas' dependency gates the DataFrame-related tests.
try:
    import pandas  # type: ignore[import] # noqa: F401 F403
    HAS_PANDAS = True
except ImportError:
    HAS_PANDAS = False
skipIfNoDataFrames = skipIf(not HAS_PANDAS, "no dataframes (pandas)")
# Generic element type used by the DataPipe helper signatures below.
T_co = TypeVar("T_co", covariant=True)
def create_temp_dir_and_files():
    """Create a temp dir plus one sub-dir, each seeded with test files.

    Returns a two-element list:
      [(dir_handle, txt_path, byte_path, empty_path),
       (sub_dir_handle, sub_txt_path, sub_byte_path)]
    The TemporaryDirectory handles are returned so the caller can clean
    them up in tearDown(); nothing is deleted here (hence `noqa: P201`).
    """
    def _reserve_file(dir_path, suffix):
        # NamedTemporaryFile(delete=False) just reserves a unique path.
        with tempfile.NamedTemporaryFile(dir=dir_path, delete=False, suffix=suffix) as handle:
            return handle.name

    temp_dir = tempfile.TemporaryDirectory()  # noqa: P201
    txt_path = _reserve_file(temp_dir.name, '.txt')
    byte_path = _reserve_file(temp_dir.name, '.byte')
    empty_path = _reserve_file(temp_dir.name, '.empty')
    with open(txt_path, 'w') as stream:
        stream.write('0123456789abcdef')
    with open(byte_path, 'wb') as stream:
        stream.write(b"0123456789abcdef")

    temp_sub_dir = tempfile.TemporaryDirectory(dir=temp_dir.name)  # noqa: P201
    sub_txt_path = _reserve_file(temp_sub_dir.name, '.txt')
    sub_byte_path = _reserve_file(temp_sub_dir.name, '.byte')
    with open(sub_txt_path, 'w') as stream:
        stream.write('0123456789abcdef')
    with open(sub_byte_path, 'wb') as stream:
        stream.write(b"0123456789abcdef")

    return [(temp_dir, txt_path, byte_path, empty_path),
            (temp_sub_dir, sub_txt_path, sub_byte_path)]
# Given a DataPipe and integer n, iterate the DataPipe for n elements and store the elements into a list
# Then, reset the DataPipe and return a tuple of two lists
# 1. A list of elements yielded before the reset
# 2. A list of all elements of the DataPipe after the reset
def reset_after_n_next_calls(datapipe: Union[IterDataPipe[T_co], MapDataPipe[T_co]],
                             n: int) -> Tuple[List[T_co], List[T_co]]:
    """Consume *n* items from ``datapipe``, then re-iterate it from scratch."""
    it = iter(datapipe)
    res_before_reset = [next(it) for _ in range(n)]
    # Starting a fresh iteration implicitly resets the DataPipe.
    return res_before_reset, list(datapipe)
def odd_or_even(x: int) -> int:
    """Return the parity of *x*: 0 for even, 1 for odd."""
    return x & 1
class TestDataChunk(TestCase):
def setUp(self):
self.elements = list(range(10))
random.shuffle(self.elements)
self.chunk: DataChunk[int] = DataChunk(self.elements)
def test_getitem(self):
for i in range(10):
self.assertEqual(self.elements[i], self.chunk[i])
def test_iter(self):
for ele, dc in zip(self.elements, iter(self.chunk)):
self.assertEqual(ele, dc)
def test_len(self):
self.assertEqual(len(self.elements), len(self.chunk))
def test_as_string(self):
self.assertEqual(str(self.chunk), str(self.elements))
batch = [self.elements] * 3
chunks: List[DataChunk[int]] = [DataChunk(self.elements)] * 3
self.assertEqual(str(batch), str(chunks))
def test_sort(self):
chunk: DataChunk[int] = DataChunk(self.elements)
chunk.sort()
self.assertTrue(isinstance(chunk, DataChunk))
for i, d in enumerate(chunk):
self.assertEqual(i, d)
def test_reverse(self):
chunk: DataChunk[int] = DataChunk(self.elements)
chunk.reverse()
self.assertTrue(isinstance(chunk, DataChunk))
for i in range(10):
self.assertEqual(chunk[i], self.elements[9 - i])
def test_random_shuffle(self):
elements = list(range(10))
chunk: DataChunk[int] = DataChunk(elements)
rng = random.Random(0)
rng.shuffle(chunk)
rng = random.Random(0)
rng.shuffle(elements)
self.assertEqual(chunk, elements)
class TestIterableDataPipeBasic(TestCase):
def setUp(self):
    """Create the shared temp dir/file fixture for each test."""
    ret = create_temp_dir_and_files()
    # (dir handle, file1, file2, file3) for the top-level directory...
    self.temp_dir = ret[0][0]
    self.temp_files = ret[0][1:]
    # ...and (dir handle, file1, file2) for the nested sub-directory.
    self.temp_sub_dir = ret[1][0]
    self.temp_sub_files = ret[1][1:]
def tearDown(self):
    """Best-effort removal of the temp fixture; never fail the test run."""
    try:
        self.temp_sub_dir.cleanup()
        self.temp_dir.cleanup()
    except Exception as e:
        # Cleanup failures (e.g. files still open) only produce a warning.
        warnings.warn("TestIterableDatasetBasic was not able to cleanup temp dir due to {}".format(str(e)))
def test_listdirfiles_iterable_datapipe(self):
    """FileLister yields every file in the dir; recursive=True adds sub-dirs."""
    temp_dir = self.temp_dir.name
    datapipe = dp.iter.FileLister(temp_dir, '')
    count = 0
    for pathname in datapipe:
        count = count + 1
        self.assertTrue(pathname in self.temp_files)
    self.assertEqual(count, len(self.temp_files))
    count = 0
    # Recursive listing should also surface the sub-directory's files.
    datapipe = dp.iter.FileLister(temp_dir, '', recursive=True)
    for pathname in datapipe:
        count = count + 1
        self.assertTrue((pathname in self.temp_files) or (pathname in self.temp_sub_files))
    self.assertEqual(count, len(self.temp_files) + len(self.temp_sub_files))
def test_loadfilesfromdisk_iterable_datapipe(self):
    """FileLoader yields (path, stream) pairs whose contents match disk."""
    # test import datapipe class directly
    from torch.utils.data.datapipes.iter import (
        FileLister,
        FileLoader,
    )
    temp_dir = self.temp_dir.name
    datapipe1 = FileLister(temp_dir, '')
    datapipe2 = FileLoader(datapipe1)
    count = 0
    for rec in datapipe2:
        count = count + 1
        self.assertTrue(rec[0] in self.temp_files)
        # Stream contents must match the file read directly from disk.
        with open(rec[0], 'rb') as f:
            self.assertEqual(rec[1].read(), f.read())
        rec[1].close()
    self.assertEqual(count, len(self.temp_files))
def test_readfilesfromtar_iterable_datapipe(self):
    """TarArchiveReader yields (member path, stream) pairs matching the
    original files, and survives a mid-iteration reset."""
    temp_dir = self.temp_dir.name
    temp_tarfile_pathname = os.path.join(temp_dir, "test_tar.tar")
    # Build a gzip'd tar containing the three fixture files.
    with tarfile.open(temp_tarfile_pathname, "w:gz") as tar:
        tar.add(self.temp_files[0])
        tar.add(self.temp_files[1])
        tar.add(self.temp_files[2])
    datapipe1 = dp.iter.FileLister(temp_dir, '*.tar')
    datapipe2 = dp.iter.FileLoader(datapipe1)
    datapipe3 = dp.iter.TarArchiveReader(datapipe2)
    # Test Case: Read extracted files before reaching the end of the tarfile
    for rec, temp_file in itertools.zip_longest(datapipe3, self.temp_files):
        self.assertTrue(rec is not None and temp_file is not None)
        self.assertEqual(os.path.basename(rec[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(rec[1].read(), f.read())
        rec[1].close()
    # Test Case: Read extracted files after reaching the end of the tarfile
    data_refs = list(datapipe3)
    self.assertEqual(len(data_refs), len(self.temp_files))
    for data_ref, temp_file in zip(data_refs, self.temp_files):
        self.assertEqual(os.path.basename(data_ref[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(data_ref[1].read(), f.read())
        data_ref[1].close()
    # Test Case: reset the DataPipe after reading part of it
    n_elements_before_reset = 1
    res_before_reset, res_after_reset = reset_after_n_next_calls(datapipe3, n_elements_before_reset)
    # Check result accumulated before reset
    self.assertEqual(len(res_before_reset), n_elements_before_reset)
    for ele_before_reset, temp_file in zip(res_before_reset, self.temp_files):
        self.assertEqual(os.path.basename(ele_before_reset[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(ele_before_reset[1].read(), f.read())
        ele_before_reset[1].close()
    # Check result accumulated after reset
    self.assertEqual(len(res_after_reset), len(self.temp_files))
    for ele_after_reset, temp_file in zip(res_after_reset, self.temp_files):
        self.assertEqual(os.path.basename(ele_after_reset[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(ele_after_reset[1].read(), f.read())
        ele_after_reset[1].close()
# This test throws a warning because data_stream in side ZipArchiveReader cannot be closed
# due to the way zipfiles.open() is implemented
def test_readfilesfromzip_iterable_datapipe(self):
    """Exercise ZipArchiveReader: archive members can be read mid-iteration,
    after full iteration, and again after a DataPipe reset."""
    temp_dir = self.temp_dir.name
    temp_zipfile_pathname = os.path.join(temp_dir, "test_zip.zip")
    # Build a zip archive containing the three pre-created temp files.
    with zipfile.ZipFile(temp_zipfile_pathname, 'w') as myzip:
        myzip.write(self.temp_files[0])
        myzip.write(self.temp_files[1])
        myzip.write(self.temp_files[2])
    datapipe1 = dp.iter.FileLister(temp_dir, '*.zip')
    datapipe2 = dp.iter.FileLoader(datapipe1)
    datapipe3 = dp.iter.ZipArchiveReader(datapipe2)

    # Test Case: read extracted files before reaching the end of the zipfile
    # (zip_longest also verifies both sequences have the same number of entries)
    for rec, temp_file in itertools.zip_longest(datapipe3, self.temp_files):
        self.assertTrue(rec is not None and temp_file is not None)
        self.assertEqual(os.path.basename(rec[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(rec[1].read(), f.read())
        rec[1].close()

    # Test Case: read extracted files after reaching the end of the zipfile
    data_refs = list(datapipe3)
    self.assertEqual(len(data_refs), len(self.temp_files))
    for data_ref, temp_file in zip(data_refs, self.temp_files):
        self.assertEqual(os.path.basename(data_ref[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(data_ref[1].read(), f.read())
        data_ref[1].close()

    # Test Case: reset the DataPipe after reading part of it
    n_elements_before_reset = 1
    res_before_reset, res_after_reset = reset_after_n_next_calls(datapipe3, n_elements_before_reset)
    # Check the results accumulated before reset
    self.assertEqual(len(res_before_reset), n_elements_before_reset)
    for ele_before_reset, temp_file in zip(res_before_reset, self.temp_files):
        self.assertEqual(os.path.basename(ele_before_reset[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(ele_before_reset[1].read(), f.read())
        ele_before_reset[1].close()
    # Check the results accumulated after reset: the full archive is yielded again
    self.assertEqual(len(res_after_reset), len(self.temp_files))
    for ele_after_reset, temp_file in zip(res_after_reset, self.temp_files):
        self.assertEqual(os.path.basename(ele_after_reset[0]), os.path.basename(temp_file))
        with open(temp_file, 'rb') as f:
            self.assertEqual(ele_after_reset[1].read(), f.read())
        ele_after_reset[1].close()
def test_routeddecoder_iterable_datapipe(self):
    """Exercise RoutedDecoder: registered handlers decode entries by file
    extension, and the decoder closes the underlying byte stream."""
    temp_dir = self.temp_dir.name
    temp_pngfile_pathname = os.path.join(temp_dir, "test_png.png")
    png_data = np.array([[[1., 0., 0.], [1., 0., 0.]], [[1., 0., 0.], [1., 0., 0.]]], dtype=np.single)
    # NOTE(review): np.save appends '.npy' to paths that do not already end
    # with it, so the on-disk name may not match '*.png' — confirm intent.
    np.save(temp_pngfile_pathname, png_data)
    datapipe1 = dp.iter.FileLister(temp_dir, ['*.png', '*.txt'])
    datapipe2 = dp.iter.FileLoader(datapipe1)

    def _png_decoder(extension, data):
        # Handler contract: return None to decline, a decoded value to accept.
        if extension != 'png':
            return None
        return np.load(data)

    def _helper(prior_dp, dp, channel_first=False):
        # NOTE(review): parameter `dp` shadows the module-level `dp` alias
        # inside this helper.
        # Byte stream is not closed
        for inp in prior_dp:
            self.assertFalse(inp[1].closed)
        for inp, rec in zip(prior_dp, dp):
            ext = os.path.splitext(rec[0])[1]
            if ext == '.png':
                expected = np.array([[[1., 0., 0.], [1., 0., 0.]], [[1., 0., 0.], [1., 0., 0.]]], dtype=np.single)
                if channel_first:
                    expected = expected.transpose(2, 0, 1)
                self.assertEqual(rec[1], expected)
            else:
                # Text entries must decode to their original UTF-8 contents.
                with open(rec[0], 'rb') as f:
                    self.assertEqual(rec[1], f.read().decode('utf-8'))
            # Corresponding byte stream is closed by Decoder
            self.assertTrue(inp[1].closed)

    # Custom png handler registered at construction, generic handlers added after.
    cached = list(datapipe2)
    datapipe3 = dp.iter.RoutedDecoder(cached, _png_decoder)
    datapipe3.add_handler(decoder_basichandlers)
    _helper(cached, datapipe3)

    # Reverse registration order; this path expects channel-first output
    # (presumably the basic image handler wins for '.png' here — see
    # channel_first=True below).
    cached = list(datapipe2)
    datapipe4 = dp.iter.RoutedDecoder(cached, decoder_basichandlers)
    datapipe4.add_handler(_png_decoder)
    _helper(cached, datapipe4, channel_first=True)
def test_groupby_iterable_datapipe(self):
    """Exercise Grouper: tar members are grouped into pairs sharing a file
    stem, then each group is sorted for a deterministic comparison."""
    temp_dir = self.temp_dir.name
    temp_tarfile_pathname = os.path.join(temp_dir, "test_tar.tar")
    # Stems a-h, two files each, archived in an interleaved order so the
    # Grouper actually has to buffer partial groups.
    file_list = [
        "a.png", "b.png", "c.json", "a.json", "c.png", "b.json", "d.png",
        "d.json", "e.png", "f.json", "g.png", "f.png", "g.json", "e.json",
        "h.txt", "h.json"]
    with tarfile.open(temp_tarfile_pathname, "w:gz") as tar:
        for file_name in file_list:
            file_pathname = os.path.join(temp_dir, file_name)
            with open(file_pathname, 'w') as f:
                f.write('12345abcde')
            tar.add(file_pathname)

    datapipe1 = dp.iter.FileLister(temp_dir, '*.tar')
    datapipe2 = dp.iter.FileLoader(datapipe1)
    datapipe3 = dp.iter.TarArchiveReader(datapipe2)

    def group_fn(data):
        # Group key: the file stem (basename without extension).
        filepath, _ = data
        return os.path.basename(filepath).split(".")[0]

    datapipe4 = dp.iter.Grouper(datapipe3, group_key_fn=group_fn, group_size=2)

    def order_fn(data):
        # Sort each group by path, descending, so e.g. "a.png" precedes
        # "a.json" regardless of archive order.
        data.sort(key=lambda f: f[0], reverse=True)
        return data

    datapipe5 = dp.iter.Mapper(datapipe4, fn=order_fn)  # type: ignore[var-annotated]

    expected_result = [
        ("a.png", "a.json"), ("c.png", "c.json"), ("b.png", "b.json"), ("d.png", "d.json"),
        ("f.png", "f.json"), ("g.png", "g.json"), ("e.png", "e.json"), ("h.txt", "h.json")]
    count = 0
    for rec, expected in zip(datapipe5, expected_result):
        count = count + 1
        self.assertEqual(os.path.basename(rec[0][0]), expected[0])
        self.assertEqual(os.path.basename(rec[1][0]), expected[1])
        for i in [0, 1]:
            self.assertEqual(rec[i][1].read(), b'12345abcde')
            rec[i][1].close()
    # All eight groups must have been produced.
    self.assertEqual(count, 8)
def test_demux_mux_datapipe(self):
    """Round-trip check: demux splits a stream by classifier and mux
    interleaves the children back into the original order."""
    # Split ten numbers by parity and drain each child in full.
    evens, odds = NumbersDataset(10).demux(2, lambda x: x % 2)
    self.assertEqual([0, 2, 4, 6, 8], list(evens))
    self.assertEqual([1, 3, 5, 7, 9], list(odds))

    # Splitting three ways and muxing back reproduces the source order.
    child0, child1, child2 = NumbersDataset(10).demux(3, lambda x: x % 3)
    recombined = child0.mux(child1, child2)
    self.assertEqual(list(range(10)), list(recombined))

    # Test Case: Uneven DataPipes
    source_numbers = list(range(0, 10)) + [10, 12]
    numbers_dp = dp.iter.IterableWrapper(source_numbers)
    evens, odds = numbers_dp.demux(2, lambda x: x % 2)
    self.assertEqual([0, 2, 4, 6, 8, 10, 12], list(evens))
    self.assertEqual([1, 3, 5, 7, 9], list(odds))
    self.assertEqual(source_numbers, list(evens.mux(odds)))
@suppress_warnings  # Suppress warning for lambda fn
def test_map_with_col_file_handle_datapipe(self):
    """Mapping over a single column of a (path, stream) element must match
    mapping over the whole element, for both tuple and list elements."""
    temp_dir = self.temp_dir.name
    lister = dp.iter.FileLister(temp_dir, '')
    loader = dp.iter.FileLoader(lister)

    def _check_equivalence(source):
        # input_col=1 applies the fn to the stream only; the explicit
        # lambda rebuilds the same (path, contents) pair by hand.
        by_column = source.map(lambda x: x.read(), input_col=1)
        by_element = source.map(lambda x: (x[0], x[1].read()))
        self.assertEqual(list(by_column), list(by_element))

    # tuple elements
    _check_equivalence(loader)
    # list elements
    _check_equivalence(loader.map(lambda x: list(x)))
class TestDataFramesPipes(TestCase):
    """
    Most of these tests will fail if pandas is installed but dill is not
    available. Need to rework them to avoid multiple skips.
    """

    def _get_datapipe(self, range=10, dataframe_size=7):
        # Plain (non-dataframe) reference pipe of (i, i % 3) tuples.
        # NOTE: `dataframe_size` is unused here; kept for signature parity
        # with _get_dataframes_pipe. `range` shadows the builtin.
        return NumbersDataset(range) \
            .map(lambda i: (i, i % 3))

    def _get_dataframes_pipe(self, range=10, dataframe_size=7):
        # Same data as _get_datapipe, batched into dataframes of
        # `dataframe_size` rows with columns 'i' and 'j'.
        return NumbersDataset(range) \
            .map(lambda i: (i, i % 3)) \
            ._to_dataframes_pipe(
                columns=['i', 'j'],
                dataframe_size=dataframe_size)

    @skipIfNoDataFrames
    @skipIfNoDill  # TODO(VitalyFedyunin): Decouple tests from dill by avoiding lambdas in map
    def test_capture(self):
        # Column arithmetic captured on the dataframe pipe must match the
        # eagerly computed tuple math.
        dp_numbers = self._get_datapipe().map(lambda x: (x[0], x[1], x[1] + 3 * x[0]))
        df_numbers = self._get_dataframes_pipe()
        df_numbers['k'] = df_numbers['j'] + df_numbers.i * 3
        self.assertEqual(list(dp_numbers), list(df_numbers))

    @skipIfNoDataFrames
    @skipIfNoDill
    def test_shuffle(self):
        # With non-zero (but extremely low) probability (i.e. when shuffle
        # happens to produce the original order), this test fails, so feel
        # free to restart
        df_numbers = self._get_dataframes_pipe(range=1000).shuffle()
        dp_numbers = self._get_datapipe(range=1000)
        df_result = [tuple(item) for item in df_numbers]
        self.assertNotEqual(list(dp_numbers), df_result)
        self.assertEqual(list(dp_numbers), sorted(df_result))

    @skipIfNoDataFrames
    @skipIfNoDill
    def test_batch(self):
        # 100 rows with batch size 8 -> last batch holds the 4 remaining rows.
        df_numbers = self._get_dataframes_pipe(range=100).batch(8)
        df_numbers_list = list(df_numbers)
        last_batch = df_numbers_list[-1]
        self.assertEqual(4, len(last_batch))
        unpacked_batch = [tuple(row) for row in last_batch]
        self.assertEqual([(96, 0), (97, 1), (98, 2), (99, 0)], unpacked_batch)

    @skipIfNoDataFrames
    @skipIfNoDill
    def test_unbatch(self):
        # Two levels of batching undone by unbatch(2) restores the flat stream.
        df_numbers = self._get_dataframes_pipe(range=100).batch(8).batch(3)
        dp_numbers = self._get_datapipe(range=100)
        self.assertEqual(list(dp_numbers), list(df_numbers.unbatch(2)))

    @skipIfNoDataFrames
    @skipIfNoDill
    def test_filter(self):
        # Row filter over the 'i' column keeps only values above 5.
        df_numbers = self._get_dataframes_pipe(range=10).filter(lambda x: x.i > 5)
        self.assertEqual([(6, 0), (7, 1), (8, 2), (9, 0)], list(df_numbers))
class FileLoggerSimpleHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
    """HTTP request handler that redirects its access log to a file.

    When ``logfile`` is None the log output is dropped entirely; otherwise
    each request line is appended to the file, which is closed once the
    request has been fully handled.
    """

    def __init__(self, *args, logfile=None, **kwargs):
        # Open the log sink before delegating to the base class, since
        # BaseRequestHandler.__init__ handles the request synchronously.
        self.__log_sink = open(logfile, 'a+') if logfile is not None else None
        super().__init__(*args, **kwargs)

    def log_message(self, format, *args):
        if self.__log_sink is None:
            return
        entry = "%s - - [%s] %s\n" % (
            self.address_string(),
            self.log_date_time_string(),
            format % args,
        )
        self.__log_sink.write(entry)

    def finish(self):
        # Close the log file before the base class tears down the request.
        if self.__log_sink is not None:
            self.__log_sink.close()
        super().finish()
def setUpLocalServerInThread():
    """Start a local HTTP file server on an ephemeral port in a background thread.

    Returns:
        (server_thread, server_addr, server): the serving thread, the
        ``"host:port"`` address string, and the TCPServer instance. Callers
        are responsible for shutting the server down and joining the thread.
    """
    # The previous `try: ... except Exception: raise` wrapper was a no-op
    # and has been removed; exceptions propagate unchanged.
    Handler = partial(FileLoggerSimpleHTTPRequestHandler, logfile=None)
    socketserver.TCPServer.allow_reuse_address = True
    # Port 0 asks the OS for any free port; read the bound address back.
    server = socketserver.TCPServer(("", 0), Handler)
    server_addr = "{host}:{port}".format(host=server.server_address[0], port=server.server_address[1])
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.start()
    # Wait a bit for the server to come up
    time.sleep(3)
    return (server_thread, server_addr, server)
def create_temp_files_for_serving(tmp_dir, file_count, file_size,
                                  file_url_template):
    """Create random data files for the local HTTP server to serve.

    Each file is named ``webfile_test_<i>.data`` and is exactly
    ``file_size`` bytes of os.urandom output; a companion ``urls_list``
    manifest collects one URL per file, produced by formatting
    ``file_url_template`` with ``num=<i>``.

    Args:
        tmp_dir: directory in which all files are created.
        file_count: number of data files to generate.
        file_size: size of each data file, in bytes (0 yields empty files).
        file_url_template: format string with a ``{num}`` placeholder,
            expected to carry its own trailing newline.
    """
    # Generate random bytes in bounded chunks so large files never require
    # holding file_size bytes of urandom output in memory at once.
    write_chunk = 1024 * 1024 * 16
    furl_local_file = os.path.join(tmp_dir, "urls_list")
    with open(furl_local_file, 'w') as fsum:
        for i in range(file_count):
            f = os.path.join(tmp_dir, "webfile_test_{num}.data".format(num=i))
            # 'wb' instead of the previous 'ab+': the file is opened once
            # (not once per chunk) and a re-run against the same directory
            # produces files of exactly file_size bytes rather than
            # appending to leftovers.
            with open(f, 'wb') as fout:
                rmn_size = file_size
                while rmn_size > 0:
                    chunk = min(rmn_size, write_chunk)
                    fout.write(os.urandom(chunk))
                    rmn_size -= chunk
            fsum.write(file_url_template.format(num=i))
class TestIterableDataPipeHttp(TestCase):
    """End-to-end tests for HttpReader against a local HTTP file server.

    A server is started once per class on an ephemeral port; each test
    writes random files plus a ``urls_list`` manifest into a temp directory
    under the CWD so the server can serve them back over HTTP.
    """
    # Server state shared by all tests in this class (set in setUpClass).
    __server_thread: threading.Thread
    __server_addr: str
    __server: socketserver.TCPServer

    @classmethod
    def setUpClass(cls):
        try:
            (cls.__server_thread, cls.__server_addr,
             cls.__server) = setUpLocalServerInThread()
        except Exception as e:
            # Setup failure is only warned about; individual tests will fail
            # later when they cannot reach the server.
            warnings.warn("TestIterableDataPipeHttp could\
                          not set up due to {0}".format(str(e)))

    @classmethod
    def tearDownClass(cls):
        try:
            cls.__server.shutdown()
            cls.__server_thread.join(timeout=15)
        except Exception as e:
            warnings.warn("TestIterableDataPipeHttp could\
                          not tear down (clean up temp directory or terminate\
                          local server) due to {0}".format(str(e)))

    def _http_test_base(self, test_file_size, test_file_count, timeout=None,
                        chunk=None):
        """Serve `test_file_count` files of `test_file_size` bytes, then read
        them back through FileLister -> FileLoader -> LineReader -> Mapper ->
        HttpReader -> StreamReader, validating URL shape and payload size."""

        def _get_data_from_tuple_fn(data, *args, **kwargs):
            # Select the element at the index passed as the first extra arg
            # (here: index 1, presumably the line of a (file, line) pair).
            return data[args[0]]

        with tempfile.TemporaryDirectory(dir=os.getcwd()) as tmpdir:
            # create tmp dir and files for test
            base_tmp_dir = os.path.basename(os.path.normpath(tmpdir))
            # NOTE(review): this template yields a double slash between the
            # temp dir and file name; HTTP servers generally tolerate it.
            file_url_template = ("http://{server_addr}/{tmp_dir}/"
                                 "/webfile_test_{num}.data\n")\
                .format(server_addr=self.__server_addr, tmp_dir=base_tmp_dir,
                        num='{num}')
            create_temp_files_for_serving(tmpdir, test_file_count,
                                          test_file_size, file_url_template)

            datapipe_dir_f = dp.iter.FileLister(tmpdir, '*_list')
            datapipe_stream = dp.iter.FileLoader(datapipe_dir_f)
            datapipe_f_lines = dp.iter.LineReader(datapipe_stream)
            datapipe_line_url: IterDataPipe[str] = \
                dp.iter.Mapper(datapipe_f_lines, _get_data_from_tuple_fn, (1,))
            datapipe_http = dp.iter.HttpReader(datapipe_line_url,
                                               timeout=timeout)
            datapipe_tob = dp.iter.StreamReader(datapipe_http, chunk=chunk)

            for (url, data) in datapipe_tob:
                self.assertGreater(len(url), 0)
                self.assertRegex(url, r'^http://.+\d+.data$')
                if chunk is not None:
                    # File sizes used with chunking in these tests are exact
                    # multiples of `chunk`, so every piece is full-size.
                    self.assertEqual(len(data), chunk)
                else:
                    self.assertEqual(len(data), test_file_size)

    @unittest.skip("Stress test on large amount of files skipped\
                    due to the CI timing constraint.")
    def test_stress_http_reader_iterable_datapipes(self):
        test_file_size = 10
        # STATS: It takes about 5 hours to stress test 16 * 1024 * 1024
        # files locally
        test_file_count = 1024
        self._http_test_base(test_file_size, test_file_count)

    @unittest.skip("Test on the very large file skipped\
                    due to the CI timing constraint.")
    def test_large_files_http_reader_iterable_datapipes(self):
        # STATS: It takes about 11 mins to test a large file of 64GB locally
        test_file_size = 1024 * 1024 * 128
        test_file_count = 1
        timeout = 30
        chunk = 1024 * 1024 * 8
        self._http_test_base(test_file_size, test_file_count, timeout=timeout,
                             chunk=chunk)
class IDP_NoLen(IterDataPipe):
    """Wrapper IterDataPipe that deliberately does not define __len__.

    Used by the tests below to verify that downstream DataPipes raise
    TypeError when asked for the length of a length-less source.
    """

    def __init__(self, input_dp):
        super().__init__()
        self.input_dp = input_dp

    def __iter__(self):
        # Deep-copy plain containers on every iteration so consumers cannot
        # modify the stored input in place; IterDataPipe sources manage
        # their own state and are iterated directly.
        if isinstance(self.input_dp, IterDataPipe):
            source = self.input_dp
        else:
            source = copy.deepcopy(self.input_dp)
        yield from source
def _fake_fn(data):
return data
def _fake_add(constant, data):
return constant + data
def _fake_filter_fn(data):
return data >= 5
def _fake_filter_fn_constant(constant, data):
return data >= constant
def _worker_init_fn(worker_id):
random.seed(123)
class TestFunctionalIterDataPipe(TestCase):
# TODO(VitalyFedyunin): If dill installed this test fails
def _test_picklable(self):
    """Check which DataPipe configurations survive pickling.

    DataPipes configured with module-level functions (or partials of them)
    must pickle cleanly; ones configured with lambdas must warn at
    construction time and then fail to pickle.
    """
    arr = range(10)
    picklable_datapipes: List[Tuple[Type[IterDataPipe], IterDataPipe, Tuple, Dict[str, Any]]] = [
        (dp.iter.Mapper, dp.iter.IterableWrapper(arr), (), {}),
        (dp.iter.Mapper, dp.iter.IterableWrapper(arr), (_fake_fn, (0, )), {}),
        (dp.iter.Mapper, dp.iter.IterableWrapper(arr), (partial(_fake_add, 1), (0,)), {}),
        (dp.iter.Collator, dp.iter.IterableWrapper(arr), (), {}),
        (dp.iter.Collator, dp.iter.IterableWrapper(arr), (_fake_fn, (0, )), {}),
        (dp.iter.Filter, dp.iter.IterableWrapper(arr), (_fake_filter_fn, (0, )), {}),
        # Fixed: bind the two-argument predicate with partial (mirroring the
        # Mapper/_fake_add case above); the previous partial(_fake_filter_fn, 5)
        # pre-filled the only parameter of a one-argument predicate.
        (dp.iter.Filter, dp.iter.IterableWrapper(arr), (partial(_fake_filter_fn_constant, 5), (0,)), {}),
    ]
    for dpipe, input_dp, dp_args, dp_kwargs in picklable_datapipes:
        # Pickling must not raise for any of the above configurations.
        p = pickle.dumps(dpipe(input_dp, *dp_args, **dp_kwargs))  # type: ignore[call-arg]

    unpicklable_datapipes: List[Tuple[Type[IterDataPipe], IterDataPipe, Tuple, Dict[str, Any]]] = [
        (dp.iter.Mapper, dp.iter.IterableWrapper(arr), (lambda x: x, ), {}),
        (dp.iter.Collator, dp.iter.IterableWrapper(arr), (lambda x: x, ), {}),
        (dp.iter.Filter, dp.iter.IterableWrapper(arr), (lambda x: x >= 5, ), {}),
    ]
    for dpipe, input_dp, dp_args, dp_kwargs in unpicklable_datapipes:
        with warnings.catch_warnings(record=True) as wa:
            datapipe = dpipe(input_dp, *dp_args, **dp_kwargs)  # type: ignore[call-arg]
            # Exactly one construction-time warning about the lambda.
            self.assertEqual(len(wa), 1)
            self.assertRegex(str(wa[0].message), r"^Lambda function is not supported for pickle")
            with self.assertRaises(AttributeError):
                p = pickle.dumps(datapipe)
def test_iterable_wrapper_datapipe(self):
    """IterableWrapper: value passthrough, deep/shallow copy semantics,
    reset behavior, and length inheritance."""
    input_ls = list(range(10))
    input_dp = dp.iter.IterableWrapper(input_ls)

    # Functional Test: values are unchanged and in the same order
    self.assertEqual(input_ls, list(input_dp))

    # Functional Test: deep copy by default when an iterator is initialized (first element is read)
    it = iter(input_dp)
    self.assertEqual(0, next(it))  # The deep copy only happens when the first element is read
    # Mutating the source after the copy must not affect the live iterator.
    input_ls.append(50)
    self.assertEqual(list(range(1, 10)), list(it))

    # Functional Test: shallow copy
    input_ls2 = [1, 2, 3]
    input_dp_shallow = dp.iter.IterableWrapper(input_ls2, deepcopy=False)
    # With deepcopy=False the mutation is visible through the DataPipe.
    input_ls2.append(10)
    self.assertEqual([1, 2, 3, 10], list(input_dp_shallow))

    # Reset Test: reset the DataPipe
    input_ls = list(range(10))
    input_dp = dp.iter.IterableWrapper(input_ls)
    n_elements_before_reset = 5
    res_before_reset, res_after_reset = reset_after_n_next_calls(input_dp, n_elements_before_reset)
    self.assertEqual(input_ls[:n_elements_before_reset], res_before_reset)
    self.assertEqual(input_ls, res_after_reset)

    # __len__ Test: inherits length from sequence
    self.assertEqual(len(input_ls), len(input_dp))
def test_concat_datapipe(self):
    """Concater: argument validation, element order, length, and reset."""
    first_dp = dp.iter.IterableWrapper(range(10))
    second_dp = dp.iter.IterableWrapper(range(5))
    expected = list(range(10)) + list(range(5))

    # Constructing with no inputs, or a non-DataPipe input, must fail fast.
    with self.assertRaisesRegex(ValueError, r"Expected at least one DataPipe"):
        dp.iter.Concater()
    with self.assertRaisesRegex(TypeError, r"Expected all inputs to be `IterDataPipe`"):
        dp.iter.Concater(first_dp, ())  # type: ignore[arg-type]

    joined = first_dp.concat(second_dp)
    self.assertEqual(len(joined), 15)
    self.assertEqual(list(joined), expected)
    # Test Reset: a second full pass yields the same sequence.
    self.assertEqual(list(joined), expected)

    # Concatenating with a length-less source keeps iteration working,
    # but len() must raise.
    no_len_dp = IDP_NoLen(range(5))
    joined = first_dp.concat(no_len_dp)
    with self.assertRaisesRegex(TypeError, r"instance doesn't have valid length$"):
        len(joined)
    self.assertEqual(list(joined), expected)
def test_fork_datapipe(self):
    """Forker: shared element references, buffer limits, reset semantics,
    length inheritance, and traversability."""
    input_dp = dp.iter.IterableWrapper(range(10))

    with self.assertRaises(ValueError):
        input_dp.fork(num_instances=0)

    # fork(1) is a no-op and returns the source DataPipe itself.
    dp0 = input_dp.fork(num_instances=1)
    self.assertEqual(dp0, input_dp)

    # Test Case: making sure all child DataPipe shares the same reference
    dp1, dp2, dp3 = input_dp.fork(num_instances=3)
    self.assertTrue(all(n1 is n2 and n1 is n3 for n1, n2, n3 in zip(dp1, dp2, dp3)))

    # Test Case: one child DataPipe yields all value at a time
    output1, output2, output3 = list(dp1), list(dp2), list(dp3)
    self.assertEqual(list(range(10)), output1)
    self.assertEqual(list(range(10)), output2)
    self.assertEqual(list(range(10)), output3)

    # Test Case: two child DataPipes yield value together
    dp1, dp2 = input_dp.fork(num_instances=2)
    output = []
    for n1, n2 in zip(dp1, dp2):
        output.append((n1, n2))
    self.assertEqual([(i, i) for i in range(10)], output)

    # Test Case: one child DataPipe yields all value first, but buffer_size = 5 being too small
    dp1, dp2 = input_dp.fork(num_instances=2, buffer_size=5)
    it1 = iter(dp1)
    for _ in range(5):
        next(it1)
    # The 6th read would overflow the shared buffer while dp2 lags behind.
    with self.assertRaises(BufferError):
        next(it1)
    with self.assertRaises(BufferError):
        list(dp2)

    # Test Case: one child DataPipe yields all value first with unlimited buffer
    with warnings.catch_warnings(record=True) as wa:
        dp1, dp2 = input_dp.fork(num_instances=2, buffer_size=-1)
        self.assertEqual(len(wa), 1)
        self.assertRegex(str(wa[0].message), r"Unlimited buffer size is set")
    l1, l2 = list(dp1), list(dp2)
    for d1, d2 in zip(l1, l2):
        self.assertEqual(d1, d2)

    # Test Case: two child DataPipes yield value together with buffer size 1
    dp1, dp2 = input_dp.fork(num_instances=2, buffer_size=1)
    output = []
    for n1, n2 in zip(dp1, dp2):
        output.append((n1, n2))
    self.assertEqual([(i, i) for i in range(10)], output)

    # Test Case: make sure logic related to slowest_ptr is working properly
    dp1, dp2, dp3 = input_dp.fork(num_instances=3)
    output1, output2, output3 = [], [], []
    for i, (n1, n2) in enumerate(zip(dp1, dp2)):
        output1.append(n1)
        output2.append(n2)
        if i == 4:  # yield all of dp3 when halfway through dp1, dp2
            output3 = list(dp3)
            break
    self.assertEqual(list(range(5)), output1)
    self.assertEqual(list(range(5)), output2)
    self.assertEqual(list(range(10)), output3)

    # Test Case: DataPipe doesn't reset if this pipe hasn't been read
    dp1, dp2 = input_dp.fork(num_instances=2)
    i1, i2 = iter(dp1), iter(dp2)
    output2 = []
    for i, n2 in enumerate(i2):
        output2.append(n2)
        if i == 4:
            i1 = iter(dp1)  # Doesn't reset because i1 hasn't been read
    self.assertEqual(list(range(10)), output2)

    # Test Case: DataPipe reset when some of it have been read
    dp1, dp2 = input_dp.fork(num_instances=2)
    i1, i2 = iter(dp1), iter(dp2)
    output1, output2 = [], []
    for i, (n1, n2) in enumerate(zip(i1, i2)):
        output1.append(n1)
        output2.append(n2)
        if i == 4:
            with warnings.catch_warnings(record=True) as wa:
                i1 = iter(dp1)  # Reset both all child DataPipe
                self.assertEqual(len(wa), 1)
                self.assertRegex(str(wa[0].message), r"Some child DataPipes are not exhausted")
    # Five elements were read before the mid-stream reset, then a full pass.
    self.assertEqual(list(range(5)) + list(range(10)), output1)
    self.assertEqual(list(range(5)) + list(range(10)), output2)

    # Test Case: DataPipe reset, even when some other child DataPipes are not read
    dp1, dp2, dp3 = input_dp.fork(num_instances=3)
    output1, output2 = list(dp1), list(dp2)
    self.assertEqual(list(range(10)), output1)
    self.assertEqual(list(range(10)), output2)
    with warnings.catch_warnings(record=True) as wa:
        self.assertEqual(list(range(10)), list(dp1))  # Resets even though dp3 has not been read
        self.assertEqual(len(wa), 1)
        self.assertRegex(str(wa[0].message), r"Some child DataPipes are not exhausted")
    output3 = []
    for i, n3 in enumerate(dp3):
        output3.append(n3)
        if i == 4:
            with warnings.catch_warnings(record=True) as wa:
                output1 = list(dp1)  # Resets even though dp3 is only partially read
                self.assertEqual(len(wa), 1)
                self.assertRegex(str(wa[0].message), r"Some child DataPipes are not exhausted")
            self.assertEqual(list(range(5)), output3)
            self.assertEqual(list(range(10)), output1)
            break
    self.assertEqual(list(range(10)), list(dp3))  # dp3 has to read from the start again

    # Test Case: Each DataPipe inherits the source datapipe's length
    dp1, dp2, dp3 = input_dp.fork(num_instances=3)
    self.assertEqual(len(input_dp), len(dp1))
    self.assertEqual(len(input_dp), len(dp2))
    self.assertEqual(len(input_dp), len(dp3))

    # Pickle Test:
    dp1, dp2, dp3 = input_dp.fork(num_instances=3)
    traverse(dp1)  # This should not raise any error
    for _ in zip(dp1, dp2, dp3):
        pass
    traverse(dp2)  # This should not raise any error either
def test_mux_datapipe(self):
    """Multiplexer: round-robin interleaving, uneven/empty inputs, __len__."""
    # Functional Test: Elements are yielded one at a time from each DataPipe, until they are all exhausted
    first = dp.iter.IterableWrapper(range(4))
    second = dp.iter.IterableWrapper(range(4, 8))
    third = dp.iter.IterableWrapper(range(8, 12))
    muxed = first.mux(second, third)
    expected = [0, 4, 8, 1, 5, 9, 2, 6, 10, 3, 7, 11]
    self.assertEqual(len(expected), len(muxed))
    self.assertEqual(expected, list(muxed))

    # Functional Test: Uneven input Data Pipes
    first = dp.iter.IterableWrapper([1, 2, 3, 4])
    second = dp.iter.IterableWrapper([10])
    third = dp.iter.IterableWrapper([100, 200, 300])
    muxed = first.mux(second, third)
    expected = [1, 10, 100, 2, 200, 3, 300, 4]
    self.assertEqual(len(expected), len(muxed))
    self.assertEqual(expected, list(muxed))

    # Functional Test: Empty Data Pipe
    first = dp.iter.IterableWrapper([0, 1, 2, 3])
    empty = dp.iter.IterableWrapper([])
    muxed = first.mux(empty)
    self.assertEqual(len(first), len(muxed))
    self.assertEqual(list(first), list(muxed))

    # __len__ Test: raises TypeError when __len__ is called and an input doesn't have __len__
    first = dp.iter.IterableWrapper(range(10))
    no_len = IDP_NoLen(range(10))
    muxed = first.mux(no_len)
    with self.assertRaises(TypeError):
        len(muxed)
def test_demux_datapipe(self):
    """Demultiplexer: classification, buffering limits, reset semantics,
    drop_none handling, __len__, and traversability."""
    input_dp = dp.iter.IterableWrapper(range(10))

    with self.assertRaises(ValueError):
        input_dp.demux(num_instances=0, classifier_fn=lambda x: 0)

    # Test Case: split into 2 DataPipes and output them one at a time
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2)
    output1, output2 = list(dp1), list(dp2)
    self.assertEqual(list(range(0, 10, 2)), output1)
    self.assertEqual(list(range(1, 10, 2)), output2)

    # Test Case: split into 2 DataPipes and output them together
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2)
    output = []
    for n1, n2 in zip(dp1, dp2):
        output.append((n1, n2))
    self.assertEqual([(i, i + 1) for i in range(0, 10, 2)], output)

    # Test Case: values of the same classification are lumped together, and buffer_size = 4 being too small
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: 0 if x >= 5 else 1, buffer_size=4)
    it1 = iter(dp1)
    with self.assertRaises(BufferError):
        next(it1)  # Buffer raises because the first 5 elements all belong to the other child
    with self.assertRaises(BufferError):
        list(dp2)

    # Test Case: values of the same classification are lumped together, and buffer_size = 5 is just enough
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: 0 if x >= 5 else 1, buffer_size=5)
    output1, output2 = list(dp1), list(dp2)
    self.assertEqual(list(range(5, 10)), output1)
    self.assertEqual(list(range(0, 5)), output2)

    # Test Case: values of the same classification are lumped together, and unlimited buffer
    with warnings.catch_warnings(record=True) as wa:
        dp1, dp2 = input_dp.demux(
            num_instances=2,
            classifier_fn=lambda x: 0 if x >= 5 else 1,
            buffer_size=-1
        )
        self.assertEqual(len(wa), 1)
        self.assertRegex(str(wa[0].message), r"Unlimited buffer size is set")
    output1, output2 = list(dp1), list(dp2)
    self.assertEqual(list(range(5, 10)), output1)
    self.assertEqual(list(range(0, 5)), output2)

    # Test Case: classifier returns a value outside of [0, num_instance - 1]
    dp0 = input_dp.demux(num_instances=1, classifier_fn=lambda x: x % 2)
    it = iter(dp0[0])
    with self.assertRaises(ValueError):
        next(it)
        next(it)

    # Test Case: DataPipe doesn't reset when it has not been read
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2)
    i1 = iter(dp1)
    output2 = []
    i = 0
    for i, n2 in enumerate(dp2):
        output2.append(n2)
        if i == 4:
            i1 = iter(dp1)  # No reset: dp1 has not been read from yet
    self.assertEqual(list(range(1, 10, 2)), output2)

    # Test Case: DataPipe reset when some of it has been read
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2)
    output1, output2 = [], []
    for n1, n2 in zip(dp1, dp2):
        output1.append(n1)
        output2.append(n2)
        if n1 == 4:
            break
    with warnings.catch_warnings(record=True) as wa:
        i1 = iter(dp1)  # Reset all child DataPipes
        self.assertEqual(len(wa), 1)
        self.assertRegex(str(wa[0].message), r"Some child DataPipes are not exhausted")
    for n1, n2 in zip(dp1, dp2):
        output1.append(n1)
        output2.append(n2)
    self.assertEqual([0, 2, 4] + list(range(0, 10, 2)), output1)
    self.assertEqual([1, 3, 5] + list(range(1, 10, 2)), output2)

    # Test Case: DataPipe reset, even when not all child DataPipes are exhausted
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2)
    output1 = list(dp1)
    self.assertEqual(list(range(0, 10, 2)), output1)
    with warnings.catch_warnings(record=True) as wa:
        self.assertEqual(list(range(0, 10, 2)), list(dp1))  # Reset even when dp2 is not read
        self.assertEqual(len(wa), 1)
        self.assertRegex(str(wa[0].message), r"Some child DataPipes are not exhausted")
    output2 = []
    for i, n2 in enumerate(dp2):
        output2.append(n2)
        if i == 1:
            self.assertEqual(list(range(1, 5, 2)), output2)
            with warnings.catch_warnings(record=True) as wa:
                self.assertEqual(list(range(0, 10, 2)), list(dp1))  # Can reset even when dp2 is partially read
                self.assertEqual(len(wa), 1)
                self.assertRegex(str(wa[0].message), r"Some child DataPipes are not exhausted")
            break
    output2 = list(dp2)  # output2 has to read from beginning again
    self.assertEqual(list(range(1, 10, 2)), output2)

    # Test Case: drop_none = True (multiples of 5 classify as None and vanish)
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2 if x % 5 != 0 else None,
                              drop_none=True)
    self.assertEqual([2, 4, 6, 8], list(dp1))
    self.assertEqual([1, 3, 7, 9], list(dp2))

    # Test Case: drop_none = False (a None classification is an error)
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2 if x % 5 != 0 else None,
                              drop_none=False)
    it1 = iter(dp1)
    with self.assertRaises(ValueError):
        next(it1)

    # Test Case: __len__ not implemented
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=lambda x: x % 2)
    with self.assertRaises(TypeError):
        len(dp1)  # It is not implemented as we do not know length for each child in advance
    with self.assertRaises(TypeError):
        len(dp2)

    # Pickle Test:
    dp1, dp2 = input_dp.demux(num_instances=2, classifier_fn=odd_or_even)
    traverse(dp1)  # This should not raise any error
    for _ in zip(dp1, dp2):
        pass
    traverse(dp2)  # This should not raise any error either
def test_map_datapipe(self):
    """Mapper: applies fn element-wise, keeps length, propagates no-len."""
    source = dp.iter.IterableWrapper(range(10))

    def to_tensor(item, dtype=torch.float, *, sum=False):
        # Convert to a tensor; optionally reduce to a scalar sum.
        data = torch.tensor(item, dtype=dtype)
        return data.sum() if sum else data

    mapped = source.map(to_tensor)
    self.assertEqual(len(source), len(mapped))
    for got, raw in zip(mapped, source):
        self.assertEqual(got, torch.tensor(raw, dtype=torch.float))

    mapped = source.map(partial(to_tensor, dtype=torch.int, sum=True))
    self.assertEqual(len(source), len(mapped))
    for got, raw in zip(mapped, source):
        self.assertEqual(got, torch.tensor(raw, dtype=torch.int).sum())

    # A source without __len__ still maps, but len() must raise.
    no_len_source = IDP_NoLen(range(10))
    mapped_no_len = no_len_source.map(lambda x: x)
    with self.assertRaisesRegex(TypeError, r"instance doesn't have valid length$"):
        len(mapped_no_len)
    for got, raw in zip(mapped_no_len, no_len_source):
        self.assertEqual(got, torch.tensor(raw, dtype=torch.float))
@suppress_warnings  # Suppress warning for lambda fn
def test_map_tuple_list_with_col_datapipe(self):
    """Mapper with input_col/output_col over tuple and list elements."""
    def fn_11(d):
        # 1 input column -> 1 output value
        return -d

    def fn_1n(d):
        # 1 input column -> 2 output values
        return -d, d

    def fn_n1(d0, d1):
        # 2 input columns -> 1 output value
        return d0 + d1

    def fn_nn(d0, d1):
        # 2 input columns -> 3 output values
        return -d0, -d1, d0 + d1

    def _helper(ref_fn, fn, input_col=None, output_col=None):
        # Apply `fn` column-wise and compare against `ref_fn` applied to
        # the whole row; runs for both list and tuple rows.
        for constr in (list, tuple):
            datapipe = dp.iter.IterableWrapper([constr((0, 1, 2)), constr((3, 4, 5)), constr((6, 7, 8))])
            res_dp = datapipe.map(fn, input_col, output_col)
            ref_dp = datapipe.map(ref_fn)
            self.assertEqual(list(res_dp), list(ref_dp))
            # Reset
            self.assertEqual(list(res_dp), list(ref_dp))

    # Replacing with one input column and default output column
    _helper(lambda data: (data[0], -data[1], data[2]), fn_11, 1)
    _helper(lambda data: (data[0], (-data[1], data[1]), data[2]), fn_1n, 1)
    # The index of input column is out of range
    with self.assertRaises(IndexError):
        _helper(None, fn_1n, 3)
    # Unmatched input columns with fn arguments
    with self.assertRaises(TypeError):
        _helper(None, fn_n1, 1)
    # Replacing with multiple input columns and default output column (the left-most input column)
    _helper(lambda data: (data[1], data[2] + data[0]), fn_n1, [2, 0])
    _helper(lambda data: (data[0], (-data[2], -data[1], data[2] + data[1])), fn_nn, [2, 1])

    # output_col can only be specified when input_col is not None
    with self.assertRaises(ValueError):
        _helper(None, fn_n1, None, 1)
    # output_col can only be single-element list or tuple
    with self.assertRaises(ValueError):
        _helper(None, fn_n1, None, [0, 1])
    # Single-element list as output_col
    _helper(lambda data: (-data[1], data[1], data[2]), fn_11, 1, [0])
    # Replacing with one input column and single specified output column
    _helper(lambda data: (-data[1], data[1], data[2]), fn_11, 1, 0)
    _helper(lambda data: (data[0], data[1], (-data[1], data[1])), fn_1n, 1, 2)
    # The index of output column is out of range
    with self.assertRaises(IndexError):
        _helper(None, fn_1n, 1, 3)
    _helper(lambda data: (data[0], data[0] + data[2], data[2]), fn_n1, [0, 2], 1)
    _helper(lambda data: ((-data[1], -data[2], data[1] + data[2]), data[1], data[2]), fn_nn, [1, 2], 0)

    # Appending the output at the end (output_col=-1)
    _helper(lambda data: (*data, -data[1]), fn_11, 1, -1)
    _helper(lambda data: (*data, (-data[1], data[1])), fn_1n, 1, -1)
    _helper(lambda data: (*data, data[0] + data[2]), fn_n1, [0, 2], -1)
    _helper(lambda data: (*data, (-data[1], -data[2], data[1] + data[2])), fn_nn, [1, 2], -1)
@suppress_warnings  # Suppress warning for lambda fn
def test_map_dict_with_col_datapipe(self):
    """Mapper with input_col/output_col over dict elements (string keys)."""
    def fn_11(d):
        # 1 input key -> 1 output value
        return -d

    def fn_1n(d):
        # 1 input key -> 2 output values
        return -d, d

    def fn_n1(d0, d1):
        # 2 input keys -> 1 output value
        return d0 + d1

    def fn_nn(d0, d1):
        # 2 input keys -> 3 output values
        return -d0, -d1, d0 + d1

    # Prevent modification in-place to support resetting
    def _dict_update(data, newdata, remove_idx=None):
        _data = dict(data)
        _data.update(newdata)
        if remove_idx:
            for idx in remove_idx:
                del _data[idx]
        return _data

    def _helper(ref_fn, fn, input_col=None, output_col=None):
        # Apply `fn` key-wise and compare against `ref_fn` applied to the
        # whole dict.
        datapipe = dp.iter.IterableWrapper(
            [{"x": 0, "y": 1, "z": 2},
             {"x": 3, "y": 4, "z": 5},
             {"x": 6, "y": 7, "z": 8}]
        )
        res_dp = datapipe.map(fn, input_col, output_col)
        ref_dp = datapipe.map(ref_fn)
        self.assertEqual(list(res_dp), list(ref_dp))
        # Reset
        self.assertEqual(list(res_dp), list(ref_dp))

    # Replacing with one input column and default output column
    _helper(lambda data: _dict_update(data, {"y": -data["y"]}), fn_11, "y")
    _helper(lambda data: _dict_update(data, {"y": (-data["y"], data["y"])}), fn_1n, "y")
    # The key of input column is not in dict
    with self.assertRaises(KeyError):
        _helper(None, fn_1n, "a")
    # Unmatched input columns with fn arguments
    with self.assertRaises(TypeError):
        _helper(None, fn_n1, "y")
    # Replacing with multiple input columns and default output column (the left-most input column)
    _helper(lambda data: _dict_update(data, {"z": data["x"] + data["z"]}, ["x"]), fn_n1, ["z", "x"])
    _helper(lambda data: _dict_update(data, {"z": (-data["z"], -data["y"], data["y"] + data["z"])}, ["y"]), fn_nn, ["z", "y"])

    # output_col can only be specified when input_col is not None
    with self.assertRaises(ValueError):
        _helper(None, fn_n1, None, "x")
    # output_col can only be single-element list or tuple
    with self.assertRaises(ValueError):
        _helper(None, fn_n1, None, ["x", "y"])
    # Single-element list as output_col
    _helper(lambda data: _dict_update(data, {"x": -data["y"]}), fn_11, "y", ["x"])
    # Replacing with one input column and single specified output column
    _helper(lambda data: _dict_update(data, {"x": -data["y"]}), fn_11, "y", "x")
    _helper(lambda data: _dict_update(data, {"z": (-data["y"], data["y"])}), fn_1n, "y", "z")
    _helper(lambda data: _dict_update(data, {"y": data["x"] + data["z"]}), fn_n1, ["x", "z"], "y")
    _helper(lambda data: _dict_update(data, {"x": (-data["y"], -data["z"], data["y"] + data["z"])}), fn_nn, ["y", "z"], "x")

    # Adding new key to dict for the output
    _helper(lambda data: _dict_update(data, {"a": -data["y"]}), fn_11, "y", "a")
    _helper(lambda data: _dict_update(data, {"a": (-data["y"], data["y"])}), fn_1n, "y", "a")
    _helper(lambda data: _dict_update(data, {"a": data["x"] + data["z"]}), fn_n1, ["x", "z"], "a")
    _helper(lambda data: _dict_update(data, {"a": (-data["y"], -data["z"], data["y"] + data["z"])}), fn_nn, ["y", "z"], "a")
    def test_collate_datapipe(self):
        """``.collate()``: default collate fn, custom fn, partial fn, reset behavior, __len__."""
        arrs = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
        input_dp = dp.iter.IterableWrapper(arrs)

        def _collate_fn(batch, default_type=torch.float):
            # Collapse each batch to its scalar sum, cast to the requested dtype.
            return torch.tensor(sum(batch), dtype=default_type)

        # Functional Test: defaults to the default collate function when a custom one is not specified
        collate_dp = input_dp.collate()
        for x, y in zip(input_dp, collate_dp):
            self.assertEqual(torch.tensor(x), y)

        # Functional Test: custom collate function
        collate_dp = input_dp.collate(collate_fn=_collate_fn)
        for x, y in zip(input_dp, collate_dp):
            self.assertEqual(torch.tensor(sum(x), dtype=torch.float), y)

        # Functional Test: custom, partial collate function
        collate_dp = input_dp.collate(partial(_collate_fn, default_type=torch.int))
        for x, y in zip(input_dp, collate_dp):
            self.assertEqual(torch.tensor(sum(x), dtype=torch.int), y)

        # Reset Test: reset the DataPipe and results are still correct
        n_elements_before_reset = 1
        res_before_reset, res_after_reset = reset_after_n_next_calls(collate_dp, n_elements_before_reset)
        self.assertEqual([torch.tensor(6, dtype=torch.int)], res_before_reset)
        for x, y in zip(input_dp, res_after_reset):
            self.assertEqual(torch.tensor(sum(x), dtype=torch.int), y)

        # __len__ Test: __len__ is inherited
        self.assertEqual(len(input_dp), len(collate_dp))

        # __len__ Test: verify that it has no valid __len__ when the source doesn't have it
        input_dp_nl = IDP_NoLen(arrs)
        collate_dp_nl = input_dp_nl.collate()
        with self.assertRaisesRegex(TypeError, r"instance doesn't have valid length$"):
            len(collate_dp_nl)
        for x, y in zip(input_dp_nl, collate_dp_nl):
            self.assertEqual(torch.tensor(x), y)
    def test_batch_datapipe(self):
        """``.batch()``: size validation, keep/drop of the trailing partial batch, __len__."""
        arrs = list(range(10))
        input_dp = dp.iter.IterableWrapper(arrs)

        # batch_size must be positive
        with self.assertRaises(AssertionError):
            input_dp.batch(batch_size=0)

        # Default not drop the last batch
        bs = 3
        batch_dp = input_dp.batch(batch_size=bs)
        self.assertEqual(len(batch_dp), 4)
        for i, batch in enumerate(batch_dp):
            # Final batch holds the single leftover element; the rest are full.
            self.assertEqual(len(batch), 1 if i == 3 else bs)
            self.assertEqual(batch, arrs[i * bs: i * bs + len(batch)])

        # Drop the last batch
        bs = 4
        batch_dp = input_dp.batch(batch_size=bs, drop_last=True)
        self.assertEqual(len(batch_dp), 2)
        for i, batch in enumerate(batch_dp):
            self.assertEqual(len(batch), bs)
            self.assertEqual(batch, arrs[i * bs: i * bs + len(batch)])

        # A source without __len__ propagates the "no valid length" error
        input_dp_nl = IDP_NoLen(range(10))
        batch_dp_nl = input_dp_nl.batch(batch_size=2)
        with self.assertRaisesRegex(TypeError, r"instance doesn't have valid length$"):
            len(batch_dp_nl)
    def test_unbatch_datapipe(self):
        """``.unbatch()``: flatten one level, a chosen level, all levels (-1), invalid levels."""
        target_length = 6
        prebatch_dp = dp.iter.IterableWrapper(range(target_length))

        # Round-trip: batch(3) then unbatch() restores the original stream
        input_dp = prebatch_dp.batch(3)
        unbatch_dp = input_dp.unbatch()
        self.assertEqual(len(list(unbatch_dp)), target_length)
        for i, res in zip(prebatch_dp, unbatch_dp):
            self.assertEqual(i, res)

        # Unbatching pre-made (hand-written) batches
        input_dp = dp.iter.IterableWrapper([[0, 1, 2], [3, 4, 5]])
        unbatch_dp = input_dp.unbatch()
        self.assertEqual(len(list(unbatch_dp)), target_length)
        for i, res in zip(prebatch_dp, unbatch_dp):
            self.assertEqual(i, res)

        # Nested batches: default unbatch removes only the outermost level
        input_dp = dp.iter.IterableWrapper([[[0, 1], [2, 3]], [[4, 5], [6, 7]]])
        unbatch_dp = input_dp.unbatch()
        expected_dp = [[0, 1], [2, 3], [4, 5], [6, 7]]
        self.assertEqual(len(list(unbatch_dp)), 4)
        for i, res in zip(expected_dp, unbatch_dp):
            self.assertEqual(i, res)

        # unbatch_level=2 flattens two levels of nesting
        unbatch_dp = input_dp.unbatch(unbatch_level=2)
        expected_dp2 = [0, 1, 2, 3, 4, 5, 6, 7]
        self.assertEqual(len(list(unbatch_dp)), 8)
        for i, res in zip(expected_dp2, unbatch_dp):
            self.assertEqual(i, res)

        # unbatch_level=-1 flattens completely
        unbatch_dp = input_dp.unbatch(unbatch_level=-1)
        self.assertEqual(len(list(unbatch_dp)), 8)
        for i, res in zip(expected_dp2, unbatch_dp):
            self.assertEqual(i, res)

        # Invalid levels: < -1 raises ValueError, deeper-than-nesting raises IndexError.
        # The errors surface lazily, hence the iteration inside the assert context.
        input_dp = dp.iter.IterableWrapper([[0, 1, 2], [3, 4, 5]])
        with self.assertRaises(ValueError):
            unbatch_dp = input_dp.unbatch(unbatch_level=-2)
            for i in unbatch_dp:
                print(i)
        with self.assertRaises(IndexError):
            unbatch_dp = input_dp.unbatch(unbatch_level=5)
            for i in unbatch_dp:
                print(i)
    def test_bucket_batch_datapipe(self):
        """``BucketBatcher``: argument validation, missing-length error, sorted buckets."""
        input_dp = dp.iter.IterableWrapper(range(20))
        # batch_size must be positive
        with self.assertRaises(AssertionError):
            dp.iter.BucketBatcher(input_dp, batch_size=0)

        # A source without __len__ propagates the "no valid length" error
        input_dp_nl = IDP_NoLen(range(20))
        bucket_dp_nl = dp.iter.BucketBatcher(input_dp_nl, batch_size=7)
        with self.assertRaisesRegex(TypeError, r"instance doesn't have valid length$"):
            len(bucket_dp_nl)

        def _helper(**kwargs):
            # Shuffle 0..99, bucket-batch with `kwargs`, then verify every produced
            # batch is sorted and that each bucket's elements are globally sorted.
            data_len = 100
            arrs = list(range(data_len))
            random.shuffle(arrs)
            input_dp = dp.iter.IterableWrapper(arrs)
            bucket_dp = dp.iter.BucketBatcher(input_dp, **kwargs)
            self.assertEqual(len(bucket_dp), data_len // 3 if kwargs['drop_last'] else data_len // 3 + 1)

            def _verify_bucket_sorted(bucket):
                # Sort batch in a bucket
                bucket = sorted(bucket, key=lambda x: x[0])
                flat = [item for batch in bucket for item in batch]
                # Elements in the bucket should be sorted
                self.assertEqual(flat, sorted(flat))

            batch_num = kwargs['batch_num'] if 'batch_num' in kwargs else 100
            bucket = []
            for idx, d in enumerate(bucket_dp):
                self.assertEqual(d, sorted(d))
                bucket.append(d)
                # A bucket is complete every `batch_num` batches; verify and restart it.
                if idx % batch_num == batch_num - 1:
                    _verify_bucket_sorted(bucket)
                    bucket = []
            # Verify the trailing (possibly partial) bucket as well.
            _verify_bucket_sorted(bucket)

        def _sort_fn(data):
            return sorted(data)

        # In-batch shuffle
        _helper(batch_size=3, drop_last=False, batch_num=5, sort_key=_sort_fn)
        _helper(batch_size=3, drop_last=False, batch_num=2, bucket_num=2, sort_key=_sort_fn)
        _helper(batch_size=3, drop_last=True, batch_num=2, sort_key=_sort_fn)
        _helper(batch_size=3, drop_last=True, batch_num=2, bucket_num=2, sort_key=_sort_fn)
def test_filter_datapipe(self):
input_ds = dp.iter.IterableWrapper(range(10))
def _filter_fn(data, val, clip=False):
if clip:
return data >= val
return True
filter_dp = input_ds.filter(partial(_filter_fn, val=5))
for data, exp in zip(filter_dp, range(10)):
self.assertEqual(data, exp)
filter_dp = input_ds.filter(partial(_filter_fn, val=5, clip=True))
for data, exp in zip(filter_dp, range(5, 10)):
self.assertEqual(data, exp)
with self.assertRaisesRegex(TypeError, r"has no len"):
len(filter_dp)
def _non_bool_fn(data):
return 1
filter_dp = input_ds.filter(filter_fn=_non_bool_fn)
with self.assertRaises(ValueError):
temp = list(filter_dp)
    def test_sampler_datapipe(self):
        """``Sampler``: default SequentialSampler order, RandomSampler construction,
        and the requirement that the source provide ``__len__``."""
        input_dp = dp.iter.IterableWrapper(range(10))
        # Default SequentialSampler
        sampled_dp = dp.iter.Sampler(input_dp)  # type: ignore[var-annotated]
        self.assertEqual(len(sampled_dp), 10)
        for i, x in enumerate(sampled_dp):
            self.assertEqual(x, i)
        # RandomSampler — construction-only smoke check; the pipe is never iterated
        # here (presumably because its output is random). NOTE(review): consider at
        # least iterating and checking the multiset of results.
        random_sampled_dp = dp.iter.Sampler(input_dp, sampler=RandomSampler, sampler_kwargs={'replacement': True})  # type: ignore[var-annotated] # noqa: B950
        # Requires `__len__` to build SamplerDataPipe
        input_dp_nolen = IDP_NoLen(range(10))
        with self.assertRaises(AssertionError):
            sampled_dp = dp.iter.Sampler(input_dp_nolen)
    def test_shuffle_datapipe(self):
        """``.shuffle()``: buffer validation, permutation property, DataLoader
        determinism under a fixed seed, and missing-length propagation."""
        exp = list(range(20))
        input_ds = dp.iter.IterableWrapper(exp)

        # buffer_size must be positive
        with self.assertRaises(AssertionError):
            shuffle_dp = input_ds.shuffle(buffer_size=0)

        # Buffer smaller than / equal to / larger than the stream: always a permutation
        for bs in (5, 20, 25):
            shuffle_dp = input_ds.shuffle(buffer_size=bs)
            self.assertEqual(len(shuffle_dp), len(input_ds))
            random.seed(123)
            res = list(shuffle_dp)
            self.assertEqual(sorted(res), exp)
            # Test Deterministic: same seed through a DataLoader reproduces `res`
            for num_workers in (0, 1):
                random.seed(123)
                dl = DataLoader(shuffle_dp, num_workers=num_workers, worker_init_fn=_worker_init_fn, shuffle=True)
                dl_res = list(dl)
                self.assertEqual(res, dl_res)

        # A source without __len__ propagates the "no valid length" error
        shuffle_dp_nl = IDP_NoLen(range(20)).shuffle(buffer_size=5)
        with self.assertRaisesRegex(TypeError, r"instance doesn't have valid length$"):
            len(shuffle_dp_nl)
    def test_zip_datapipe(self):
        """``Zipper`` (iter): input type checking, shortest-source semantics, reset."""
        # Non-DataPipe inputs are rejected
        with self.assertRaises(TypeError):
            dp.iter.Zipper(dp.iter.IterableWrapper(range(10)), list(range(10)))  # type: ignore[arg-type]

        # Zipping with a length-less pipe: iterable, but no valid __len__
        zipped_dp = dp.iter.Zipper(dp.iter.IterableWrapper(range(10)), IDP_NoLen(range(5)))  # type: ignore[var-annotated]
        with self.assertRaisesRegex(TypeError, r"instance doesn't have valid length$"):
            len(zipped_dp)
        exp = list((i, i) for i in range(5))
        self.assertEqual(list(zipped_dp), exp)

        # Both sources sized: __len__ is the shorter of the two
        zipped_dp = dp.iter.Zipper(dp.iter.IterableWrapper(range(10)), dp.iter.IterableWrapper(range(5)))
        self.assertEqual(len(zipped_dp), 5)
        self.assertEqual(list(zipped_dp), exp)
        # Reset
        self.assertEqual(list(zipped_dp), exp)
class TestFunctionalMapDataPipe(TestCase):
    """Tests for the functional (method-chaining) API of MapDataPipes."""

    # TODO(VitalyFedyunin): If dill installed this test fails
    def _test_picklable(self):
        """Mapper pipes built from plain/partial fns pickle; lambda-based ones warn and fail."""
        arr = range(10)
        picklable_datapipes: List[
            Tuple[Type[MapDataPipe], MapDataPipe, Tuple, Dict[str, Any]]
        ] = [
            (dp.map.Mapper, dp.map.SequenceWrapper(arr), (), {}),
            (dp.map.Mapper, dp.map.SequenceWrapper(arr), (_fake_fn, (0,)), {}),
            (dp.map.Mapper, dp.map.SequenceWrapper(arr), (partial(_fake_add, 1), (0,)), {}),
        ]
        for dpipe, input_dp, dp_args, dp_kwargs in picklable_datapipes:
            p = pickle.dumps(dpipe(input_dp, *dp_args, **dp_kwargs))  # type: ignore[call-arg]
        unpicklable_datapipes: List[
            Tuple[Type[MapDataPipe], MapDataPipe, Tuple, Dict[str, Any]]
        ] = [
            (dp.map.Mapper, dp.map.SequenceWrapper(arr), (lambda x: x,), {}),
        ]
        for dpipe, input_dp, dp_args, dp_kwargs in unpicklable_datapipes:
            # Construction warns about the lambda; pickling it then fails.
            with warnings.catch_warnings(record=True) as wa:
                datapipe = dpipe(input_dp, *dp_args, **dp_kwargs)  # type: ignore[call-arg]
                self.assertEqual(len(wa), 1)
                self.assertRegex(
                    str(wa[0].message), r"^Lambda function is not supported for pickle"
                )
                with self.assertRaises(AttributeError):
                    p = pickle.dumps(datapipe)

    def test_sequence_wrapper_datapipe(self):
        """``SequenceWrapper``: ordering, deepcopy default, deepcopy=False aliasing, reset, __len__."""
        seq = list(range(10))
        input_dp = dp.map.SequenceWrapper(seq)
        # Functional Test: all elements are equal in the same order
        self.assertEqual(seq, list(input_dp))
        # Functional Test: confirm deepcopy works by default
        seq.append(11)
        self.assertEqual(list(range(10)), list(input_dp))  # input_dp shouldn't have 11
        # Functional Test: non-deepcopy version is working
        seq2 = [1, 2, 3]
        input_dp_non_deep = dp.map.SequenceWrapper(seq2, deepcopy=False)
        seq2.append(4)
        self.assertEqual(list(seq2), list(input_dp_non_deep))  # should have 4
        # Reset Test: reset the DataPipe
        seq = list(range(10))
        n_elements_before_reset = 5
        res_before_reset, res_after_reset = reset_after_n_next_calls(input_dp, n_elements_before_reset)
        self.assertEqual(list(range(5)), res_before_reset)
        self.assertEqual(seq, res_after_reset)
        # __len__ Test: inherits length from sequence
        self.assertEqual(len(seq), len(input_dp))

    def test_concat_datapipe(self):
        """``Concater`` (map): argument validation, indexing across sources, __len__."""
        input_dp1 = dp.map.SequenceWrapper(range(10))
        input_dp2 = dp.map.SequenceWrapper(range(5))
        with self.assertRaisesRegex(ValueError, r"Expected at least one DataPipe"):
            dp.map.Concater()
        with self.assertRaisesRegex(TypeError, r"Expected all inputs to be `MapDataPipe`"):
            dp.map.Concater(input_dp1, ())  # type: ignore[arg-type]
        concat_dp = input_dp1.concat(input_dp2)
        self.assertEqual(len(concat_dp), 15)
        # Indexing spills over from the first source into the second
        for index in range(15):
            self.assertEqual(concat_dp[index], (list(range(10)) + list(range(5)))[index])
        self.assertEqual(list(concat_dp), list(range(10)) + list(range(5)))

    def test_zip_datapipe(self):
        """``Zipper`` (map): validation, tuple indexing, shortest-source length and IndexError."""
        input_dp1 = dp.map.SequenceWrapper(range(10))
        input_dp2 = dp.map.SequenceWrapper(range(5))
        input_dp3 = dp.map.SequenceWrapper(range(15))
        # Functional Test: requires at least one input DataPipe
        with self.assertRaisesRegex(ValueError, r"Expected at least one DataPipe"):
            dp.map.Zipper()
        # Functional Test: all inputs must be MapDataPipes
        with self.assertRaisesRegex(TypeError, r"Expected all inputs to be `MapDataPipe`"):
            dp.map.Zipper(input_dp1, ())  # type: ignore[arg-type]
        # Functional Test: Zip the elements up as a tuples
        zip_dp = input_dp1.zip(input_dp2, input_dp3)
        self.assertEqual([(i, i, i) for i in range(5)], [zip_dp[i] for i in range(5)])
        # Functional Test: Raise IndexError when index equal or exceed the length of the shortest DataPipe
        with self.assertRaisesRegex(IndexError, r"out of range"):
            input_dp1.zip(input_dp2, input_dp3)[5]
        # __len__ Test: returns the length of the shortest DataPipe
        zip_dp = input_dp1.zip(input_dp2, input_dp3)
        self.assertEqual(5, len(zip_dp))

    def test_shuffler_datapipe(self):
        """``Shuffler`` (map): default 0-based indices, custom indices, reset, __len__."""
        input_dp1 = dp.map.SequenceWrapper(range(10))
        input_dp2 = dp.map.SequenceWrapper({'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5})
        # Functional Test: Assumes 0-index when indices is not given
        shuffler_dp = input_dp1.shuffle()
        self.assertEqual(set(range(10)), set(shuffler_dp))
        # Functional Test: Custom indices are working
        shuffler_dp = dp.map.Shuffler(input_dp2, indices=['a', 'b', 'c', 'd', 'e'])
        self.assertEqual(set(range(1, 6)), set(shuffler_dp))
        # # Reset Test:
        shuffler_dp = input_dp1.shuffle()
        n_elements_before_reset = 5
        res_before_reset, res_after_reset = reset_after_n_next_calls(shuffler_dp, n_elements_before_reset)
        self.assertEqual(5, len(res_before_reset))
        for x in res_before_reset:
            self.assertTrue(x in set(range(10)))
        self.assertEqual(set(range(10)), set(res_after_reset))
        # __len__ Test: returns the length of the input DataPipe
        shuffler_dp = input_dp1.shuffle()
        self.assertEqual(10, len(shuffler_dp))

    def test_map_datapipe(self):
        """``.map()`` (map): plain fn and partial fn with keyword-only arg; __len__ inherited."""
        arr = range(10)
        input_dp = dp.map.SequenceWrapper(arr)

        def fn(item, dtype=torch.float, *, sum=False):
            data = torch.tensor(item, dtype=dtype)
            return data if not sum else data.sum()

        map_dp = input_dp.map(fn)
        self.assertEqual(len(input_dp), len(map_dp))
        for index in arr:
            self.assertEqual(
                map_dp[index], torch.tensor(input_dp[index], dtype=torch.float)
            )
        map_dp = input_dp.map(partial(fn, dtype=torch.int, sum=True))
        self.assertEqual(len(input_dp), len(map_dp))
        for index in arr:
            self.assertEqual(
                map_dp[index], torch.tensor(input_dp[index], dtype=torch.int).sum()
            )

    def test_batch_datapipe(self):
        """``Batcher`` (map): default batching, drop_last, nested batching, reset, __len__."""
        arr = list(range(13))
        input_dp = dp.map.SequenceWrapper(arr)
        # Functional Test: batches top level by default
        batch_dp = dp.map.Batcher(input_dp, batch_size=2)
        self.assertEqual([[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10, 11], [12]], list(batch_dp))
        # Functional Test: drop_last on command
        batch_dp = dp.map.Batcher(input_dp, batch_size=2, drop_last=True)
        self.assertEqual([[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10, 11]], list(batch_dp))
        # Functional Test: nested batching
        batch_dp_2 = batch_dp.batch(batch_size=3)
        self.assertEqual([[[0, 1], [2, 3], [4, 5]], [[6, 7], [8, 9], [10, 11]]], list(batch_dp_2))
        # Reset Test:
        n_elements_before_reset = 3
        res_before_reset, res_after_reset = reset_after_n_next_calls(batch_dp, n_elements_before_reset)
        self.assertEqual([[0, 1], [2, 3], [4, 5]], res_before_reset)
        self.assertEqual([[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10, 11]], res_after_reset)
        # __len__ Test:
        self.assertEqual(6, len(batch_dp))
        self.assertEqual(2, len(batch_dp_2))
# Metaclass conflict for Python 3.6
# Multiple inheritance with NamedTuple is not supported for Python 3.9
# => a Generic + NamedTuple class can only be declared on Python 3.7 / 3.8.
_generic_namedtuple_allowed = sys.version_info >= (3, 7) and sys.version_info < (3, 9)
if _generic_namedtuple_allowed:
    class InvalidData(Generic[T_co], NamedTuple):
        # Deliberately "invalid" payload type; TestTyping uses it to assert that
        # Generic+NamedTuple annotations are rejected by the DataPipe typing machinery.
        name: str
        data: T_co
class TestTyping(TestCase):
    """Tests for the DataPipe typing machinery: subtype/subinstance checks,
    class-creation-time validation, construct-time argument validation,
    runtime validation of yielded items, and ``reinforce_type``."""

    def test_subtype(self):
        """``issubtype``: reflexivity, Any, TypeVar constraints, and subscripted generics."""
        from torch.utils.data._typing import issubtype
        basic_type = (int, str, bool, float, complex,
                      list, tuple, dict, set, T_co)
        # Every type is a subtype of itself and of Any; only T_co absorbs Any.
        for t in basic_type:
            self.assertTrue(issubtype(t, t))
            self.assertTrue(issubtype(t, Any))
            if t == T_co:
                self.assertTrue(issubtype(Any, t))
            else:
                self.assertFalse(issubtype(Any, t))
        # Distinct basic types are mutually unrelated (except via T_co).
        for t1, t2 in itertools.product(basic_type, basic_type):
            if t1 == t2 or t2 == T_co:
                self.assertTrue(issubtype(t1, t2))
            else:
                self.assertFalse(issubtype(t1, t2))
        T = TypeVar('T', int, str)
        S = TypeVar('S', bool, Union[str, int], Tuple[int, T])  # type: ignore[valid-type]
        # (subtype, supertype) pairs — each must hold strictly one-way.
        types = ((int, Optional[int]),
                 (List, Union[int, list]),
                 (Tuple[int, str], S),
                 (Tuple[int, str], tuple),
                 (T, S),
                 (S, T_co),
                 (T, Union[S, Set]))
        for sub, par in types:
            self.assertTrue(issubtype(sub, par))
            self.assertFalse(issubtype(par, sub))
        subscriptable_types = {
            List: 1,
            Tuple: 2,  # use 2 parameters
            Set: 1,
            Dict: 2,
        }
        # Subtyping distributes over subscripted containers; with recursive=False
        # only the outer container is compared, so the reverse also passes.
        for subscript_type, n in subscriptable_types.items():
            for ts in itertools.combinations(types, n):
                subs, pars = zip(*ts)
                sub = subscript_type[subs]  # type: ignore[index]
                par = subscript_type[pars]  # type: ignore[index]
                self.assertTrue(issubtype(sub, par))
                self.assertFalse(issubtype(par, sub))
                # Non-recursive check
                self.assertTrue(issubtype(par, sub, recursive=False))

    def test_issubinstance(self):
        """``issubinstance``: instance checks against basic, container, and TypeVar types."""
        from torch.utils.data._typing import issubinstance
        basic_data = (1, '1', True, 1., complex(1., 0.))
        basic_type = (int, str, bool, float, complex)
        S = TypeVar('S', bool, Union[str, int])
        for d in basic_data:
            self.assertTrue(issubinstance(d, Any))
            self.assertTrue(issubinstance(d, T_co))
            # S is constrained to bool | (str|int); only those instances match.
            if type(d) in (bool, int, str):
                self.assertTrue(issubinstance(d, S))
            else:
                self.assertFalse(issubinstance(d, S))
            for t in basic_type:
                if type(d) == t:
                    self.assertTrue(issubinstance(d, t))
                else:
                    self.assertFalse(issubinstance(d, t))
        # list/set
        dt = (([1, '1', 2], List), (set({1, '1', 2}), Set))
        for d, t in dt:
            self.assertTrue(issubinstance(d, t))
            self.assertTrue(issubinstance(d, t[T_co]))  # type: ignore[index]
            self.assertFalse(issubinstance(d, t[int]))  # type: ignore[index]
        # dict
        d = dict({'1': 1, '2': 2.})
        self.assertTrue(issubinstance(d, Dict))
        self.assertTrue(issubinstance(d, Dict[str, T_co]))
        self.assertFalse(issubinstance(d, Dict[str, int]))
        # tuple
        d = (1, '1', 2)
        self.assertTrue(issubinstance(d, Tuple))
        self.assertTrue(issubinstance(d, Tuple[int, str, T_co]))
        self.assertFalse(issubinstance(d, Tuple[int, Any]))
        self.assertFalse(issubinstance(d, Tuple[int, int, int]))

    # Static checking annotation
    def test_compile_time(self):
        """Class-creation-time validation of ``__iter__`` return annotations and type params."""
        # __iter__ must be annotated to return an Iterator...
        with self.assertRaisesRegex(TypeError, r"Expected 'Iterator' as the return"):
            class InvalidDP1(IterDataPipe[int]):
                def __iter__(self) -> str:  # type: ignore[misc, override]
                    yield 0
        # ...whose item type is a subtype of the DataPipe's declared type.
        with self.assertRaisesRegex(TypeError, r"Expected return type of '__iter__'"):
            class InvalidDP2(IterDataPipe[Tuple]):
                def __iter__(self) -> Iterator[int]:  # type: ignore[override]
                    yield 0
        with self.assertRaisesRegex(TypeError, r"Expected return type of '__iter__'"):
            class InvalidDP3(IterDataPipe[Tuple[int, str]]):
                def __iter__(self) -> Iterator[tuple]:  # type: ignore[override]
                    yield (0, )
        if _generic_namedtuple_allowed:
            with self.assertRaisesRegex(TypeError, r"is not supported by Python typing"):
                class InvalidDP4(IterDataPipe["InvalidData[int]"]):  # type: ignore[type-arg, misc]
                    pass

        class DP1(IterDataPipe[Tuple[int, str]]):
            def __init__(self, length):
                self.length = length

            def __iter__(self) -> Iterator[Tuple[int, str]]:
                for d in range(self.length):
                    yield d, str(d)

        self.assertTrue(issubclass(DP1, IterDataPipe))
        dp1 = DP1(10)
        # Fixed-type pipes: class type and instance type are mutually subtypes.
        self.assertTrue(DP1.type.issubtype(dp1.type) and dp1.type.issubtype(DP1.type))
        dp1_ = DP1(5)
        self.assertEqual(dp1.type, dp1_.type)

        # A concrete (already parameterized) DataPipe cannot be subscripted again.
        with self.assertRaisesRegex(TypeError, r"is not a generic class"):
            class InvalidDP5(DP1[tuple]):  # type: ignore[type-arg]
                def __iter__(self) -> Iterator[tuple]:  # type: ignore[override]
                    yield (0, )

        class DP2(IterDataPipe[T_co]):
            def __iter__(self) -> Iterator[T_co]:
                for d in range(10):
                    yield d  # type: ignore[misc]

        self.assertTrue(issubclass(DP2, IterDataPipe))
        dp2 = DP2()  # type: ignore[var-annotated]
        self.assertTrue(DP2.type.issubtype(dp2.type) and dp2.type.issubtype(DP2.type))
        dp2_ = DP2()  # type: ignore[var-annotated]
        self.assertEqual(dp2.type, dp2_.type)

        class DP3(IterDataPipe[Tuple[T_co, str]]):
            r""" DataPipe without fixed type with __init__ function"""
            def __init__(self, datasource):
                self.datasource = datasource

            def __iter__(self) -> Iterator[Tuple[T_co, str]]:
                for d in self.datasource:
                    yield d, str(d)

        self.assertTrue(issubclass(DP3, IterDataPipe))
        dp3 = DP3(range(10))  # type: ignore[var-annotated]
        self.assertTrue(DP3.type.issubtype(dp3.type) and dp3.type.issubtype(DP3.type))
        dp3_ = DP3(5)  # type: ignore[var-annotated]
        self.assertEqual(dp3.type, dp3_.type)

        class DP4(IterDataPipe[tuple]):
            r""" DataPipe without __iter__ annotation"""
            def __iter__(self):
                raise NotImplementedError

        self.assertTrue(issubclass(DP4, IterDataPipe))
        dp4 = DP4()
        self.assertTrue(dp4.type.param == tuple)

        class DP5(IterDataPipe):
            r""" DataPipe without type annotation"""
            def __iter__(self) -> Iterator[str]:
                raise NotImplementedError

        self.assertTrue(issubclass(DP5, IterDataPipe))
        dp5 = DP5()
        from torch.utils.data._typing import issubtype
        # With no class-level annotation the type defaults to (the equivalent of) Any.
        self.assertTrue(issubtype(dp5.type.param, Any) and issubtype(Any, dp5.type.param))

        class DP6(IterDataPipe[int]):
            r""" DataPipe with plain Iterator"""
            def __iter__(self) -> Iterator:
                raise NotImplementedError

        self.assertTrue(issubclass(DP6, IterDataPipe))
        dp6 = DP6()
        self.assertTrue(dp6.type.param == int)

        class DP7(IterDataPipe[Awaitable[T_co]]):
            r""" DataPipe with abstract base class"""

        self.assertTrue(issubclass(DP7, IterDataPipe))
        self.assertTrue(DP7.type.param == Awaitable[T_co])

        class DP8(DP7[str]):
            r""" DataPipe subclass from a DataPipe with abc type"""

        self.assertTrue(issubclass(DP8, IterDataPipe))
        # Subscripting the parent specializes the type variable.
        self.assertTrue(DP8.type.param == Awaitable[str])

    def test_construct_time(self):
        """``@argument_validation``: DataPipe-typed __init__ arguments are validated."""
        class DP0(IterDataPipe[Tuple]):
            @argument_validation
            def __init__(self, dp: IterDataPipe):
                self.dp = dp

            def __iter__(self) -> Iterator[Tuple]:
                for d in self.dp:
                    yield d, str(d)

        class DP1(IterDataPipe[int]):
            @argument_validation
            def __init__(self, dp: IterDataPipe[Tuple[int, str]]):
                self.dp = dp

            def __iter__(self) -> Iterator[int]:
                for a, b in self.dp:
                    yield a

        # Non-DataPipe input with DataPipe hint
        datasource = [(1, '1'), (2, '2'), (3, '3')]
        with self.assertRaisesRegex(TypeError, r"Expected argument 'dp' as a IterDataPipe"):
            dp0 = DP0(datasource)
        # A DataPipe of the wrong element type is also rejected.
        dp0 = DP0(dp.iter.IterableWrapper(range(10)))
        with self.assertRaisesRegex(TypeError, r"Expected type of argument 'dp' as a subtype"):
            dp1 = DP1(dp0)

    def test_runtime(self):
        """``@runtime_validation``: yielded items are checked against the declared type,
        and ``runtime_validation_disabled`` suppresses the check."""
        class DP(IterDataPipe[Tuple[int, T_co]]):
            def __init__(self, datasource):
                self.ds = datasource

            @runtime_validation
            def __iter__(self) -> Iterator[Tuple[int, T_co]]:
                for d in self.ds:
                    yield d

        # Conforming data iterates cleanly (twice, to also exercise reset).
        dss = ([(1, '1'), (2, '2')],
               [(1, 1), (2, '2')])
        for ds in dss:
            dp0 = DP(ds)  # type: ignore[var-annotated]
            self.assertEqual(list(dp0), ds)
            # Reset __iter__
            self.assertEqual(list(dp0), ds)

        # Non-conforming data raises at iteration time unless validation is disabled.
        dss = ([(1, 1), ('2', 2)],  # type: ignore[assignment, list-item]
               [[1, '1'], [2, '2']],  # type: ignore[list-item]
               [1, '1', 2, '2'])
        for ds in dss:
            dp0 = DP(ds)
            with self.assertRaisesRegex(RuntimeError, r"Expected an instance as subtype"):
                list(dp0)
            with runtime_validation_disabled():
                self.assertEqual(list(dp0), ds)
                with runtime_validation_disabled():
                    self.assertEqual(list(dp0), ds)
            with self.assertRaisesRegex(RuntimeError, r"Expected an instance as subtype"):
                list(dp0)

    def test_reinforce(self):
        """``reinforce_type``: narrowing the declared type, with validation of the new type."""
        T = TypeVar('T', int, str)

        class DP(IterDataPipe[T]):
            def __init__(self, ds):
                self.ds = ds

            @runtime_validation
            def __iter__(self) -> Iterator[T]:
                for d in self.ds:
                    yield d

        ds = list(range(10))
        # Valid type reinforcement
        dp0 = DP(ds).reinforce_type(int)
        self.assertTrue(dp0.type, int)
        self.assertEqual(list(dp0), ds)

        # Invalid type
        with self.assertRaisesRegex(TypeError, r"'expected_type' must be a type"):
            dp1 = DP(ds).reinforce_type(1)

        # Type is not subtype
        with self.assertRaisesRegex(TypeError, r"Expected 'expected_type' as subtype of"):
            dp2 = DP(ds).reinforce_type(float)

        # Invalid data at runtime
        dp3 = DP(ds).reinforce_type(str)
        with self.assertRaisesRegex(RuntimeError, r"Expected an instance as subtype"):
            list(dp3)

        # Context Manager to disable the runtime validation
        with runtime_validation_disabled():
            self.assertEqual(list(d for d in dp3), ds)
class NumbersDataset(IterDataPipe):
    """Minimal IterDataPipe that yields the integers ``0 .. size - 1``."""

    def __init__(self, size=10):
        self.size = size

    def __iter__(self):
        # Delegate to range() rather than hand-rolling a counting loop.
        yield from range(self.size)
class TestGraph(TestCase):
    """Tests for ``torch.utils.data.graph.traverse`` over datapipe graphs."""

    @skipIfNoDill
    def test_simple_traverse(self):
        """A linear pipeline traverses to a single nested dict chain."""
        numbers_dp = NumbersDataset(size=50)
        mapped_dp = numbers_dp.map(lambda x: x * 10)
        graph = torch.utils.data.graph.traverse(mapped_dp)
        expected: Dict[Any, Any] = {mapped_dp: {numbers_dp: {}}}
        self.assertEqual(expected, graph)

    @skipIfNoDill
    def test_traverse_forked(self):
        """A forked + muxed pipeline traverses through each child's fork container."""
        numbers_dp = NumbersDataset(size=50)
        dp0, dp1, dp2 = numbers_dp.fork(num_instances=3)
        dp0_upd = dp0.map(lambda x: x * 10)
        dp1_upd = dp1.filter(lambda x: x % 3 == 1)
        combined_dp = dp0_upd.mux(dp1_upd, dp2)
        graph = torch.utils.data.graph.traverse(combined_dp)
        # Each fork child reaches the source via two `main_datapipe` hops,
        # hence the doubly-nested dicts per branch.
        expected = {combined_dp: {dp0_upd: {dp0: {dp0.main_datapipe: {dp0.main_datapipe.main_datapipe: {}}}},
                                  dp1_upd: {dp1: {dp1.main_datapipe: {dp1.main_datapipe.main_datapipe: {}}}},
                                  dp2: {dp2.main_datapipe: {dp2.main_datapipe.main_datapipe: {}}}}}
        self.assertEqual(expected, graph)
class TestSharding(TestCase):
    """Tests for ``sharding_filter`` + ``apply_sharding`` over a forked/muxed pipeline."""

    def _get_pipeline(self):
        # 10 numbers forked into two branches (x10 map, %3==1 filter), then muxed.
        numbers_dp = NumbersDataset(size=10)
        dp0, dp1 = numbers_dp.fork(num_instances=2)
        dp0_upd = dp0.map(lambda x: x * 10)
        dp1_upd = dp1.filter(lambda x: x % 3 == 1)
        combined_dp = dp0_upd.mux(dp1_upd)
        return combined_dp

    @skipIfNoDill
    def test_simple_sharding(self):
        """One shard yields the expected subset; the union of all shards is complete."""
        sharded_dp = self._get_pipeline().sharding_filter()
        torch.utils.data.graph_settings.apply_sharding(sharded_dp, 3, 1)
        items = list(sharded_dp)
        self.assertEqual([1, 20, 40, 70], items)

        # The union of all 3 shards must equal the unsharded pipeline's output.
        all_items = list(self._get_pipeline())
        items = []
        for i in range(3):
            sharded_dp = self._get_pipeline().sharding_filter()
            torch.utils.data.graph_settings.apply_sharding(sharded_dp, 3, i)
            items += list(sharded_dp)
        self.assertEqual(sorted(all_items), sorted(items))

    def test_sharding_length(self):
        """Shard lengths split the source length as evenly as possible."""
        # 13 elements over 3 shards -> lengths 5 / 4 / 4
        numbers_dp = dp.iter.IterableWrapper(range(13))
        sharded_dp0 = numbers_dp.sharding_filter()
        torch.utils.data.graph_settings.apply_sharding(sharded_dp0, 3, 0)
        sharded_dp1 = numbers_dp.sharding_filter()
        torch.utils.data.graph_settings.apply_sharding(sharded_dp1, 3, 1)
        sharded_dp2 = numbers_dp.sharding_filter()
        torch.utils.data.graph_settings.apply_sharding(sharded_dp2, 3, 2)
        self.assertEqual(13, len(numbers_dp))
        self.assertEqual(5, len(sharded_dp0))
        self.assertEqual(4, len(sharded_dp1))
        self.assertEqual(4, len(sharded_dp2))

        # 1 element over 2 shards -> lengths 1 / 0
        numbers_dp = dp.iter.IterableWrapper(range(1))
        sharded_dp0 = numbers_dp.sharding_filter()
        torch.utils.data.graph_settings.apply_sharding(sharded_dp0, 2, 0)
        sharded_dp1 = numbers_dp.sharding_filter()
        torch.utils.data.graph_settings.apply_sharding(sharded_dp1, 2, 1)
        self.assertEqual(1, len(sharded_dp0))
        self.assertEqual(0, len(sharded_dp1))

    @skipIfNoDill
    def test_old_dataloader(self):
        """A sharded pipeline through the legacy DataLoader covers all elements."""
        dp0 = self._get_pipeline()
        expected = list(dp0)

        dp0 = self._get_pipeline().sharding_filter()
        dl = DataLoader(dp0, batch_size=1, shuffle=False, num_workers=2,
                        worker_init_fn=torch.utils.data.backward_compatibility.worker_init_fn)
        items = []
        for i in dl:
            items.append(i)
        self.assertEqual(sorted(expected), sorted(items))
# Entry point: run this file's tests with the common test runner when executed directly.
if __name__ == '__main__':
    run_tests()
|
"""Backer Tests."""
|
from django.contrib import admin
from django.db import models
from django.forms import Textarea, Select
from django.utils.html import format_html
from research.models import Paper
class PaperAdmin(admin.ModelAdmin):
    """Admin configuration for research ``Paper`` objects.

    Adds a computed ``pdf_link`` column linking to the paper's PDF, groups
    the edit form into collapsible fieldsets, and keeps the auto-managed
    fields (timestamps, mime type, slug) read-only.
    """

    list_per_page = 10
    list_display = (
        'title',
        'created_date',
        'is_active',
        'pdf_link'
    )
    list_filter = ('is_active',)
    date_hierarchy = 'created_date'
    ordering = ('-created_date',)
    search_fields = ('title', 'authors__full_name',)
    actions_on_top = True
    actions_on_bottom = False
    fieldsets = [
        ('Main',
            {
                'fields':
                [
                    'status',
                    'version',
                    'title',
                    'authors',
                    'abstract',
                    'keywords',
                ]
            }
         ),
        ('Optional',
            {
                'fields':
                [
                    'pdf',
                    'project_link',
                    'binder_link',
                    'is_active'
                ],
                'classes': ['collapse']
            }
         ),
        ('Journal Info',
            {
                'fields': [
                    'papertype',
                    'institution',
                    'journal',
                    'pages',
                    'volume',
                    'number',
                    'link',
                    'note'
                ],
                'classes': ['collapse']
            }
         ),
        ('Meta',
            {
                'fields': [
                    'created_date',
                    'modified_date',
                    'slug',
                ],
                'classes': ['collapse']
            }
         ),
    ]
    readonly_fields = ('created_date', 'modified_date', 'mime', 'slug')
    formfield_overrides = {
        models.TextField: {'widget': Textarea(attrs={'rows': 15, 'cols': 100})},
        models.BooleanField: {'widget': Select(choices=((False, 'False'), (True, 'True')))},
    }

    # FIX: `pdf_link` is a list_display column, not a bulk action, so it must be
    # declared with @admin.display; the previous @admin.action wrongly registered
    # it with the action machinery.
    @admin.display(description='PDF-link')
    def pdf_link(self, item):
        """Render an HTML link to the paper's PDF, or None when it has no URL."""
        if pdf_url := item.get_absolute_url():
            return format_html('<a href="{url}" target="_blank">PDF</a>', url=pdf_url)
# Attach the custom admin options to the Paper model.
admin.site.register(Paper, PaperAdmin)
|
# Copyright (c) OpenMMLab. All rights reserved.
import importlib
from mmdeploy.utils import Codebase
from .base import BaseTask, MMCodebase, get_codebase_class
# Extra libraries (beyond the codebase's own package) that must be importable
# before the corresponding `mmdeploy.codebase.*` package can be imported.
extra_dependent_library = {
    Codebase.MMOCR: ['mmdet'],
    Codebase.MMROTATE: ['mmdet']
}
def import_codebase(codebase: Codebase):
    """Import a codebase package in `mmdeploy.codebase`.

    The function will check if all dependent libraries are installed.
    For example, to import `mmdeploy.codebase.mmdet`, `mmdet` must be
    installed. To import `mmdeploy.codebase.mmocr`, `mmdet` and `mmocr`
    must be installed.

    Args:
        codebase (Codebase): The codebase to import.

    Raises:
        ImportError: If any dependent library is not installed.
    """
    # FIX: `find_spec` lives in the `importlib.util` submodule, which a bare
    # `import importlib` does not guarantee to expose as an attribute; import
    # it explicitly so the lookup below cannot fail with AttributeError.
    import importlib.util
    codebase_name = codebase.value
    dependent_library = [codebase_name] + \
        extra_dependent_library.get(codebase, [])
    for lib in dependent_library:
        if not importlib.util.find_spec(lib):
            raise ImportError(
                f'{lib} has not been installed. '
                f'Import mmdeploy.codebase.{codebase_name} failed.')
        # Import the matching sub-package for every satisfied dependency.
        importlib.import_module(f'mmdeploy.codebase.{lib}')
__all__ = ['MMCodebase', 'BaseTask', 'get_codebase_class']
|
# DO NOT EDIT! This file is automatically generated
import datetime
import typing
from commercetools.types._abstract import _BaseType
from commercetools.types._common import BaseResource
if typing.TYPE_CHECKING:
from ._cart import (
DiscountCodeState,
DiscountedLineItemPriceForQuantity,
LineItem,
ProductPublishScope,
ShippingInfo,
ShippingRateInput,
TaxedItemPrice,
)
from ._category import Category, CategoryReference
from ._channel import ChannelReference
from ._common import (
Address,
CreatedBy,
DiscountedPrice,
Image,
LastModifiedBy,
LocalizedString,
Money,
Reference,
)
from ._customer import Customer, CustomerReference
from ._customer_group import CustomerGroupReference
from ._discount_code import DiscountCodeReference
from ._inventory import InventoryEntry
from ._order import (
Delivery,
DeliveryItem,
Order,
OrderState,
Parcel,
ParcelMeasurements,
PaymentState,
ReturnInfo,
ReturnShipmentState,
ShipmentState,
TrackingData,
)
from ._order_edit import OrderEditApplied, OrderEditReference
from ._payment import Payment, Transaction, TransactionState
from ._product import ProductProjection, ProductVariant
from ._review import Review
from ._state import StateReference
from ._store import StoreKeyReference
from ._type import CustomFields
__all__ = [
"CategoryCreatedMessage",
"CategoryCreatedMessagePayload",
"CategorySlugChangedMessage",
"CategorySlugChangedMessagePayload",
"CustomLineItemStateTransitionMessage",
"CustomLineItemStateTransitionMessagePayload",
"CustomerAddressAddedMessage",
"CustomerAddressAddedMessagePayload",
"CustomerAddressChangedMessage",
"CustomerAddressChangedMessagePayload",
"CustomerAddressRemovedMessage",
"CustomerAddressRemovedMessagePayload",
"CustomerCompanyNameSetMessage",
"CustomerCompanyNameSetMessagePayload",
"CustomerCreatedMessage",
"CustomerCreatedMessagePayload",
"CustomerDateOfBirthSetMessage",
"CustomerDateOfBirthSetMessagePayload",
"CustomerEmailChangedMessage",
"CustomerEmailChangedMessagePayload",
"CustomerEmailVerifiedMessage",
"CustomerEmailVerifiedMessagePayload",
"CustomerGroupSetMessage",
"CustomerGroupSetMessagePayload",
"DeliveryAddedMessage",
"DeliveryAddedMessagePayload",
"DeliveryAddressSetMessage",
"DeliveryAddressSetMessagePayload",
"DeliveryItemsUpdatedMessage",
"DeliveryItemsUpdatedMessagePayload",
"DeliveryRemovedMessage",
"DeliveryRemovedMessagePayload",
"InventoryEntryCreatedMessage",
"InventoryEntryCreatedMessagePayload",
"InventoryEntryDeletedMessage",
"InventoryEntryDeletedMessagePayload",
"InventoryEntryQuantitySetMessage",
"InventoryEntryQuantitySetMessagePayload",
"LineItemStateTransitionMessage",
"LineItemStateTransitionMessagePayload",
"Message",
"MessageConfiguration",
"MessageConfigurationDraft",
"MessagePagedQueryResponse",
"MessagePayload",
"OrderBillingAddressSetMessage",
"OrderBillingAddressSetMessagePayload",
"OrderCreatedMessage",
"OrderCreatedMessagePayload",
"OrderCustomLineItemDiscountSetMessage",
"OrderCustomLineItemDiscountSetMessagePayload",
"OrderCustomerEmailSetMessage",
"OrderCustomerEmailSetMessagePayload",
"OrderCustomerGroupSetMessage",
"OrderCustomerGroupSetMessagePayload",
"OrderCustomerSetMessage",
"OrderCustomerSetMessagePayload",
"OrderDeletedMessage",
"OrderDeletedMessagePayload",
"OrderDiscountCodeAddedMessage",
"OrderDiscountCodeAddedMessagePayload",
"OrderDiscountCodeRemovedMessage",
"OrderDiscountCodeRemovedMessagePayload",
"OrderDiscountCodeStateSetMessage",
"OrderDiscountCodeStateSetMessagePayload",
"OrderEditAppliedMessage",
"OrderEditAppliedMessagePayload",
"OrderImportedMessage",
"OrderImportedMessagePayload",
"OrderLineItemAddedMessage",
"OrderLineItemAddedMessagePayload",
"OrderLineItemDiscountSetMessage",
"OrderLineItemDiscountSetMessagePayload",
"OrderPaymentStateChangedMessage",
"OrderPaymentStateChangedMessagePayload",
"OrderReturnInfoAddedMessage",
"OrderReturnInfoAddedMessagePayload",
"OrderReturnShipmentStateChangedMessage",
"OrderReturnShipmentStateChangedMessagePayload",
"OrderShipmentStateChangedMessage",
"OrderShipmentStateChangedMessagePayload",
"OrderShippingAddressSetMessage",
"OrderShippingAddressSetMessagePayload",
"OrderShippingInfoSetMessage",
"OrderShippingInfoSetMessagePayload",
"OrderShippingRateInputSetMessage",
"OrderShippingRateInputSetMessagePayload",
"OrderStateChangedMessage",
"OrderStateChangedMessagePayload",
"OrderStateTransitionMessage",
"OrderStateTransitionMessagePayload",
"OrderStoreSetMessage",
"OrderStoreSetMessagePayload",
"ParcelAddedToDeliveryMessage",
"ParcelAddedToDeliveryMessagePayload",
"ParcelItemsUpdatedMessage",
"ParcelItemsUpdatedMessagePayload",
"ParcelMeasurementsUpdatedMessage",
"ParcelMeasurementsUpdatedMessagePayload",
"ParcelRemovedFromDeliveryMessage",
"ParcelRemovedFromDeliveryMessagePayload",
"ParcelTrackingDataUpdatedMessage",
"ParcelTrackingDataUpdatedMessagePayload",
"PaymentCreatedMessage",
"PaymentCreatedMessagePayload",
"PaymentInteractionAddedMessage",
"PaymentInteractionAddedMessagePayload",
"PaymentStatusInterfaceCodeSetMessage",
"PaymentStatusInterfaceCodeSetMessagePayload",
"PaymentStatusStateTransitionMessage",
"PaymentStatusStateTransitionMessagePayload",
"PaymentTransactionAddedMessage",
"PaymentTransactionAddedMessagePayload",
"PaymentTransactionStateChangedMessage",
"PaymentTransactionStateChangedMessagePayload",
"ProductAddedToCategoryMessage",
"ProductAddedToCategoryMessagePayload",
"ProductCreatedMessage",
"ProductCreatedMessagePayload",
"ProductDeletedMessage",
"ProductDeletedMessagePayload",
"ProductImageAddedMessage",
"ProductImageAddedMessagePayload",
"ProductPriceDiscountsSetMessage",
"ProductPriceDiscountsSetMessagePayload",
"ProductPriceDiscountsSetUpdatedPrice",
"ProductPriceExternalDiscountSetMessage",
"ProductPriceExternalDiscountSetMessagePayload",
"ProductPublishedMessage",
"ProductPublishedMessagePayload",
"ProductRemovedFromCategoryMessage",
"ProductRemovedFromCategoryMessagePayload",
"ProductRevertedStagedChangesMessage",
"ProductRevertedStagedChangesMessagePayload",
"ProductSlugChangedMessage",
"ProductSlugChangedMessagePayload",
"ProductStateTransitionMessage",
"ProductStateTransitionMessagePayload",
"ProductUnpublishedMessage",
"ProductUnpublishedMessagePayload",
"ProductVariantDeletedMessage",
"ProductVariantDeletedMessagePayload",
"ReviewCreatedMessage",
"ReviewCreatedMessagePayload",
"ReviewRatingSetMessage",
"ReviewRatingSetMessagePayload",
"ReviewStateTransitionMessage",
"ReviewStateTransitionMessagePayload",
"UserProvidedIdentifiers",
]
class Message(BaseResource):
    """Common envelope shared by all Commercetools message resources.

    Concrete message subclasses carry these bookkeeping fields plus a
    ``type`` discriminator identifying the concrete message kind.
    """

    #: :class:`str`
    id: str
    #: :class:`int`
    version: int
    #: :class:`datetime.datetime` `(Named` ``createdAt`` `in Commercetools)`
    created_at: datetime.datetime
    #: :class:`datetime.datetime` `(Named` ``lastModifiedAt`` `in Commercetools)`
    last_modified_at: datetime.datetime
    #: Optional :class:`commercetools.types.LastModifiedBy` `(Named` ``lastModifiedBy`` `in Commercetools)`
    last_modified_by: typing.Optional["LastModifiedBy"]
    #: Optional :class:`commercetools.types.CreatedBy` `(Named` ``createdBy`` `in Commercetools)`
    created_by: typing.Optional["CreatedBy"]
    #: :class:`int` `(Named` ``sequenceNumber`` `in Commercetools)`
    sequence_number: int
    #: :class:`commercetools.types.Reference`
    resource: "Reference"
    #: :class:`int` `(Named` ``resourceVersion`` `in Commercetools)`
    resource_version: int
    #: :class:`str`
    type: str
    #: Optional :class:`commercetools.types.UserProvidedIdentifiers` `(Named` ``resourceUserProvidedIdentifiers`` `in Commercetools)`
    resource_user_provided_identifiers: typing.Optional["UserProvidedIdentifiers"]

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None
    ) -> None:
        self.id = id
        self.version = version
        self.created_at = created_at
        self.last_modified_at = last_modified_at
        self.last_modified_by = last_modified_by
        self.created_by = created_by
        self.sequence_number = sequence_number
        self.resource = resource
        self.resource_version = resource_version
        self.type = type
        self.resource_user_provided_identifiers = resource_user_provided_identifiers
        # BaseResource tracks only the four common resource fields.
        super().__init__(
            id=id, version=version,
            created_at=created_at, last_modified_at=last_modified_at,
        )

    def __repr__(self) -> str:
        return (
            f"Message(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r})"
        )
class MessageConfiguration(_BaseType):
    """Project-level configuration for the messages feature."""

    #: :class:`bool`
    enabled: bool
    #: Optional :class:`int` `(Named` ``deleteDaysAfterCreation`` `in Commercetools)`
    delete_days_after_creation: typing.Optional[int]

    def __init__(
        self,
        *,
        enabled: bool = None,
        delete_days_after_creation: typing.Optional[int] = None
    ) -> None:
        self.enabled = enabled
        self.delete_days_after_creation = delete_days_after_creation
        super().__init__()

    def __repr__(self) -> str:
        return (
            f"MessageConfiguration(enabled={self.enabled!r}, "
            f"delete_days_after_creation={self.delete_days_after_creation!r})"
        )
class MessageConfigurationDraft(_BaseType):
    """Draft used to set the project's message configuration."""

    #: :class:`bool`
    enabled: bool
    #: :class:`int` `(Named` ``deleteDaysAfterCreation`` `in Commercetools)`
    delete_days_after_creation: int

    def __init__(
        self, *, enabled: bool = None, delete_days_after_creation: int = None
    ) -> None:
        self.enabled = enabled
        self.delete_days_after_creation = delete_days_after_creation
        super().__init__()

    def __repr__(self) -> str:
        return (
            f"MessageConfigurationDraft(enabled={self.enabled!r}, "
            f"delete_days_after_creation={self.delete_days_after_creation!r})"
        )
class MessagePagedQueryResponse(_BaseType):
    """One page of results from a messages query."""

    #: :class:`int`
    limit: int
    #: :class:`int`
    count: int
    #: Optional :class:`int`
    total: typing.Optional[int]
    #: :class:`int`
    offset: int
    #: List of :class:`commercetools.types.Message`
    results: typing.Sequence["Message"]

    def __init__(
        self,
        *,
        limit: int = None,
        count: int = None,
        total: typing.Optional[int] = None,
        offset: int = None,
        results: typing.Sequence["Message"] = None
    ) -> None:
        self.limit = limit
        self.count = count
        self.total = total
        self.offset = offset
        self.results = results
        super().__init__()

    def __repr__(self) -> str:
        return (
            f"MessagePagedQueryResponse(limit={self.limit!r}, count={self.count!r}, "
            f"total={self.total!r}, offset={self.offset!r}, results={self.results!r})"
        )
class MessagePayload(_BaseType):
    """Base payload for all message types; ``type`` is the discriminator."""

    #: :class:`str`
    type: str

    def __init__(self, *, type: str = None) -> None:
        self.type = type
        super().__init__()

    def __repr__(self) -> str:
        return f"MessagePayload(type={self.type!r})"
class ProductPriceDiscountsSetUpdatedPrice(_BaseType):
    """One updated price entry in a ``ProductPriceDiscountsSet`` message."""

    #: :class:`int` `(Named` ``variantId`` `in Commercetools)`
    variant_id: int
    #: Optional :class:`str` `(Named` ``variantKey`` `in Commercetools)`
    variant_key: typing.Optional[str]
    #: Optional :class:`str`
    sku: typing.Optional[str]
    #: :class:`str` `(Named` ``priceId`` `in Commercetools)`
    price_id: str
    #: Optional :class:`commercetools.types.DiscountedPrice`
    discounted: typing.Optional["DiscountedPrice"]
    #: :class:`bool`
    staged: bool

    def __init__(
        self,
        *,
        variant_id: int = None,
        variant_key: typing.Optional[str] = None,
        sku: typing.Optional[str] = None,
        price_id: str = None,
        discounted: typing.Optional["DiscountedPrice"] = None,
        staged: bool = None
    ) -> None:
        self.variant_id = variant_id
        self.variant_key = variant_key
        self.sku = sku
        self.price_id = price_id
        self.discounted = discounted
        self.staged = staged
        super().__init__()

    def __repr__(self) -> str:
        return (
            f"ProductPriceDiscountsSetUpdatedPrice(variant_id={self.variant_id!r}, "
            f"variant_key={self.variant_key!r}, sku={self.sku!r}, "
            f"price_id={self.price_id!r}, discounted={self.discounted!r}, "
            f"staged={self.staged!r})"
        )
class UserProvidedIdentifiers(_BaseType):
    """User-supplied identifiers of the resource a message refers to."""

    #: Optional :class:`str`
    key: typing.Optional[str]
    #: Optional :class:`str` `(Named` ``externalId`` `in Commercetools)`
    external_id: typing.Optional[str]
    #: Optional :class:`str` `(Named` ``orderNumber`` `in Commercetools)`
    order_number: typing.Optional[str]
    #: Optional :class:`str` `(Named` ``customerNumber`` `in Commercetools)`
    customer_number: typing.Optional[str]
    #: Optional :class:`str`
    sku: typing.Optional[str]
    #: Optional :class:`commercetools.types.LocalizedString`
    slug: typing.Optional["LocalizedString"]

    def __init__(
        self,
        *,
        key: typing.Optional[str] = None,
        external_id: typing.Optional[str] = None,
        order_number: typing.Optional[str] = None,
        customer_number: typing.Optional[str] = None,
        sku: typing.Optional[str] = None,
        slug: typing.Optional["LocalizedString"] = None
    ) -> None:
        self.key = key
        self.external_id = external_id
        self.order_number = order_number
        self.customer_number = customer_number
        self.sku = sku
        self.slug = slug
        super().__init__()

    def __repr__(self) -> str:
        return (
            f"UserProvidedIdentifiers(key={self.key!r}, external_id={self.external_id!r}, "
            f"order_number={self.order_number!r}, customer_number={self.customer_number!r}, "
            f"sku={self.sku!r}, slug={self.slug!r})"
        )
class CategoryCreatedMessage(Message):
    """Message emitted when a category is created."""

    #: :class:`commercetools.types.Category`
    category: "Category"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        category: "Category" = None
    ) -> None:
        self.category = category
        # ``type`` is fixed for this class; the argument only exists for
        # signature compatibility and is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CategoryCreated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CategoryCreatedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"category={self.category!r})"
        )
class CategoryCreatedMessagePayload(MessagePayload):
    """Payload of a ``CategoryCreated`` message."""

    #: :class:`commercetools.types.Category`
    category: "Category"

    def __init__(self, *, type: str = None, category: "Category" = None) -> None:
        self.category = category
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CategoryCreated")

    def __repr__(self) -> str:
        return f"CategoryCreatedMessagePayload(type={self.type!r}, category={self.category!r})"
class CategorySlugChangedMessage(Message):
    """Message emitted when a category's slug changes."""

    #: :class:`commercetools.types.LocalizedString`
    slug: "LocalizedString"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        slug: "LocalizedString" = None
    ) -> None:
        self.slug = slug
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CategorySlugChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CategorySlugChangedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"slug={self.slug!r})"
        )
class CategorySlugChangedMessagePayload(MessagePayload):
    """Payload of a ``CategorySlugChanged`` message."""

    #: :class:`commercetools.types.LocalizedString`
    slug: "LocalizedString"

    def __init__(self, *, type: str = None, slug: "LocalizedString" = None) -> None:
        self.slug = slug
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CategorySlugChanged")

    def __repr__(self) -> str:
        return f"CategorySlugChangedMessagePayload(type={self.type!r}, slug={self.slug!r})"
class CustomLineItemStateTransitionMessage(Message):
    """Message emitted when a custom line item transitions between states."""

    #: :class:`str` `(Named` ``customLineItemId`` `in Commercetools)`
    custom_line_item_id: str
    #: :class:`datetime.datetime` `(Named` ``transitionDate`` `in Commercetools)`
    transition_date: datetime.datetime
    #: :class:`int`
    quantity: int
    #: :class:`commercetools.types.StateReference` `(Named` ``fromState`` `in Commercetools)`
    from_state: "StateReference"
    #: :class:`commercetools.types.StateReference` `(Named` ``toState`` `in Commercetools)`
    to_state: "StateReference"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        custom_line_item_id: str = None,
        transition_date: datetime.datetime = None,
        quantity: int = None,
        from_state: "StateReference" = None,
        to_state: "StateReference" = None
    ) -> None:
        self.custom_line_item_id = custom_line_item_id
        self.transition_date = transition_date
        self.quantity = quantity
        self.from_state = from_state
        self.to_state = to_state
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomLineItemStateTransition",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomLineItemStateTransitionMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"custom_line_item_id={self.custom_line_item_id!r}, "
            f"transition_date={self.transition_date!r}, quantity={self.quantity!r}, "
            f"from_state={self.from_state!r}, to_state={self.to_state!r})"
        )
class CustomLineItemStateTransitionMessagePayload(MessagePayload):
    """Payload of a ``CustomLineItemStateTransition`` message."""

    #: :class:`str` `(Named` ``customLineItemId`` `in Commercetools)`
    custom_line_item_id: str
    #: :class:`datetime.datetime` `(Named` ``transitionDate`` `in Commercetools)`
    transition_date: datetime.datetime
    #: :class:`int`
    quantity: int
    #: :class:`commercetools.types.StateReference` `(Named` ``fromState`` `in Commercetools)`
    from_state: "StateReference"
    #: :class:`commercetools.types.StateReference` `(Named` ``toState`` `in Commercetools)`
    to_state: "StateReference"

    def __init__(
        self,
        *,
        type: str = None,
        custom_line_item_id: str = None,
        transition_date: datetime.datetime = None,
        quantity: int = None,
        from_state: "StateReference" = None,
        to_state: "StateReference" = None
    ) -> None:
        self.custom_line_item_id = custom_line_item_id
        self.transition_date = transition_date
        self.quantity = quantity
        self.from_state = from_state
        self.to_state = to_state
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CustomLineItemStateTransition")

    def __repr__(self) -> str:
        return (
            f"CustomLineItemStateTransitionMessagePayload(type={self.type!r}, "
            f"custom_line_item_id={self.custom_line_item_id!r}, "
            f"transition_date={self.transition_date!r}, quantity={self.quantity!r}, "
            f"from_state={self.from_state!r}, to_state={self.to_state!r})"
        )
class CustomerAddressAddedMessage(Message):
    """Message emitted when an address is added to a customer."""

    #: :class:`commercetools.types.Address`
    address: "Address"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        address: "Address" = None
    ) -> None:
        self.address = address
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerAddressAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerAddressAddedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"address={self.address!r})"
        )
class CustomerAddressAddedMessagePayload(MessagePayload):
    """Payload of a ``CustomerAddressAdded`` message."""

    #: :class:`commercetools.types.Address`
    address: "Address"

    def __init__(self, *, type: str = None, address: "Address" = None) -> None:
        self.address = address
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CustomerAddressAdded")

    def __repr__(self) -> str:
        return f"CustomerAddressAddedMessagePayload(type={self.type!r}, address={self.address!r})"
class CustomerAddressChangedMessage(Message):
    """Message emitted when a customer's address changes."""

    #: :class:`commercetools.types.Address`
    address: "Address"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        address: "Address" = None
    ) -> None:
        self.address = address
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerAddressChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerAddressChangedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"address={self.address!r})"
        )
class CustomerAddressChangedMessagePayload(MessagePayload):
    """Payload of a ``CustomerAddressChanged`` message."""

    #: :class:`commercetools.types.Address`
    address: "Address"

    def __init__(self, *, type: str = None, address: "Address" = None) -> None:
        self.address = address
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CustomerAddressChanged")

    def __repr__(self) -> str:
        return f"CustomerAddressChangedMessagePayload(type={self.type!r}, address={self.address!r})"
class CustomerAddressRemovedMessage(Message):
    """Message emitted when an address is removed from a customer."""

    #: :class:`commercetools.types.Address`
    address: "Address"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        address: "Address" = None
    ) -> None:
        self.address = address
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerAddressRemoved",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerAddressRemovedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"address={self.address!r})"
        )
class CustomerAddressRemovedMessagePayload(MessagePayload):
    """Payload of a ``CustomerAddressRemoved`` message."""

    #: :class:`commercetools.types.Address`
    address: "Address"

    def __init__(self, *, type: str = None, address: "Address" = None) -> None:
        self.address = address
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CustomerAddressRemoved")

    def __repr__(self) -> str:
        return f"CustomerAddressRemovedMessagePayload(type={self.type!r}, address={self.address!r})"
class CustomerCompanyNameSetMessage(Message):
    """Message emitted when a customer's company name is set."""

    #: :class:`str` `(Named` ``companyName`` `in Commercetools)`
    company_name: str

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        company_name: str = None
    ) -> None:
        self.company_name = company_name
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerCompanyNameSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerCompanyNameSetMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"company_name={self.company_name!r})"
        )
class CustomerCompanyNameSetMessagePayload(MessagePayload):
    """Payload of a ``CustomerCompanyNameSet`` message."""

    #: :class:`str` `(Named` ``companyName`` `in Commercetools)`
    company_name: str

    def __init__(self, *, type: str = None, company_name: str = None) -> None:
        self.company_name = company_name
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CustomerCompanyNameSet")

    def __repr__(self) -> str:
        return (
            f"CustomerCompanyNameSetMessagePayload(type={self.type!r}, "
            f"company_name={self.company_name!r})"
        )
class CustomerCreatedMessage(Message):
    """Message emitted when a customer is created."""

    #: :class:`commercetools.types.Customer`
    customer: "Customer"

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        customer: "Customer" = None
    ) -> None:
        self.customer = customer
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerCreated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerCreatedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"customer={self.customer!r})"
        )
class CustomerCreatedMessagePayload(MessagePayload):
    """Payload of a ``CustomerCreated`` message."""

    #: :class:`commercetools.types.Customer`
    customer: "Customer"

    def __init__(self, *, type: str = None, customer: "Customer" = None) -> None:
        self.customer = customer
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CustomerCreated")

    def __repr__(self) -> str:
        return f"CustomerCreatedMessagePayload(type={self.type!r}, customer={self.customer!r})"
class CustomerDateOfBirthSetMessage(Message):
    """Message emitted when a customer's date of birth is set."""

    #: :class:`datetime.date` `(Named` ``dateOfBirth`` `in Commercetools)`
    date_of_birth: datetime.date

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        date_of_birth: datetime.date = None
    ) -> None:
        self.date_of_birth = date_of_birth
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerDateOfBirthSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerDateOfBirthSetMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"date_of_birth={self.date_of_birth!r})"
        )
class CustomerDateOfBirthSetMessagePayload(MessagePayload):
    """Payload of a ``CustomerDateOfBirthSet`` message."""

    #: :class:`datetime.date` `(Named` ``dateOfBirth`` `in Commercetools)`
    date_of_birth: datetime.date

    def __init__(
        self, *, type: str = None, date_of_birth: datetime.date = None
    ) -> None:
        self.date_of_birth = date_of_birth
        # The ``type`` argument is ignored; this payload always reports its own type.
        super().__init__(type="CustomerDateOfBirthSet")

    def __repr__(self) -> str:
        return (
            f"CustomerDateOfBirthSetMessagePayload(type={self.type!r}, "
            f"date_of_birth={self.date_of_birth!r})"
        )
class CustomerEmailChangedMessage(Message):
    """Message emitted when a customer's email address changes."""

    #: :class:`str`
    email: str

    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        email: str = None
    ) -> None:
        self.email = email
        # ``type`` is fixed for this class; the incoming argument is ignored.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerEmailChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerEmailChangedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"email={self.email!r})"
        )
class CustomerEmailChangedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"CustomerEmailChanged"``."""

    #: :class:`str`
    email: str

    def __init__(self, *, type: str = None, email: str = None) -> None:
        self.email = email
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="CustomerEmailChanged")

    def __repr__(self) -> str:
        return (
            f"CustomerEmailChangedMessagePayload(type={self.type!r}, "
            f"email={self.email!r})"
        )
class CustomerEmailVerifiedMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"CustomerEmailVerified"``.

    Carries no fields beyond the shared :class:`Message` attributes.
    """

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None
    ) -> None:
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerEmailVerified",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerEmailVerifiedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r})"
        )
class CustomerEmailVerifiedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"CustomerEmailVerified"``."""

    def __init__(self, *, type: str = None) -> None:
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="CustomerEmailVerified")

    def __repr__(self) -> str:
        return f"CustomerEmailVerifiedMessagePayload(type={self.type!r})"
class CustomerGroupSetMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"CustomerGroupSet"``."""

    #: :class:`commercetools.types.CustomerGroupReference` `(Named` ``customerGroup`` `in Commercetools)`
    customer_group: "CustomerGroupReference"

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        customer_group: "CustomerGroupReference" = None
    ) -> None:
        self.customer_group = customer_group
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="CustomerGroupSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"CustomerGroupSetMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"customer_group={self.customer_group!r})"
        )
class CustomerGroupSetMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"CustomerGroupSet"``."""

    #: :class:`commercetools.types.CustomerGroupReference` `(Named` ``customerGroup`` `in Commercetools)`
    customer_group: "CustomerGroupReference"

    def __init__(
        self, *, type: str = None, customer_group: "CustomerGroupReference" = None
    ) -> None:
        self.customer_group = customer_group
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="CustomerGroupSet")

    def __repr__(self) -> str:
        return (
            f"CustomerGroupSetMessagePayload(type={self.type!r}, "
            f"customer_group={self.customer_group!r})"
        )
class DeliveryAddedMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"DeliveryAdded"``."""

    #: :class:`commercetools.types.Delivery`
    delivery: "Delivery"

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery: "Delivery" = None
    ) -> None:
        self.delivery = delivery
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="DeliveryAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"DeliveryAddedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"delivery={self.delivery!r})"
        )
class DeliveryAddedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"DeliveryAdded"``."""

    #: :class:`commercetools.types.Delivery`
    delivery: "Delivery"

    def __init__(self, *, type: str = None, delivery: "Delivery" = None) -> None:
        self.delivery = delivery
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="DeliveryAdded")

    def __repr__(self) -> str:
        return (
            f"DeliveryAddedMessagePayload(type={self.type!r}, "
            f"delivery={self.delivery!r})"
        )
class DeliveryAddressSetMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"DeliveryAddressSet"``."""

    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: Optional :class:`commercetools.types.Address`
    address: typing.Optional["Address"]
    #: Optional :class:`commercetools.types.Address` `(Named` ``oldAddress`` `in Commercetools)`
    old_address: typing.Optional["Address"]

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery_id: str = None,
        address: typing.Optional["Address"] = None,
        old_address: typing.Optional["Address"] = None
    ) -> None:
        self.delivery_id = delivery_id
        self.address = address
        self.old_address = old_address
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="DeliveryAddressSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"DeliveryAddressSetMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"delivery_id={self.delivery_id!r}, address={self.address!r}, "
            f"old_address={self.old_address!r})"
        )
class DeliveryAddressSetMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"DeliveryAddressSet"``."""

    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: Optional :class:`commercetools.types.Address`
    address: typing.Optional["Address"]
    #: Optional :class:`commercetools.types.Address` `(Named` ``oldAddress`` `in Commercetools)`
    old_address: typing.Optional["Address"]

    def __init__(
        self, *,
        type: str = None,
        delivery_id: str = None,
        address: typing.Optional["Address"] = None,
        old_address: typing.Optional["Address"] = None
    ) -> None:
        self.delivery_id = delivery_id
        self.address = address
        self.old_address = old_address
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="DeliveryAddressSet")

    def __repr__(self) -> str:
        return (
            f"DeliveryAddressSetMessagePayload(type={self.type!r}, "
            f"delivery_id={self.delivery_id!r}, address={self.address!r}, "
            f"old_address={self.old_address!r})"
        )
class DeliveryItemsUpdatedMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"DeliveryItemsUpdated"``."""

    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: List of :class:`commercetools.types.DeliveryItem`
    items: typing.List["DeliveryItem"]
    #: List of :class:`commercetools.types.DeliveryItem` `(Named` ``oldItems`` `in Commercetools)`
    old_items: typing.List["DeliveryItem"]

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery_id: str = None,
        items: typing.List["DeliveryItem"] = None,
        old_items: typing.List["DeliveryItem"] = None
    ) -> None:
        self.delivery_id = delivery_id
        self.items = items
        self.old_items = old_items
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="DeliveryItemsUpdated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"DeliveryItemsUpdatedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"delivery_id={self.delivery_id!r}, items={self.items!r}, "
            f"old_items={self.old_items!r})"
        )
class DeliveryItemsUpdatedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"DeliveryItemsUpdated"``."""

    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: List of :class:`commercetools.types.DeliveryItem`
    items: typing.List["DeliveryItem"]
    #: List of :class:`commercetools.types.DeliveryItem` `(Named` ``oldItems`` `in Commercetools)`
    old_items: typing.List["DeliveryItem"]

    def __init__(
        self, *,
        type: str = None,
        delivery_id: str = None,
        items: typing.List["DeliveryItem"] = None,
        old_items: typing.List["DeliveryItem"] = None
    ) -> None:
        self.delivery_id = delivery_id
        self.items = items
        self.old_items = old_items
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="DeliveryItemsUpdated")

    def __repr__(self) -> str:
        return (
            f"DeliveryItemsUpdatedMessagePayload(type={self.type!r}, "
            f"delivery_id={self.delivery_id!r}, items={self.items!r}, "
            f"old_items={self.old_items!r})"
        )
class DeliveryRemovedMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"DeliveryRemoved"``."""

    #: :class:`commercetools.types.Delivery`
    delivery: "Delivery"

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery: "Delivery" = None
    ) -> None:
        self.delivery = delivery
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="DeliveryRemoved",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"DeliveryRemovedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"delivery={self.delivery!r})"
        )
class DeliveryRemovedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"DeliveryRemoved"``."""

    #: :class:`commercetools.types.Delivery`
    delivery: "Delivery"

    def __init__(self, *, type: str = None, delivery: "Delivery" = None) -> None:
        self.delivery = delivery
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="DeliveryRemoved")

    def __repr__(self) -> str:
        return (
            f"DeliveryRemovedMessagePayload(type={self.type!r}, "
            f"delivery={self.delivery!r})"
        )
class InventoryEntryCreatedMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"InventoryEntryCreated"``."""

    #: :class:`commercetools.types.InventoryEntry` `(Named` ``inventoryEntry`` `in Commercetools)`
    inventory_entry: "InventoryEntry"

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        inventory_entry: "InventoryEntry" = None
    ) -> None:
        self.inventory_entry = inventory_entry
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="InventoryEntryCreated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"InventoryEntryCreatedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"inventory_entry={self.inventory_entry!r})"
        )
class InventoryEntryCreatedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"InventoryEntryCreated"``."""

    #: :class:`commercetools.types.InventoryEntry` `(Named` ``inventoryEntry`` `in Commercetools)`
    inventory_entry: "InventoryEntry"

    def __init__(
        self, *, type: str = None, inventory_entry: "InventoryEntry" = None
    ) -> None:
        self.inventory_entry = inventory_entry
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="InventoryEntryCreated")

    def __repr__(self) -> str:
        return (
            f"InventoryEntryCreatedMessagePayload(type={self.type!r}, "
            f"inventory_entry={self.inventory_entry!r})"
        )
class InventoryEntryDeletedMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"InventoryEntryDeleted"``."""

    #: :class:`str`
    sku: str
    #: :class:`commercetools.types.ChannelReference` `(Named` ``supplyChannel`` `in Commercetools)`
    supply_channel: "ChannelReference"

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        sku: str = None,
        supply_channel: "ChannelReference" = None
    ) -> None:
        self.sku = sku
        self.supply_channel = supply_channel
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="InventoryEntryDeleted",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"InventoryEntryDeletedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"sku={self.sku!r}, supply_channel={self.supply_channel!r})"
        )
class InventoryEntryDeletedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"InventoryEntryDeleted"``."""

    #: :class:`str`
    sku: str
    #: :class:`commercetools.types.ChannelReference` `(Named` ``supplyChannel`` `in Commercetools)`
    supply_channel: "ChannelReference"

    def __init__(
        self, *,
        type: str = None,
        sku: str = None,
        supply_channel: "ChannelReference" = None
    ) -> None:
        self.sku = sku
        self.supply_channel = supply_channel
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="InventoryEntryDeleted")

    def __repr__(self) -> str:
        return (
            f"InventoryEntryDeletedMessagePayload(type={self.type!r}, "
            f"sku={self.sku!r}, supply_channel={self.supply_channel!r})"
        )
class InventoryEntryQuantitySetMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"InventoryEntryQuantitySet"``."""

    #: :class:`int` `(Named` ``oldQuantityOnStock`` `in Commercetools)`
    old_quantity_on_stock: int
    #: :class:`int` `(Named` ``newQuantityOnStock`` `in Commercetools)`
    new_quantity_on_stock: int
    #: :class:`int` `(Named` ``oldAvailableQuantity`` `in Commercetools)`
    old_available_quantity: int
    #: :class:`int` `(Named` ``newAvailableQuantity`` `in Commercetools)`
    new_available_quantity: int

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        old_quantity_on_stock: int = None,
        new_quantity_on_stock: int = None,
        old_available_quantity: int = None,
        new_available_quantity: int = None
    ) -> None:
        self.old_quantity_on_stock = old_quantity_on_stock
        self.new_quantity_on_stock = new_quantity_on_stock
        self.old_available_quantity = old_available_quantity
        self.new_available_quantity = new_available_quantity
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="InventoryEntryQuantitySet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"InventoryEntryQuantitySetMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"old_quantity_on_stock={self.old_quantity_on_stock!r}, "
            f"new_quantity_on_stock={self.new_quantity_on_stock!r}, "
            f"old_available_quantity={self.old_available_quantity!r}, "
            f"new_available_quantity={self.new_available_quantity!r})"
        )
class InventoryEntryQuantitySetMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"InventoryEntryQuantitySet"``."""

    #: :class:`int` `(Named` ``oldQuantityOnStock`` `in Commercetools)`
    old_quantity_on_stock: int
    #: :class:`int` `(Named` ``newQuantityOnStock`` `in Commercetools)`
    new_quantity_on_stock: int
    #: :class:`int` `(Named` ``oldAvailableQuantity`` `in Commercetools)`
    old_available_quantity: int
    #: :class:`int` `(Named` ``newAvailableQuantity`` `in Commercetools)`
    new_available_quantity: int

    def __init__(
        self, *,
        type: str = None,
        old_quantity_on_stock: int = None,
        new_quantity_on_stock: int = None,
        old_available_quantity: int = None,
        new_available_quantity: int = None
    ) -> None:
        self.old_quantity_on_stock = old_quantity_on_stock
        self.new_quantity_on_stock = new_quantity_on_stock
        self.old_available_quantity = old_available_quantity
        self.new_available_quantity = new_available_quantity
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="InventoryEntryQuantitySet")

    def __repr__(self) -> str:
        return (
            f"InventoryEntryQuantitySetMessagePayload(type={self.type!r}, "
            f"old_quantity_on_stock={self.old_quantity_on_stock!r}, "
            f"new_quantity_on_stock={self.new_quantity_on_stock!r}, "
            f"old_available_quantity={self.old_available_quantity!r}, "
            f"new_available_quantity={self.new_available_quantity!r})"
        )
class LineItemStateTransitionMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"LineItemStateTransition"``."""

    #: :class:`str` `(Named` ``lineItemId`` `in Commercetools)`
    line_item_id: str
    #: :class:`datetime.datetime` `(Named` ``transitionDate`` `in Commercetools)`
    transition_date: datetime.datetime
    #: :class:`int`
    quantity: int
    #: :class:`commercetools.types.StateReference` `(Named` ``fromState`` `in Commercetools)`
    from_state: "StateReference"
    #: :class:`commercetools.types.StateReference` `(Named` ``toState`` `in Commercetools)`
    to_state: "StateReference"

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        line_item_id: str = None,
        transition_date: datetime.datetime = None,
        quantity: int = None,
        from_state: "StateReference" = None,
        to_state: "StateReference" = None
    ) -> None:
        self.line_item_id = line_item_id
        self.transition_date = transition_date
        self.quantity = quantity
        self.from_state = from_state
        self.to_state = to_state
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="LineItemStateTransition",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"LineItemStateTransitionMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"line_item_id={self.line_item_id!r}, transition_date={self.transition_date!r}, "
            f"quantity={self.quantity!r}, from_state={self.from_state!r}, "
            f"to_state={self.to_state!r})"
        )
class LineItemStateTransitionMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"LineItemStateTransition"``."""

    #: :class:`str` `(Named` ``lineItemId`` `in Commercetools)`
    line_item_id: str
    #: :class:`datetime.datetime` `(Named` ``transitionDate`` `in Commercetools)`
    transition_date: datetime.datetime
    #: :class:`int`
    quantity: int
    #: :class:`commercetools.types.StateReference` `(Named` ``fromState`` `in Commercetools)`
    from_state: "StateReference"
    #: :class:`commercetools.types.StateReference` `(Named` ``toState`` `in Commercetools)`
    to_state: "StateReference"

    def __init__(
        self, *,
        type: str = None,
        line_item_id: str = None,
        transition_date: datetime.datetime = None,
        quantity: int = None,
        from_state: "StateReference" = None,
        to_state: "StateReference" = None
    ) -> None:
        self.line_item_id = line_item_id
        self.transition_date = transition_date
        self.quantity = quantity
        self.from_state = from_state
        self.to_state = to_state
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="LineItemStateTransition")

    def __repr__(self) -> str:
        return (
            f"LineItemStateTransitionMessagePayload(type={self.type!r}, "
            f"line_item_id={self.line_item_id!r}, transition_date={self.transition_date!r}, "
            f"quantity={self.quantity!r}, from_state={self.from_state!r}, "
            f"to_state={self.to_state!r})"
        )
class OrderBillingAddressSetMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"OrderBillingAddressSet"``."""

    #: Optional :class:`commercetools.types.Address`
    address: typing.Optional["Address"]
    #: Optional :class:`commercetools.types.Address` `(Named` ``oldAddress`` `in Commercetools)`
    old_address: typing.Optional["Address"]

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        address: typing.Optional["Address"] = None,
        old_address: typing.Optional["Address"] = None
    ) -> None:
        self.address = address
        self.old_address = old_address
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="OrderBillingAddressSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"OrderBillingAddressSetMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"address={self.address!r}, old_address={self.old_address!r})"
        )
class OrderBillingAddressSetMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"OrderBillingAddressSet"``."""

    #: Optional :class:`commercetools.types.Address`
    address: typing.Optional["Address"]
    #: Optional :class:`commercetools.types.Address` `(Named` ``oldAddress`` `in Commercetools)`
    old_address: typing.Optional["Address"]

    def __init__(
        self, *,
        type: str = None,
        address: typing.Optional["Address"] = None,
        old_address: typing.Optional["Address"] = None
    ) -> None:
        self.address = address
        self.old_address = old_address
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="OrderBillingAddressSet")

    def __repr__(self) -> str:
        return (
            f"OrderBillingAddressSetMessagePayload(type={self.type!r}, "
            f"address={self.address!r}, old_address={self.old_address!r})"
        )
class OrderCreatedMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"OrderCreated"``."""

    #: :class:`commercetools.types.Order`
    order: "Order"

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        order: "Order" = None
    ) -> None:
        self.order = order
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="OrderCreated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"OrderCreatedMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"order={self.order!r})"
        )
class OrderCreatedMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"OrderCreated"``."""

    #: :class:`commercetools.types.Order`
    order: "Order"

    def __init__(self, *, type: str = None, order: "Order" = None) -> None:
        self.order = order
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="OrderCreated")

    def __repr__(self) -> str:
        return f"OrderCreatedMessagePayload(type={self.type!r}, order={self.order!r})"
class OrderCustomLineItemDiscountSetMessage(Message):
    """Message whose ``type`` discriminator is fixed to ``"OrderCustomLineItemDiscountSet"``."""

    #: :class:`str` `(Named` ``customLineItemId`` `in Commercetools)`
    custom_line_item_id: str
    #: List of :class:`commercetools.types.DiscountedLineItemPriceForQuantity` `(Named` ``discountedPricePerQuantity`` `in Commercetools)`
    discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
    #: Optional :class:`commercetools.types.TaxedItemPrice` `(Named` ``taxedPrice`` `in Commercetools)`
    taxed_price: typing.Optional["TaxedItemPrice"]

    def __init__(
        self, *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        custom_line_item_id: str = None,
        discounted_price_per_quantity: typing.List[
            "DiscountedLineItemPriceForQuantity"
        ] = None,
        taxed_price: typing.Optional["TaxedItemPrice"] = None
    ) -> None:
        self.custom_line_item_id = custom_line_item_id
        self.discounted_price_per_quantity = discounted_price_per_quantity
        self.taxed_price = taxed_price
        # Delegate the shared message fields; ``type`` is pinned by this subtype.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at, last_modified_by=last_modified_by,
            created_by=created_by, sequence_number=sequence_number,
            resource=resource, resource_version=resource_version,
            type="OrderCustomLineItemDiscountSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        return (
            f"OrderCustomLineItemDiscountSetMessage(id={self.id!r}, version={self.version!r}, "
            f"created_at={self.created_at!r}, last_modified_at={self.last_modified_at!r}, "
            f"last_modified_by={self.last_modified_by!r}, created_by={self.created_by!r}, "
            f"sequence_number={self.sequence_number!r}, resource={self.resource!r}, "
            f"resource_version={self.resource_version!r}, type={self.type!r}, "
            f"resource_user_provided_identifiers={self.resource_user_provided_identifiers!r}, "
            f"custom_line_item_id={self.custom_line_item_id!r}, "
            f"discounted_price_per_quantity={self.discounted_price_per_quantity!r}, "
            f"taxed_price={self.taxed_price!r})"
        )
class OrderCustomLineItemDiscountSetMessagePayload(MessagePayload):
    """Payload whose ``type`` discriminator is fixed to ``"OrderCustomLineItemDiscountSet"``."""

    #: :class:`str` `(Named` ``customLineItemId`` `in Commercetools)`
    custom_line_item_id: str
    #: List of :class:`commercetools.types.DiscountedLineItemPriceForQuantity` `(Named` ``discountedPricePerQuantity`` `in Commercetools)`
    discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
    #: Optional :class:`commercetools.types.TaxedItemPrice` `(Named` ``taxedPrice`` `in Commercetools)`
    taxed_price: typing.Optional["TaxedItemPrice"]

    def __init__(
        self, *,
        type: str = None,
        custom_line_item_id: str = None,
        discounted_price_per_quantity: typing.List[
            "DiscountedLineItemPriceForQuantity"
        ] = None,
        taxed_price: typing.Optional["TaxedItemPrice"] = None
    ) -> None:
        self.custom_line_item_id = custom_line_item_id
        self.discounted_price_per_quantity = discounted_price_per_quantity
        self.taxed_price = taxed_price
        # The base class pins ``type``; the incoming argument is intentionally unused.
        super().__init__(type="OrderCustomLineItemDiscountSet")

    def __repr__(self) -> str:
        return (
            f"OrderCustomLineItemDiscountSetMessagePayload(type={self.type!r}, "
            f"custom_line_item_id={self.custom_line_item_id!r}, "
            f"discounted_price_per_quantity={self.discounted_price_per_quantity!r}, "
            f"taxed_price={self.taxed_price!r})"
        )
class OrderCustomerEmailSetMessage(Message):
    """Message of type ``OrderCustomerEmailSet``.

    Carries the order's new ``email`` and the previous ``old_email``.
    The ``type`` argument accepted by ``__init__`` is ignored; the base
    class is always initialized with ``type="OrderCustomerEmailSet"``.
    """
    #: Optional :class:`str`
    email: typing.Optional[str]
    #: Optional :class:`str` `(Named` ``oldEmail`` `in Commercetools)`
    old_email: typing.Optional[str]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        email: typing.Optional[str] = None,
        old_email: typing.Optional[str] = None
    ) -> None:
        self.email = email
        self.old_email = old_email
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderCustomerEmailSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderCustomerEmailSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, email=%r, old_email=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.email,
                self.old_email,
            )
        )
class OrderCustomerEmailSetMessagePayload(MessagePayload):
    """Payload of an ``OrderCustomerEmailSet`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderCustomerEmailSet"``.
    """

    #: Optional :class:`str`
    email: typing.Optional[str]
    #: Optional :class:`str` `(Named` ``oldEmail`` `in Commercetools)`
    old_email: typing.Optional[str]

    def __init__(
        self,
        *,
        type: str = None,
        email: typing.Optional[str] = None,
        old_email: typing.Optional[str] = None
    ) -> None:
        self.email = email
        self.old_email = old_email
        super().__init__(type="OrderCustomerEmailSet")

    def __repr__(self) -> str:
        values = (self.type, self.email, self.old_email)
        return (
            "OrderCustomerEmailSetMessagePayload(type=%r, email=%r, old_email=%r)"
            % values
        )
class OrderCustomerGroupSetMessage(Message):
    """Message of type ``OrderCustomerGroupSet``.

    Carries the order's new ``customer_group`` and the previous
    ``old_customer_group``.  The ``type`` argument accepted by ``__init__``
    is ignored; the base class is always initialized with
    ``type="OrderCustomerGroupSet"``.
    """
    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``customerGroup`` `in Commercetools)`
    customer_group: typing.Optional["CustomerGroupReference"]
    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``oldCustomerGroup`` `in Commercetools)`
    old_customer_group: typing.Optional["CustomerGroupReference"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        customer_group: typing.Optional["CustomerGroupReference"] = None,
        old_customer_group: typing.Optional["CustomerGroupReference"] = None
    ) -> None:
        self.customer_group = customer_group
        self.old_customer_group = old_customer_group
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderCustomerGroupSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderCustomerGroupSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, customer_group=%r, old_customer_group=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.customer_group,
                self.old_customer_group,
            )
        )
class OrderCustomerGroupSetMessagePayload(MessagePayload):
    """Payload of an ``OrderCustomerGroupSet`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderCustomerGroupSet"``.
    """

    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``customerGroup`` `in Commercetools)`
    customer_group: typing.Optional["CustomerGroupReference"]
    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``oldCustomerGroup`` `in Commercetools)`
    old_customer_group: typing.Optional["CustomerGroupReference"]

    def __init__(
        self,
        *,
        type: str = None,
        customer_group: typing.Optional["CustomerGroupReference"] = None,
        old_customer_group: typing.Optional["CustomerGroupReference"] = None
    ) -> None:
        self.customer_group = customer_group
        self.old_customer_group = old_customer_group
        super().__init__(type="OrderCustomerGroupSet")

    def __repr__(self) -> str:
        values = (self.type, self.customer_group, self.old_customer_group)
        return (
            "OrderCustomerGroupSetMessagePayload(type=%r, customer_group=%r, old_customer_group=%r)"
            % values
        )
class OrderCustomerSetMessage(Message):
    """Message of type ``OrderCustomerSet``.

    Carries the order's new ``customer``/``customer_group`` and their
    previous values (``old_customer``/``old_customer_group``).  The
    ``type`` argument accepted by ``__init__`` is ignored; the base class
    is always initialized with ``type="OrderCustomerSet"``.
    """
    #: Optional :class:`commercetools.types.CustomerReference`
    customer: typing.Optional["CustomerReference"]
    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``customerGroup`` `in Commercetools)`
    customer_group: typing.Optional["CustomerGroupReference"]
    #: Optional :class:`commercetools.types.CustomerReference` `(Named` ``oldCustomer`` `in Commercetools)`
    old_customer: typing.Optional["CustomerReference"]
    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``oldCustomerGroup`` `in Commercetools)`
    old_customer_group: typing.Optional["CustomerGroupReference"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        customer: typing.Optional["CustomerReference"] = None,
        customer_group: typing.Optional["CustomerGroupReference"] = None,
        old_customer: typing.Optional["CustomerReference"] = None,
        old_customer_group: typing.Optional["CustomerGroupReference"] = None
    ) -> None:
        self.customer = customer
        self.customer_group = customer_group
        self.old_customer = old_customer
        self.old_customer_group = old_customer_group
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderCustomerSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderCustomerSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, customer=%r, customer_group=%r, old_customer=%r, old_customer_group=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.customer,
                self.customer_group,
                self.old_customer,
                self.old_customer_group,
            )
        )
class OrderCustomerSetMessagePayload(MessagePayload):
    """Payload of an ``OrderCustomerSet`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderCustomerSet"``.
    """

    #: Optional :class:`commercetools.types.CustomerReference`
    customer: typing.Optional["CustomerReference"]
    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``customerGroup`` `in Commercetools)`
    customer_group: typing.Optional["CustomerGroupReference"]
    #: Optional :class:`commercetools.types.CustomerReference` `(Named` ``oldCustomer`` `in Commercetools)`
    old_customer: typing.Optional["CustomerReference"]
    #: Optional :class:`commercetools.types.CustomerGroupReference` `(Named` ``oldCustomerGroup`` `in Commercetools)`
    old_customer_group: typing.Optional["CustomerGroupReference"]

    def __init__(
        self,
        *,
        type: str = None,
        customer: typing.Optional["CustomerReference"] = None,
        customer_group: typing.Optional["CustomerGroupReference"] = None,
        old_customer: typing.Optional["CustomerReference"] = None,
        old_customer_group: typing.Optional["CustomerGroupReference"] = None
    ) -> None:
        self.customer = customer
        self.customer_group = customer_group
        self.old_customer = old_customer
        self.old_customer_group = old_customer_group
        super().__init__(type="OrderCustomerSet")

    def __repr__(self) -> str:
        # Keep the tuple aligned with the %r slots below.
        values = (
            self.type,
            self.customer,
            self.customer_group,
            self.old_customer,
            self.old_customer_group,
        )
        return (
            "OrderCustomerSetMessagePayload(type=%r, customer=%r, customer_group=%r, old_customer=%r, old_customer_group=%r)"
            % values
        )
class OrderDeletedMessage(Message):
    """Message of type ``OrderDeleted``.

    Carries the deleted ``order``.  The ``type`` argument accepted by
    ``__init__`` is ignored; the base class is always initialized with
    ``type="OrderDeleted"``.
    """
    #: :class:`commercetools.types.Order`
    order: "Order"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        order: "Order" = None
    ) -> None:
        self.order = order
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderDeleted",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderDeletedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, order=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.order,
            )
        )
class OrderDeletedMessagePayload(MessagePayload):
    """Payload of an ``OrderDeleted`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderDeleted"``.
    """

    #: :class:`commercetools.types.Order`
    order: "Order"

    def __init__(self, *, type: str = None, order: "Order" = None) -> None:
        self.order = order
        super().__init__(type="OrderDeleted")

    def __repr__(self) -> str:
        values = (self.type, self.order)
        return "OrderDeletedMessagePayload(type=%r, order=%r)" % values
class OrderDiscountCodeAddedMessage(Message):
    """Message of type ``OrderDiscountCodeAdded``.

    Carries the ``discount_code`` added to the order.  The ``type``
    argument accepted by ``__init__`` is ignored; the base class is always
    initialized with ``type="OrderDiscountCodeAdded"``.
    """
    #: :class:`commercetools.types.DiscountCodeReference` `(Named` ``discountCode`` `in Commercetools)`
    discount_code: "DiscountCodeReference"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        discount_code: "DiscountCodeReference" = None
    ) -> None:
        self.discount_code = discount_code
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderDiscountCodeAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderDiscountCodeAddedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, discount_code=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.discount_code,
            )
        )
class OrderDiscountCodeAddedMessagePayload(MessagePayload):
    """Payload of an ``OrderDiscountCodeAdded`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderDiscountCodeAdded"``.
    """

    #: :class:`commercetools.types.DiscountCodeReference` `(Named` ``discountCode`` `in Commercetools)`
    discount_code: "DiscountCodeReference"

    def __init__(
        self, *, type: str = None, discount_code: "DiscountCodeReference" = None
    ) -> None:
        self.discount_code = discount_code
        super().__init__(type="OrderDiscountCodeAdded")

    def __repr__(self) -> str:
        values = (self.type, self.discount_code)
        return (
            "OrderDiscountCodeAddedMessagePayload(type=%r, discount_code=%r)" % values
        )
class OrderDiscountCodeRemovedMessage(Message):
    """Message of type ``OrderDiscountCodeRemoved``.

    Carries the ``discount_code`` removed from the order.  The ``type``
    argument accepted by ``__init__`` is ignored; the base class is always
    initialized with ``type="OrderDiscountCodeRemoved"``.
    """
    #: :class:`commercetools.types.DiscountCodeReference` `(Named` ``discountCode`` `in Commercetools)`
    discount_code: "DiscountCodeReference"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        discount_code: "DiscountCodeReference" = None
    ) -> None:
        self.discount_code = discount_code
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderDiscountCodeRemoved",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderDiscountCodeRemovedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, discount_code=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.discount_code,
            )
        )
class OrderDiscountCodeRemovedMessagePayload(MessagePayload):
    """Payload of an ``OrderDiscountCodeRemoved`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderDiscountCodeRemoved"``.
    """

    #: :class:`commercetools.types.DiscountCodeReference` `(Named` ``discountCode`` `in Commercetools)`
    discount_code: "DiscountCodeReference"

    def __init__(
        self, *, type: str = None, discount_code: "DiscountCodeReference" = None
    ) -> None:
        self.discount_code = discount_code
        super().__init__(type="OrderDiscountCodeRemoved")

    def __repr__(self) -> str:
        values = (self.type, self.discount_code)
        return (
            "OrderDiscountCodeRemovedMessagePayload(type=%r, discount_code=%r)" % values
        )
class OrderDiscountCodeStateSetMessage(Message):
    """Message of type ``OrderDiscountCodeStateSet``.

    Carries the ``discount_code``, its new ``state`` and the previous
    ``old_state``.  The ``type`` argument accepted by ``__init__`` is
    ignored; the base class is always initialized with
    ``type="OrderDiscountCodeStateSet"``.
    """
    #: :class:`commercetools.types.DiscountCodeReference` `(Named` ``discountCode`` `in Commercetools)`
    discount_code: "DiscountCodeReference"
    #: :class:`commercetools.types.DiscountCodeState`
    state: "DiscountCodeState"
    #: Optional :class:`commercetools.types.DiscountCodeState` `(Named` ``oldState`` `in Commercetools)`
    old_state: typing.Optional["DiscountCodeState"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        discount_code: "DiscountCodeReference" = None,
        state: "DiscountCodeState" = None,
        old_state: typing.Optional["DiscountCodeState"] = None
    ) -> None:
        self.discount_code = discount_code
        self.state = state
        self.old_state = old_state
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderDiscountCodeStateSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderDiscountCodeStateSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, discount_code=%r, state=%r, old_state=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.discount_code,
                self.state,
                self.old_state,
            )
        )
class OrderDiscountCodeStateSetMessagePayload(MessagePayload):
    """Payload of an ``OrderDiscountCodeStateSet`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderDiscountCodeStateSet"``.
    """

    #: :class:`commercetools.types.DiscountCodeReference` `(Named` ``discountCode`` `in Commercetools)`
    discount_code: "DiscountCodeReference"
    #: :class:`commercetools.types.DiscountCodeState`
    state: "DiscountCodeState"
    #: Optional :class:`commercetools.types.DiscountCodeState` `(Named` ``oldState`` `in Commercetools)`
    old_state: typing.Optional["DiscountCodeState"]

    def __init__(
        self,
        *,
        type: str = None,
        discount_code: "DiscountCodeReference" = None,
        state: "DiscountCodeState" = None,
        old_state: typing.Optional["DiscountCodeState"] = None
    ) -> None:
        self.discount_code = discount_code
        self.state = state
        self.old_state = old_state
        super().__init__(type="OrderDiscountCodeStateSet")

    def __repr__(self) -> str:
        values = (self.type, self.discount_code, self.state, self.old_state)
        return (
            "OrderDiscountCodeStateSetMessagePayload(type=%r, discount_code=%r, state=%r, old_state=%r)"
            % values
        )
class OrderEditAppliedMessage(Message):
    """Message of type ``OrderEditApplied``.

    Carries the applied order ``edit`` and its ``result``.  The ``type``
    argument accepted by ``__init__`` is ignored; the base class is always
    initialized with ``type="OrderEditApplied"``.
    """
    #: :class:`commercetools.types.OrderEditReference`
    edit: "OrderEditReference"
    #: :class:`commercetools.types.OrderEditApplied`
    result: "OrderEditApplied"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        edit: "OrderEditReference" = None,
        result: "OrderEditApplied" = None
    ) -> None:
        self.edit = edit
        self.result = result
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderEditApplied",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderEditAppliedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, edit=%r, result=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.edit,
                self.result,
            )
        )
class OrderEditAppliedMessagePayload(MessagePayload):
    """Payload of an ``OrderEditApplied`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderEditApplied"``.
    """

    #: :class:`commercetools.types.OrderEditReference`
    edit: "OrderEditReference"
    #: :class:`commercetools.types.OrderEditApplied`
    result: "OrderEditApplied"

    def __init__(
        self,
        *,
        type: str = None,
        edit: "OrderEditReference" = None,
        result: "OrderEditApplied" = None
    ) -> None:
        self.edit = edit
        self.result = result
        super().__init__(type="OrderEditApplied")

    def __repr__(self) -> str:
        values = (self.type, self.edit, self.result)
        return "OrderEditAppliedMessagePayload(type=%r, edit=%r, result=%r)" % values
class OrderImportedMessage(Message):
    """Message of type ``OrderImported``.

    Carries the imported ``order``.  The ``type`` argument accepted by
    ``__init__`` is ignored; the base class is always initialized with
    ``type="OrderImported"``.
    """
    #: :class:`commercetools.types.Order`
    order: "Order"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        order: "Order" = None
    ) -> None:
        self.order = order
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderImported",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderImportedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, order=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.order,
            )
        )
class OrderImportedMessagePayload(MessagePayload):
    """Payload of an ``OrderImported`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderImported"``.
    """

    #: :class:`commercetools.types.Order`
    order: "Order"

    def __init__(self, *, type: str = None, order: "Order" = None) -> None:
        self.order = order
        super().__init__(type="OrderImported")

    def __repr__(self) -> str:
        values = (self.type, self.order)
        return "OrderImportedMessagePayload(type=%r, order=%r)" % values
class OrderLineItemAddedMessage(Message):
    """Message of type ``OrderLineItemAdded``.

    Carries the affected ``line_item`` and the ``added_quantity``.  The
    ``type`` argument accepted by ``__init__`` is ignored; the base class
    is always initialized with ``type="OrderLineItemAdded"``.
    """
    #: :class:`commercetools.types.LineItem` `(Named` ``lineItem`` `in Commercetools)`
    line_item: "LineItem"
    #: :class:`int` `(Named` ``addedQuantity`` `in Commercetools)`
    added_quantity: int
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        line_item: "LineItem" = None,
        added_quantity: int = None
    ) -> None:
        self.line_item = line_item
        self.added_quantity = added_quantity
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderLineItemAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderLineItemAddedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, line_item=%r, added_quantity=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.line_item,
                self.added_quantity,
            )
        )
class OrderLineItemAddedMessagePayload(MessagePayload):
    """Payload of an ``OrderLineItemAdded`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderLineItemAdded"``.
    """

    #: :class:`commercetools.types.LineItem` `(Named` ``lineItem`` `in Commercetools)`
    line_item: "LineItem"
    #: :class:`int` `(Named` ``addedQuantity`` `in Commercetools)`
    added_quantity: int

    def __init__(
        self,
        *,
        type: str = None,
        line_item: "LineItem" = None,
        added_quantity: int = None
    ) -> None:
        self.line_item = line_item
        self.added_quantity = added_quantity
        super().__init__(type="OrderLineItemAdded")

    def __repr__(self) -> str:
        values = (self.type, self.line_item, self.added_quantity)
        return (
            "OrderLineItemAddedMessagePayload(type=%r, line_item=%r, added_quantity=%r)"
            % values
        )
class OrderLineItemDiscountSetMessage(Message):
    """Message of type ``OrderLineItemDiscountSet``.

    Carries the ``line_item_id``, the ``discounted_price_per_quantity``
    entries and the resulting ``total_price`` / ``taxed_price``.  The
    ``type`` argument accepted by ``__init__`` is ignored; the base class
    is always initialized with ``type="OrderLineItemDiscountSet"``.
    """
    #: :class:`str` `(Named` ``lineItemId`` `in Commercetools)`
    line_item_id: str
    #: List of :class:`commercetools.types.DiscountedLineItemPriceForQuantity` `(Named` ``discountedPricePerQuantity`` `in Commercetools)`
    discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
    #: :class:`commercetools.types.Money` `(Named` ``totalPrice`` `in Commercetools)`
    total_price: "Money"
    #: Optional :class:`commercetools.types.TaxedItemPrice` `(Named` ``taxedPrice`` `in Commercetools)`
    taxed_price: typing.Optional["TaxedItemPrice"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        line_item_id: str = None,
        discounted_price_per_quantity: typing.List[
            "DiscountedLineItemPriceForQuantity"
        ] = None,
        total_price: "Money" = None,
        taxed_price: typing.Optional["TaxedItemPrice"] = None
    ) -> None:
        self.line_item_id = line_item_id
        self.discounted_price_per_quantity = discounted_price_per_quantity
        self.total_price = total_price
        self.taxed_price = taxed_price
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderLineItemDiscountSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderLineItemDiscountSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, line_item_id=%r, discounted_price_per_quantity=%r, total_price=%r, taxed_price=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.line_item_id,
                self.discounted_price_per_quantity,
                self.total_price,
                self.taxed_price,
            )
        )
class OrderLineItemDiscountSetMessagePayload(MessagePayload):
    """Payload of an ``OrderLineItemDiscountSet`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderLineItemDiscountSet"``.
    """

    #: :class:`str` `(Named` ``lineItemId`` `in Commercetools)`
    line_item_id: str
    #: List of :class:`commercetools.types.DiscountedLineItemPriceForQuantity` `(Named` ``discountedPricePerQuantity`` `in Commercetools)`
    discounted_price_per_quantity: typing.List["DiscountedLineItemPriceForQuantity"]
    #: :class:`commercetools.types.Money` `(Named` ``totalPrice`` `in Commercetools)`
    total_price: "Money"
    #: Optional :class:`commercetools.types.TaxedItemPrice` `(Named` ``taxedPrice`` `in Commercetools)`
    taxed_price: typing.Optional["TaxedItemPrice"]

    def __init__(
        self,
        *,
        type: str = None,
        line_item_id: str = None,
        discounted_price_per_quantity: typing.List[
            "DiscountedLineItemPriceForQuantity"
        ] = None,
        total_price: "Money" = None,
        taxed_price: typing.Optional["TaxedItemPrice"] = None
    ) -> None:
        self.line_item_id = line_item_id
        self.discounted_price_per_quantity = discounted_price_per_quantity
        self.total_price = total_price
        self.taxed_price = taxed_price
        super().__init__(type="OrderLineItemDiscountSet")

    def __repr__(self) -> str:
        # Keep the tuple aligned with the %r slots below.
        values = (
            self.type,
            self.line_item_id,
            self.discounted_price_per_quantity,
            self.total_price,
            self.taxed_price,
        )
        return (
            "OrderLineItemDiscountSetMessagePayload(type=%r, line_item_id=%r, discounted_price_per_quantity=%r, total_price=%r, taxed_price=%r)"
            % values
        )
class OrderPaymentStateChangedMessage(Message):
    """Message of type ``OrderPaymentStateChanged``.

    Carries the order's new ``payment_state`` and the previous
    ``old_payment_state``.  The ``type`` argument accepted by ``__init__``
    is ignored; the base class is always initialized with
    ``type="OrderPaymentStateChanged"``.
    """
    #: :class:`commercetools.types.PaymentState` `(Named` ``paymentState`` `in Commercetools)`
    payment_state: "PaymentState"
    #: Optional :class:`commercetools.types.PaymentState` `(Named` ``oldPaymentState`` `in Commercetools)`
    old_payment_state: typing.Optional["PaymentState"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        payment_state: "PaymentState" = None,
        old_payment_state: typing.Optional["PaymentState"] = None
    ) -> None:
        self.payment_state = payment_state
        self.old_payment_state = old_payment_state
        # Generic message fields are handled by the base class; ``type`` is
        # pinned to this message's discriminator value.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderPaymentStateChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderPaymentStateChangedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, payment_state=%r, old_payment_state=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.payment_state,
                self.old_payment_state,
            )
        )
class OrderPaymentStateChangedMessagePayload(MessagePayload):
    """Payload of an ``OrderPaymentStateChanged`` message.

    The ``type`` argument accepted by ``__init__`` is discarded; the payload
    always registers itself with ``type="OrderPaymentStateChanged"``.
    """

    #: :class:`commercetools.types.PaymentState` `(Named` ``paymentState`` `in Commercetools)`
    payment_state: "PaymentState"
    #: Optional :class:`commercetools.types.PaymentState` `(Named` ``oldPaymentState`` `in Commercetools)`
    old_payment_state: typing.Optional["PaymentState"]

    def __init__(
        self,
        *,
        type: str = None,
        payment_state: "PaymentState" = None,
        old_payment_state: typing.Optional["PaymentState"] = None
    ) -> None:
        self.payment_state = payment_state
        self.old_payment_state = old_payment_state
        super().__init__(type="OrderPaymentStateChanged")

    def __repr__(self) -> str:
        values = (self.type, self.payment_state, self.old_payment_state)
        return (
            "OrderPaymentStateChangedMessagePayload(type=%r, payment_state=%r, old_payment_state=%r)"
            % values
        )
class OrderReturnInfoAddedMessage(Message):
    """Message carrying the ``return_info`` added to an order.

    NOTE(review): the discriminator passed to the base class is
    ``"ReturnInfoAdded"`` — without the ``Order`` prefix used in the class
    name — presumably matching the platform's wire format; confirm against
    the commercetools API before renaming.  The ``type`` argument accepted
    by ``__init__`` is ignored.
    """
    #: :class:`commercetools.types.ReturnInfo` `(Named` ``returnInfo`` `in Commercetools)`
    return_info: "ReturnInfo"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        return_info: "ReturnInfo" = None
    ) -> None:
        self.return_info = return_info
        # ``type`` is pinned to the platform's "ReturnInfoAdded" string.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ReturnInfoAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderReturnInfoAddedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, return_info=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.return_info,
            )
        )
class OrderReturnInfoAddedMessagePayload(MessagePayload):
    """Payload carrying the return info added to an order.

    The ``type`` discriminator is hard-coded to ``"ReturnInfoAdded"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: :class:`commercetools.types.ReturnInfo` `(Named` ``returnInfo`` `in Commercetools)`
    return_info: "ReturnInfo"

    def __init__(self, *, type: str = None, return_info: "ReturnInfo" = None) -> None:
        self.return_info = return_info
        super().__init__(type="ReturnInfoAdded")

    def __repr__(self) -> str:
        values = (self.type, self.return_info)
        return "OrderReturnInfoAddedMessagePayload(type=%r, return_info=%r)" % values
class OrderReturnShipmentStateChangedMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderReturnShipmentStateChanged"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``return_item_id`` and
    ``return_shipment_state`` fields on top of :class:`Message`.
    """
    #: :class:`str` `(Named` ``returnItemId`` `in Commercetools)`
    return_item_id: str
    #: :class:`commercetools.types.ReturnShipmentState` `(Named` ``returnShipmentState`` `in Commercetools)`
    return_shipment_state: "ReturnShipmentState"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        return_item_id: str = None,
        return_shipment_state: "ReturnShipmentState" = None
    ) -> None:
        self.return_item_id = return_item_id
        self.return_shipment_state = return_shipment_state
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderReturnShipmentStateChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderReturnShipmentStateChangedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, return_item_id=%r, return_shipment_state=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.return_item_id,
                self.return_shipment_state,
            )
        )
class OrderReturnShipmentStateChangedMessagePayload(MessagePayload):
    """Payload carrying a return item's id and its new shipment state.

    The ``type`` discriminator is hard-coded to
    ``"OrderReturnShipmentStateChanged"``; the ``type`` keyword accepted by
    ``__init__`` is ignored.
    """

    #: :class:`str` `(Named` ``returnItemId`` `in Commercetools)`
    return_item_id: str
    #: :class:`commercetools.types.ReturnShipmentState` `(Named` ``returnShipmentState`` `in Commercetools)`
    return_shipment_state: "ReturnShipmentState"

    def __init__(
        self,
        *,
        type: str = None,
        return_item_id: str = None,
        return_shipment_state: "ReturnShipmentState" = None
    ) -> None:
        self.return_item_id = return_item_id
        self.return_shipment_state = return_shipment_state
        super().__init__(type="OrderReturnShipmentStateChanged")

    def __repr__(self) -> str:
        values = (self.type, self.return_item_id, self.return_shipment_state)
        return (
            "OrderReturnShipmentStateChangedMessagePayload(type=%r, return_item_id=%r, return_shipment_state=%r)"
            % values
        )
class OrderShipmentStateChangedMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderShipmentStateChanged"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``shipment_state`` and
    ``old_shipment_state`` fields on top of :class:`Message`.
    """
    #: :class:`commercetools.types.ShipmentState` `(Named` ``shipmentState`` `in Commercetools)`
    shipment_state: "ShipmentState"
    #: Optional :class:`commercetools.types.ShipmentState` `(Named` ``oldShipmentState`` `in Commercetools)`
    old_shipment_state: typing.Optional["ShipmentState"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        shipment_state: "ShipmentState" = None,
        old_shipment_state: typing.Optional["ShipmentState"] = None
    ) -> None:
        self.shipment_state = shipment_state
        self.old_shipment_state = old_shipment_state
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderShipmentStateChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderShipmentStateChangedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, shipment_state=%r, old_shipment_state=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.shipment_state,
                self.old_shipment_state,
            )
        )
class OrderShipmentStateChangedMessagePayload(MessagePayload):
    """Payload carrying an order's new and previous shipment state.

    The ``type`` discriminator is hard-coded to ``"OrderShipmentStateChanged"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: :class:`commercetools.types.ShipmentState` `(Named` ``shipmentState`` `in Commercetools)`
    shipment_state: "ShipmentState"
    #: Optional :class:`commercetools.types.ShipmentState` `(Named` ``oldShipmentState`` `in Commercetools)`
    old_shipment_state: typing.Optional["ShipmentState"]

    def __init__(
        self,
        *,
        type: str = None,
        shipment_state: "ShipmentState" = None,
        old_shipment_state: typing.Optional["ShipmentState"] = None
    ) -> None:
        self.shipment_state = shipment_state
        self.old_shipment_state = old_shipment_state
        super().__init__(type="OrderShipmentStateChanged")

    def __repr__(self) -> str:
        values = (self.type, self.shipment_state, self.old_shipment_state)
        return (
            "OrderShipmentStateChangedMessagePayload(type=%r, shipment_state=%r, old_shipment_state=%r)"
            % values
        )
class OrderShippingAddressSetMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderShippingAddressSet"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``address`` and ``old_address`` fields
    on top of :class:`Message`.
    """
    #: Optional :class:`commercetools.types.Address`
    address: typing.Optional["Address"]
    #: Optional :class:`commercetools.types.Address` `(Named` ``oldAddress`` `in Commercetools)`
    old_address: typing.Optional["Address"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        address: typing.Optional["Address"] = None,
        old_address: typing.Optional["Address"] = None
    ) -> None:
        self.address = address
        self.old_address = old_address
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderShippingAddressSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderShippingAddressSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, address=%r, old_address=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.address,
                self.old_address,
            )
        )
class OrderShippingAddressSetMessagePayload(MessagePayload):
    """Payload carrying an order's new and previous shipping address.

    The ``type`` discriminator is hard-coded to ``"OrderShippingAddressSet"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: Optional :class:`commercetools.types.Address`
    address: typing.Optional["Address"]
    #: Optional :class:`commercetools.types.Address` `(Named` ``oldAddress`` `in Commercetools)`
    old_address: typing.Optional["Address"]

    def __init__(
        self,
        *,
        type: str = None,
        address: typing.Optional["Address"] = None,
        old_address: typing.Optional["Address"] = None
    ) -> None:
        self.address = address
        self.old_address = old_address
        super().__init__(type="OrderShippingAddressSet")

    def __repr__(self) -> str:
        values = (self.type, self.address, self.old_address)
        return (
            "OrderShippingAddressSetMessagePayload(type=%r, address=%r, old_address=%r)"
            % values
        )
class OrderShippingInfoSetMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderShippingInfoSet"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``shipping_info`` and
    ``old_shipping_info`` fields on top of :class:`Message`.
    """
    #: Optional :class:`commercetools.types.ShippingInfo` `(Named` ``shippingInfo`` `in Commercetools)`
    shipping_info: typing.Optional["ShippingInfo"]
    #: Optional :class:`commercetools.types.ShippingInfo` `(Named` ``oldShippingInfo`` `in Commercetools)`
    old_shipping_info: typing.Optional["ShippingInfo"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        shipping_info: typing.Optional["ShippingInfo"] = None,
        old_shipping_info: typing.Optional["ShippingInfo"] = None
    ) -> None:
        self.shipping_info = shipping_info
        self.old_shipping_info = old_shipping_info
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderShippingInfoSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderShippingInfoSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, shipping_info=%r, old_shipping_info=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.shipping_info,
                self.old_shipping_info,
            )
        )
class OrderShippingInfoSetMessagePayload(MessagePayload):
    """Payload carrying an order's new and previous shipping info.

    The ``type`` discriminator is hard-coded to ``"OrderShippingInfoSet"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: Optional :class:`commercetools.types.ShippingInfo` `(Named` ``shippingInfo`` `in Commercetools)`
    shipping_info: typing.Optional["ShippingInfo"]
    #: Optional :class:`commercetools.types.ShippingInfo` `(Named` ``oldShippingInfo`` `in Commercetools)`
    old_shipping_info: typing.Optional["ShippingInfo"]

    def __init__(
        self,
        *,
        type: str = None,
        shipping_info: typing.Optional["ShippingInfo"] = None,
        old_shipping_info: typing.Optional["ShippingInfo"] = None
    ) -> None:
        self.shipping_info = shipping_info
        self.old_shipping_info = old_shipping_info
        super().__init__(type="OrderShippingInfoSet")

    def __repr__(self) -> str:
        values = (self.type, self.shipping_info, self.old_shipping_info)
        return (
            "OrderShippingInfoSetMessagePayload(type=%r, shipping_info=%r, old_shipping_info=%r)"
            % values
        )
class OrderShippingRateInputSetMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderShippingRateInputSet"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``shipping_rate_input`` and
    ``old_shipping_rate_input`` fields on top of :class:`Message`.
    """
    #: Optional :class:`commercetools.types.ShippingRateInput` `(Named` ``shippingRateInput`` `in Commercetools)`
    shipping_rate_input: typing.Optional["ShippingRateInput"]
    #: Optional :class:`commercetools.types.ShippingRateInput` `(Named` ``oldShippingRateInput`` `in Commercetools)`
    old_shipping_rate_input: typing.Optional["ShippingRateInput"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        shipping_rate_input: typing.Optional["ShippingRateInput"] = None,
        old_shipping_rate_input: typing.Optional["ShippingRateInput"] = None
    ) -> None:
        self.shipping_rate_input = shipping_rate_input
        self.old_shipping_rate_input = old_shipping_rate_input
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderShippingRateInputSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderShippingRateInputSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, shipping_rate_input=%r, old_shipping_rate_input=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.shipping_rate_input,
                self.old_shipping_rate_input,
            )
        )
class OrderShippingRateInputSetMessagePayload(MessagePayload):
    """Payload carrying an order's new and previous shipping rate input.

    The ``type`` discriminator is hard-coded to
    ``"OrderShippingRateInputSet"``; the ``type`` keyword accepted by
    ``__init__`` is ignored.
    """

    #: Optional :class:`commercetools.types.ShippingRateInput` `(Named` ``shippingRateInput`` `in Commercetools)`
    shipping_rate_input: typing.Optional["ShippingRateInput"]
    #: Optional :class:`commercetools.types.ShippingRateInput` `(Named` ``oldShippingRateInput`` `in Commercetools)`
    old_shipping_rate_input: typing.Optional["ShippingRateInput"]

    def __init__(
        self,
        *,
        type: str = None,
        shipping_rate_input: typing.Optional["ShippingRateInput"] = None,
        old_shipping_rate_input: typing.Optional["ShippingRateInput"] = None
    ) -> None:
        self.shipping_rate_input = shipping_rate_input
        self.old_shipping_rate_input = old_shipping_rate_input
        super().__init__(type="OrderShippingRateInputSet")

    def __repr__(self) -> str:
        values = (self.type, self.shipping_rate_input, self.old_shipping_rate_input)
        return (
            "OrderShippingRateInputSetMessagePayload(type=%r, shipping_rate_input=%r, old_shipping_rate_input=%r)"
            % values
        )
class OrderStateChangedMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderStateChanged"``; the ``type`` argument accepted by ``__init__``
    is discarded. Adds ``order_state`` and ``old_order_state`` fields on
    top of :class:`Message`.
    """
    #: :class:`commercetools.types.OrderState` `(Named` ``orderState`` `in Commercetools)`
    order_state: "OrderState"
    #: :class:`commercetools.types.OrderState` `(Named` ``oldOrderState`` `in Commercetools)`
    old_order_state: "OrderState"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        order_state: "OrderState" = None,
        old_order_state: "OrderState" = None
    ) -> None:
        self.order_state = order_state
        self.old_order_state = old_order_state
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderStateChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderStateChangedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, order_state=%r, old_order_state=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.order_state,
                self.old_order_state,
            )
        )
class OrderStateChangedMessagePayload(MessagePayload):
    """Payload carrying an order's new and previous order state.

    The ``type`` discriminator is hard-coded to ``"OrderStateChanged"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: :class:`commercetools.types.OrderState` `(Named` ``orderState`` `in Commercetools)`
    order_state: "OrderState"
    #: :class:`commercetools.types.OrderState` `(Named` ``oldOrderState`` `in Commercetools)`
    old_order_state: "OrderState"

    def __init__(
        self,
        *,
        type: str = None,
        order_state: "OrderState" = None,
        old_order_state: "OrderState" = None
    ) -> None:
        self.order_state = order_state
        self.old_order_state = old_order_state
        super().__init__(type="OrderStateChanged")

    def __repr__(self) -> str:
        values = (self.type, self.order_state, self.old_order_state)
        return (
            "OrderStateChangedMessagePayload(type=%r, order_state=%r, old_order_state=%r)"
            % values
        )
class OrderStateTransitionMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderStateTransition"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``state`` and ``force`` fields on top
    of :class:`Message`.
    """
    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        state: "StateReference" = None,
        force: bool = None
    ) -> None:
        self.state = state
        self.force = force
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderStateTransition",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderStateTransitionMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, state=%r, force=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.state,
                self.force,
            )
        )
class OrderStateTransitionMessagePayload(MessagePayload):
    """Payload carrying the target state of an order state transition.

    The ``type`` discriminator is hard-coded to ``"OrderStateTransition"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool

    def __init__(
        self, *, type: str = None, state: "StateReference" = None, force: bool = None
    ) -> None:
        self.state = state
        self.force = force
        super().__init__(type="OrderStateTransition")

    def __repr__(self) -> str:
        values = (self.type, self.state, self.force)
        return "OrderStateTransitionMessagePayload(type=%r, state=%r, force=%r)" % values
class OrderStoreSetMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"OrderStoreSet"``; the ``type`` argument accepted by ``__init__`` is
    discarded. Adds a ``store`` field on top of :class:`Message`.
    """
    #: :class:`commercetools.types.StoreKeyReference`
    store: "StoreKeyReference"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        store: "StoreKeyReference" = None
    ) -> None:
        self.store = store
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="OrderStoreSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "OrderStoreSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, store=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.store,
            )
        )
class OrderStoreSetMessagePayload(MessagePayload):
    """Payload carrying the store that was set on an order.

    The ``type`` discriminator is hard-coded to ``"OrderStoreSet"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: :class:`commercetools.types.StoreKeyReference`
    store: "StoreKeyReference"

    def __init__(self, *, type: str = None, store: "StoreKeyReference" = None) -> None:
        self.store = store
        super().__init__(type="OrderStoreSet")

    def __repr__(self) -> str:
        values = (self.type, self.store)
        return "OrderStoreSetMessagePayload(type=%r, store=%r)" % values
class ParcelAddedToDeliveryMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"ParcelAddedToDelivery"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``delivery`` and ``parcel`` fields on
    top of :class:`Message`.
    """
    #: :class:`commercetools.types.Delivery`
    delivery: "Delivery"
    #: :class:`commercetools.types.Parcel`
    parcel: "Parcel"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery: "Delivery" = None,
        parcel: "Parcel" = None
    ) -> None:
        self.delivery = delivery
        self.parcel = parcel
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ParcelAddedToDelivery",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "ParcelAddedToDeliveryMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, delivery=%r, parcel=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.delivery,
                self.parcel,
            )
        )
class ParcelAddedToDeliveryMessagePayload(MessagePayload):
    """Payload carrying the delivery and the parcel that was added to it.

    The ``type`` discriminator is hard-coded to ``"ParcelAddedToDelivery"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: :class:`commercetools.types.Delivery`
    delivery: "Delivery"
    #: :class:`commercetools.types.Parcel`
    parcel: "Parcel"

    def __init__(
        self, *, type: str = None, delivery: "Delivery" = None, parcel: "Parcel" = None
    ) -> None:
        self.delivery = delivery
        self.parcel = parcel
        super().__init__(type="ParcelAddedToDelivery")

    def __repr__(self) -> str:
        values = (self.type, self.delivery, self.parcel)
        return (
            "ParcelAddedToDeliveryMessagePayload(type=%r, delivery=%r, parcel=%r)"
            % values
        )
class ParcelItemsUpdatedMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"ParcelItemsUpdated"``; the ``type`` argument accepted by ``__init__``
    is discarded. Adds ``parcel_id``, ``delivery_id``, ``items`` and
    ``old_items`` fields on top of :class:`Message`.
    """
    #: :class:`str` `(Named` ``parcelId`` `in Commercetools)`
    parcel_id: str
    #: Optional :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: typing.Optional[str]
    #: List of :class:`commercetools.types.DeliveryItem`
    items: typing.List["DeliveryItem"]
    #: List of :class:`commercetools.types.DeliveryItem` `(Named` ``oldItems`` `in Commercetools)`
    old_items: typing.List["DeliveryItem"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        parcel_id: str = None,
        delivery_id: typing.Optional[str] = None,
        items: typing.List["DeliveryItem"] = None,
        old_items: typing.List["DeliveryItem"] = None
    ) -> None:
        self.parcel_id = parcel_id
        self.delivery_id = delivery_id
        self.items = items
        self.old_items = old_items
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ParcelItemsUpdated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "ParcelItemsUpdatedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, parcel_id=%r, delivery_id=%r, items=%r, old_items=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.parcel_id,
                self.delivery_id,
                self.items,
                self.old_items,
            )
        )
class ParcelItemsUpdatedMessagePayload(MessagePayload):
    """Payload carrying a parcel's new and previous delivery items.

    The ``type`` discriminator is hard-coded to ``"ParcelItemsUpdated"``;
    the ``type`` keyword accepted by ``__init__`` is ignored.
    """

    #: :class:`str` `(Named` ``parcelId`` `in Commercetools)`
    parcel_id: str
    #: Optional :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: typing.Optional[str]
    #: List of :class:`commercetools.types.DeliveryItem`
    items: typing.List["DeliveryItem"]
    #: List of :class:`commercetools.types.DeliveryItem` `(Named` ``oldItems`` `in Commercetools)`
    old_items: typing.List["DeliveryItem"]

    def __init__(
        self,
        *,
        type: str = None,
        parcel_id: str = None,
        delivery_id: typing.Optional[str] = None,
        items: typing.List["DeliveryItem"] = None,
        old_items: typing.List["DeliveryItem"] = None
    ) -> None:
        self.parcel_id = parcel_id
        self.delivery_id = delivery_id
        self.items = items
        self.old_items = old_items
        super().__init__(type="ParcelItemsUpdated")

    def __repr__(self) -> str:
        values = (self.type, self.parcel_id, self.delivery_id, self.items, self.old_items)
        return (
            "ParcelItemsUpdatedMessagePayload(type=%r, parcel_id=%r, delivery_id=%r, items=%r, old_items=%r)"
            % values
        )
class ParcelMeasurementsUpdatedMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"ParcelMeasurementsUpdated"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``delivery_id``, ``parcel_id`` and
    ``measurements`` fields on top of :class:`Message`.
    """
    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: :class:`str` `(Named` ``parcelId`` `in Commercetools)`
    parcel_id: str
    #: Optional :class:`commercetools.types.ParcelMeasurements`
    measurements: typing.Optional["ParcelMeasurements"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery_id: str = None,
        parcel_id: str = None,
        measurements: typing.Optional["ParcelMeasurements"] = None
    ) -> None:
        self.delivery_id = delivery_id
        self.parcel_id = parcel_id
        self.measurements = measurements
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ParcelMeasurementsUpdated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "ParcelMeasurementsUpdatedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, delivery_id=%r, parcel_id=%r, measurements=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.delivery_id,
                self.parcel_id,
                self.measurements,
            )
        )
class ParcelMeasurementsUpdatedMessagePayload(MessagePayload):
    """Payload carrying a parcel's updated measurements.

    The ``type`` discriminator is hard-coded to
    ``"ParcelMeasurementsUpdated"``; the ``type`` keyword accepted by
    ``__init__`` is ignored.
    """

    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: :class:`str` `(Named` ``parcelId`` `in Commercetools)`
    parcel_id: str
    #: Optional :class:`commercetools.types.ParcelMeasurements`
    measurements: typing.Optional["ParcelMeasurements"]

    def __init__(
        self,
        *,
        type: str = None,
        delivery_id: str = None,
        parcel_id: str = None,
        measurements: typing.Optional["ParcelMeasurements"] = None
    ) -> None:
        self.delivery_id = delivery_id
        self.parcel_id = parcel_id
        self.measurements = measurements
        super().__init__(type="ParcelMeasurementsUpdated")

    def __repr__(self) -> str:
        values = (self.type, self.delivery_id, self.parcel_id, self.measurements)
        return (
            "ParcelMeasurementsUpdatedMessagePayload(type=%r, delivery_id=%r, parcel_id=%r, measurements=%r)"
            % values
        )
class ParcelRemovedFromDeliveryMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"ParcelRemovedFromDelivery"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``delivery_id`` and ``parcel`` fields
    on top of :class:`Message`.
    """
    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: :class:`commercetools.types.Parcel`
    parcel: "Parcel"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery_id: str = None,
        parcel: "Parcel" = None
    ) -> None:
        self.delivery_id = delivery_id
        self.parcel = parcel
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ParcelRemovedFromDelivery",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "ParcelRemovedFromDeliveryMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, delivery_id=%r, parcel=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.delivery_id,
                self.parcel,
            )
        )
class ParcelRemovedFromDeliveryMessagePayload(MessagePayload):
    """Payload carrying the parcel removed from a delivery.

    The ``type`` discriminator is hard-coded to
    ``"ParcelRemovedFromDelivery"``; the ``type`` keyword accepted by
    ``__init__`` is ignored.
    """

    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: :class:`commercetools.types.Parcel`
    parcel: "Parcel"

    def __init__(
        self, *, type: str = None, delivery_id: str = None, parcel: "Parcel" = None
    ) -> None:
        self.delivery_id = delivery_id
        self.parcel = parcel
        super().__init__(type="ParcelRemovedFromDelivery")

    def __repr__(self) -> str:
        values = (self.type, self.delivery_id, self.parcel)
        return (
            "ParcelRemovedFromDeliveryMessagePayload(type=%r, delivery_id=%r, parcel=%r)"
            % values
        )
class ParcelTrackingDataUpdatedMessage(Message):
    """Generated message class whose ``type`` discriminator is always
    ``"ParcelTrackingDataUpdated"``; the ``type`` argument accepted by
    ``__init__`` is discarded. Adds ``delivery_id``, ``parcel_id`` and
    ``tracking_data`` fields on top of :class:`Message`.
    """
    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: :class:`str` `(Named` ``parcelId`` `in Commercetools)`
    parcel_id: str
    #: Optional :class:`commercetools.types.TrackingData` `(Named` ``trackingData`` `in Commercetools)`
    tracking_data: typing.Optional["TrackingData"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        delivery_id: str = None,
        parcel_id: str = None,
        tracking_data: typing.Optional["TrackingData"] = None
    ) -> None:
        self.delivery_id = delivery_id
        self.parcel_id = parcel_id
        self.tracking_data = tracking_data
        # Caller-supplied ``type`` is ignored; the discriminator below is fixed.
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ParcelTrackingDataUpdated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        return (
            "ParcelTrackingDataUpdatedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, delivery_id=%r, parcel_id=%r, tracking_data=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.delivery_id,
                self.parcel_id,
                self.tracking_data,
            )
        )
class ParcelTrackingDataUpdatedMessagePayload(MessagePayload):
    """Payload of a ``ParcelTrackingDataUpdated`` message."""

    #: :class:`str` `(Named` ``deliveryId`` `in Commercetools)`
    delivery_id: str
    #: :class:`str` `(Named` ``parcelId`` `in Commercetools)`
    parcel_id: str
    #: Optional :class:`commercetools.types.TrackingData` `(Named` ``trackingData`` `in Commercetools)`
    tracking_data: typing.Optional["TrackingData"]

    def __init__(
        self,
        *,
        type: str = None,
        delivery_id: str = None,
        parcel_id: str = None,
        tracking_data: typing.Optional["TrackingData"] = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.delivery_id = delivery_id
        self.parcel_id = parcel_id
        self.tracking_data = tracking_data
        super().__init__(type="ParcelTrackingDataUpdated")

    def __repr__(self) -> str:
        fields = (self.type, self.delivery_id, self.parcel_id, self.tracking_data)
        return (
            "ParcelTrackingDataUpdatedMessagePayload(type=%r, delivery_id=%r, parcel_id=%r, tracking_data=%r)"
            % fields
        )
class PaymentCreatedMessage(Message):
    """Message with the fixed type ``"PaymentCreated"``.

    Carries the created payment on top of the common fields inherited
    from :class:`Message`.
    """

    #: :class:`commercetools.types.Payment`
    payment: "Payment"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        payment: "Payment" = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.payment = payment
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="PaymentCreated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "PaymentCreatedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, payment=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.payment,
            )
        )
class PaymentCreatedMessagePayload(MessagePayload):
    """Payload of a ``PaymentCreated`` message."""

    #: :class:`commercetools.types.Payment`
    payment: "Payment"

    def __init__(self, *, type: str = None, payment: "Payment" = None) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.payment = payment
        super().__init__(type="PaymentCreated")

    def __repr__(self) -> str:
        fields = (self.type, self.payment)
        return "PaymentCreatedMessagePayload(type=%r, payment=%r)" % fields
class PaymentInteractionAddedMessage(Message):
    """Message with the fixed type ``"PaymentInteractionAdded"``.

    Carries the added interaction (custom fields) on top of the common
    fields inherited from :class:`Message`.
    """

    #: :class:`commercetools.types.CustomFields`
    interaction: "CustomFields"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        interaction: "CustomFields" = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.interaction = interaction
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="PaymentInteractionAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "PaymentInteractionAddedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, interaction=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.interaction,
            )
        )
class PaymentInteractionAddedMessagePayload(MessagePayload):
    """Payload of a ``PaymentInteractionAdded`` message."""

    #: :class:`commercetools.types.CustomFields`
    interaction: "CustomFields"

    def __init__(self, *, type: str = None, interaction: "CustomFields" = None) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.interaction = interaction
        super().__init__(type="PaymentInteractionAdded")

    def __repr__(self) -> str:
        fields = (self.type, self.interaction)
        return "PaymentInteractionAddedMessagePayload(type=%r, interaction=%r)" % fields
class PaymentStatusInterfaceCodeSetMessage(Message):
    """Message with the fixed type ``"PaymentStatusInterfaceCodeSet"``.

    Carries the payment id and the new interface code on top of the
    common fields inherited from :class:`Message`.
    """

    #: :class:`str` `(Named` ``paymentId`` `in Commercetools)`
    payment_id: str
    #: :class:`str` `(Named` ``interfaceCode`` `in Commercetools)`
    interface_code: str
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        payment_id: str = None,
        interface_code: str = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.payment_id = payment_id
        self.interface_code = interface_code
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="PaymentStatusInterfaceCodeSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "PaymentStatusInterfaceCodeSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, payment_id=%r, interface_code=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.payment_id,
                self.interface_code,
            )
        )
class PaymentStatusInterfaceCodeSetMessagePayload(MessagePayload):
    """Payload of a ``PaymentStatusInterfaceCodeSet`` message."""

    #: :class:`str` `(Named` ``paymentId`` `in Commercetools)`
    payment_id: str
    #: :class:`str` `(Named` ``interfaceCode`` `in Commercetools)`
    interface_code: str

    def __init__(
        self,
        *,
        type: str = None,
        payment_id: str = None,
        interface_code: str = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.payment_id = payment_id
        self.interface_code = interface_code
        super().__init__(type="PaymentStatusInterfaceCodeSet")

    def __repr__(self) -> str:
        fields = (self.type, self.payment_id, self.interface_code)
        return (
            "PaymentStatusInterfaceCodeSetMessagePayload(type=%r, payment_id=%r, interface_code=%r)"
            % fields
        )
class PaymentStatusStateTransitionMessage(Message):
    """Message with the fixed type ``"PaymentStatusStateTransition"``.

    Carries the target state reference and the ``force`` flag on top of
    the common fields inherited from :class:`Message`.
    """

    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        state: "StateReference" = None,
        force: bool = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.state = state
        self.force = force
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="PaymentStatusStateTransition",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "PaymentStatusStateTransitionMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, state=%r, force=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.state,
                self.force,
            )
        )
class PaymentStatusStateTransitionMessagePayload(MessagePayload):
    """Payload of a ``PaymentStatusStateTransition`` message."""

    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool

    def __init__(
        self,
        *,
        type: str = None,
        state: "StateReference" = None,
        force: bool = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.state = state
        self.force = force
        super().__init__(type="PaymentStatusStateTransition")

    def __repr__(self) -> str:
        fields = (self.type, self.state, self.force)
        return (
            "PaymentStatusStateTransitionMessagePayload(type=%r, state=%r, force=%r)"
            % fields
        )
class PaymentTransactionAddedMessage(Message):
    """Message with the fixed type ``"PaymentTransactionAdded"``.

    Carries the added transaction on top of the common fields inherited
    from :class:`Message`.
    """

    #: :class:`commercetools.types.Transaction`
    transaction: "Transaction"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        transaction: "Transaction" = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.transaction = transaction
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="PaymentTransactionAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "PaymentTransactionAddedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, transaction=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.transaction,
            )
        )
class PaymentTransactionAddedMessagePayload(MessagePayload):
    """Payload of a ``PaymentTransactionAdded`` message."""

    #: :class:`commercetools.types.Transaction`
    transaction: "Transaction"

    def __init__(self, *, type: str = None, transaction: "Transaction" = None) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.transaction = transaction
        super().__init__(type="PaymentTransactionAdded")

    def __repr__(self) -> str:
        fields = (self.type, self.transaction)
        return "PaymentTransactionAddedMessagePayload(type=%r, transaction=%r)" % fields
class PaymentTransactionStateChangedMessage(Message):
    """Message with the fixed type ``"PaymentTransactionStateChanged"``.

    Carries the transaction id and its new state on top of the common
    fields inherited from :class:`Message`.
    """

    #: :class:`str` `(Named` ``transactionId`` `in Commercetools)`
    transaction_id: str
    #: :class:`commercetools.types.TransactionState`
    state: "TransactionState"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        transaction_id: str = None,
        state: "TransactionState" = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.transaction_id = transaction_id
        self.state = state
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="PaymentTransactionStateChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "PaymentTransactionStateChangedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, transaction_id=%r, state=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.transaction_id,
                self.state,
            )
        )
class PaymentTransactionStateChangedMessagePayload(MessagePayload):
    """Payload of a ``PaymentTransactionStateChanged`` message."""

    #: :class:`str` `(Named` ``transactionId`` `in Commercetools)`
    transaction_id: str
    #: :class:`commercetools.types.TransactionState`
    state: "TransactionState"

    def __init__(
        self,
        *,
        type: str = None,
        transaction_id: str = None,
        state: "TransactionState" = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.transaction_id = transaction_id
        self.state = state
        super().__init__(type="PaymentTransactionStateChanged")

    def __repr__(self) -> str:
        fields = (self.type, self.transaction_id, self.state)
        return (
            "PaymentTransactionStateChangedMessagePayload(type=%r, transaction_id=%r, state=%r)"
            % fields
        )
class ProductAddedToCategoryMessage(Message):
    """Message with the fixed type ``"ProductAddedToCategory"``.

    Carries the category reference and the ``staged`` flag on top of the
    common fields inherited from :class:`Message`.
    """

    #: :class:`commercetools.types.CategoryReference`
    category: "CategoryReference"
    #: :class:`bool`
    staged: bool
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        category: "CategoryReference" = None,
        staged: bool = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.category = category
        self.staged = staged
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductAddedToCategory",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductAddedToCategoryMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, category=%r, staged=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.category,
                self.staged,
            )
        )
class ProductAddedToCategoryMessagePayload(MessagePayload):
    """Payload of a ``ProductAddedToCategory`` message."""

    #: :class:`commercetools.types.CategoryReference`
    category: "CategoryReference"
    #: :class:`bool`
    staged: bool

    def __init__(
        self,
        *,
        type: str = None,
        category: "CategoryReference" = None,
        staged: bool = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.category = category
        self.staged = staged
        super().__init__(type="ProductAddedToCategory")

    def __repr__(self) -> str:
        fields = (self.type, self.category, self.staged)
        return (
            "ProductAddedToCategoryMessagePayload(type=%r, category=%r, staged=%r)"
            % fields
        )
class ProductCreatedMessage(Message):
    """Message with the fixed type ``"ProductCreated"``.

    Carries the created product projection on top of the common fields
    inherited from :class:`Message`.
    """

    #: :class:`commercetools.types.ProductProjection` `(Named` ``productProjection`` `in Commercetools)`
    product_projection: "ProductProjection"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        product_projection: "ProductProjection" = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.product_projection = product_projection
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductCreated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductCreatedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, product_projection=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.product_projection,
            )
        )
class ProductCreatedMessagePayload(MessagePayload):
    """Payload of a ``ProductCreated`` message."""

    #: :class:`commercetools.types.ProductProjection` `(Named` ``productProjection`` `in Commercetools)`
    product_projection: "ProductProjection"

    def __init__(
        self,
        *,
        type: str = None,
        product_projection: "ProductProjection" = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.product_projection = product_projection
        super().__init__(type="ProductCreated")

    def __repr__(self) -> str:
        fields = (self.type, self.product_projection)
        return "ProductCreatedMessagePayload(type=%r, product_projection=%r)" % fields
class ProductDeletedMessage(Message):
    """Message with the fixed type ``"ProductDeleted"``.

    Carries the removed image URLs and the last current projection on
    top of the common fields inherited from :class:`Message`.
    """

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list
    #: :class:`commercetools.types.ProductProjection` `(Named` ``currentProjection`` `in Commercetools)`
    current_projection: "ProductProjection"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        removed_image_urls: list = None,
        current_projection: "ProductProjection" = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.removed_image_urls = removed_image_urls
        self.current_projection = current_projection
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductDeleted",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductDeletedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, removed_image_urls=%r, current_projection=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.removed_image_urls,
                self.current_projection,
            )
        )
class ProductDeletedMessagePayload(MessagePayload):
    """Payload of a ``ProductDeleted`` message."""

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list
    #: :class:`commercetools.types.ProductProjection` `(Named` ``currentProjection`` `in Commercetools)`
    current_projection: "ProductProjection"

    def __init__(
        self,
        *,
        type: str = None,
        removed_image_urls: list = None,
        current_projection: "ProductProjection" = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.removed_image_urls = removed_image_urls
        self.current_projection = current_projection
        super().__init__(type="ProductDeleted")

    def __repr__(self) -> str:
        fields = (self.type, self.removed_image_urls, self.current_projection)
        return (
            "ProductDeletedMessagePayload(type=%r, removed_image_urls=%r, current_projection=%r)"
            % fields
        )
class ProductImageAddedMessage(Message):
    """Message with the fixed type ``"ProductImageAdded"``.

    Carries the variant id, the added image and the ``staged`` flag on
    top of the common fields inherited from :class:`Message`.
    """

    #: :class:`int` `(Named` ``variantId`` `in Commercetools)`
    variant_id: int
    #: :class:`commercetools.types.Image`
    image: "Image"
    #: :class:`bool`
    staged: bool
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        variant_id: int = None,
        image: "Image" = None,
        staged: bool = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.variant_id = variant_id
        self.image = image
        self.staged = staged
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductImageAdded",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductImageAddedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, variant_id=%r, image=%r, staged=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.variant_id,
                self.image,
                self.staged,
            )
        )
class ProductImageAddedMessagePayload(MessagePayload):
    """Payload of a ``ProductImageAdded`` message."""

    #: :class:`int` `(Named` ``variantId`` `in Commercetools)`
    variant_id: int
    #: :class:`commercetools.types.Image`
    image: "Image"
    #: :class:`bool`
    staged: bool

    def __init__(
        self,
        *,
        type: str = None,
        variant_id: int = None,
        image: "Image" = None,
        staged: bool = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.variant_id = variant_id
        self.image = image
        self.staged = staged
        super().__init__(type="ProductImageAdded")

    def __repr__(self) -> str:
        fields = (self.type, self.variant_id, self.image, self.staged)
        return (
            "ProductImageAddedMessagePayload(type=%r, variant_id=%r, image=%r, staged=%r)"
            % fields
        )
class ProductPriceDiscountsSetMessage(Message):
    """Message with the fixed type ``"ProductPriceDiscountsSet"``.

    Carries the list of updated prices on top of the common fields
    inherited from :class:`Message`.
    """

    #: List of :class:`commercetools.types.ProductPriceDiscountsSetUpdatedPrice` `(Named` ``updatedPrices`` `in Commercetools)`
    updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"]
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"] = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.updated_prices = updated_prices
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductPriceDiscountsSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductPriceDiscountsSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, updated_prices=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.updated_prices,
            )
        )
class ProductPriceDiscountsSetMessagePayload(MessagePayload):
    """Payload of a ``ProductPriceDiscountsSet`` message."""

    #: List of :class:`commercetools.types.ProductPriceDiscountsSetUpdatedPrice` `(Named` ``updatedPrices`` `in Commercetools)`
    updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"]

    def __init__(
        self,
        *,
        type: str = None,
        updated_prices: typing.List["ProductPriceDiscountsSetUpdatedPrice"] = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.updated_prices = updated_prices
        super().__init__(type="ProductPriceDiscountsSet")

    def __repr__(self) -> str:
        fields = (self.type, self.updated_prices)
        return (
            "ProductPriceDiscountsSetMessagePayload(type=%r, updated_prices=%r)"
            % fields
        )
class ProductPriceExternalDiscountSetMessage(Message):
    """Message with the fixed type ``"ProductPriceExternalDiscountSet"``.

    Carries the variant identification (id, optional key, optional sku),
    the price id, the optional discounted price and the ``staged`` flag,
    on top of the common fields inherited from :class:`Message`.
    """

    #: :class:`int` `(Named` ``variantId`` `in Commercetools)`
    variant_id: int
    #: Optional :class:`str` `(Named` ``variantKey`` `in Commercetools)`
    variant_key: typing.Optional[str]
    #: Optional :class:`str`
    sku: typing.Optional[str]
    #: :class:`str` `(Named` ``priceId`` `in Commercetools)`
    price_id: str
    #: Optional :class:`commercetools.types.DiscountedPrice`
    discounted: typing.Optional["DiscountedPrice"]
    #: :class:`bool`
    staged: bool
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        variant_id: int = None,
        variant_key: typing.Optional[str] = None,
        sku: typing.Optional[str] = None,
        price_id: str = None,
        discounted: typing.Optional["DiscountedPrice"] = None,
        staged: bool = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.variant_id = variant_id
        self.variant_key = variant_key
        self.sku = sku
        self.price_id = price_id
        self.discounted = discounted
        self.staged = staged
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductPriceExternalDiscountSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductPriceExternalDiscountSetMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, variant_id=%r, variant_key=%r, sku=%r, price_id=%r, discounted=%r, staged=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.variant_id,
                self.variant_key,
                self.sku,
                self.price_id,
                self.discounted,
                self.staged,
            )
        )
class ProductPriceExternalDiscountSetMessagePayload(MessagePayload):
    """Payload of a ``ProductPriceExternalDiscountSet`` message."""

    #: :class:`int` `(Named` ``variantId`` `in Commercetools)`
    variant_id: int
    #: Optional :class:`str` `(Named` ``variantKey`` `in Commercetools)`
    variant_key: typing.Optional[str]
    #: Optional :class:`str`
    sku: typing.Optional[str]
    #: :class:`str` `(Named` ``priceId`` `in Commercetools)`
    price_id: str
    #: Optional :class:`commercetools.types.DiscountedPrice`
    discounted: typing.Optional["DiscountedPrice"]
    #: :class:`bool`
    staged: bool

    def __init__(
        self,
        *,
        type: str = None,
        variant_id: int = None,
        variant_key: typing.Optional[str] = None,
        sku: typing.Optional[str] = None,
        price_id: str = None,
        discounted: typing.Optional["DiscountedPrice"] = None,
        staged: bool = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.variant_id = variant_id
        self.variant_key = variant_key
        self.sku = sku
        self.price_id = price_id
        self.discounted = discounted
        self.staged = staged
        super().__init__(type="ProductPriceExternalDiscountSet")

    def __repr__(self) -> str:
        fields = (
            self.type,
            self.variant_id,
            self.variant_key,
            self.sku,
            self.price_id,
            self.discounted,
            self.staged,
        )
        return (
            "ProductPriceExternalDiscountSetMessagePayload(type=%r, variant_id=%r, variant_key=%r, sku=%r, price_id=%r, discounted=%r, staged=%r)"
            % fields
        )
class ProductPublishedMessage(Message):
    """Message with the fixed type ``"ProductPublished"``.

    Carries the removed image URLs, the published product projection and
    the publish scope, on top of the common fields inherited from
    :class:`Message`.
    """

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list
    #: :class:`commercetools.types.ProductProjection` `(Named` ``productProjection`` `in Commercetools)`
    product_projection: "ProductProjection"
    #: :class:`commercetools.types.ProductPublishScope`
    scope: "ProductPublishScope"
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        removed_image_urls: list = None,
        product_projection: "ProductProjection" = None,
        scope: "ProductPublishScope" = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.removed_image_urls = removed_image_urls
        self.product_projection = product_projection
        self.scope = scope
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductPublished",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductPublishedMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, removed_image_urls=%r, product_projection=%r, scope=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.removed_image_urls,
                self.product_projection,
                self.scope,
            )
        )
class ProductPublishedMessagePayload(MessagePayload):
    """Payload of a ``ProductPublished`` message."""

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list
    #: :class:`commercetools.types.ProductProjection` `(Named` ``productProjection`` `in Commercetools)`
    product_projection: "ProductProjection"
    #: :class:`commercetools.types.ProductPublishScope`
    scope: "ProductPublishScope"

    def __init__(
        self,
        *,
        type: str = None,
        removed_image_urls: list = None,
        product_projection: "ProductProjection" = None,
        scope: "ProductPublishScope" = None,
    ) -> None:
        # ``type`` is accepted only for signature symmetry; the
        # discriminator passed to the base class is fixed.
        self.removed_image_urls = removed_image_urls
        self.product_projection = product_projection
        self.scope = scope
        super().__init__(type="ProductPublished")

    def __repr__(self) -> str:
        fields = (
            self.type,
            self.removed_image_urls,
            self.product_projection,
            self.scope,
        )
        return (
            "ProductPublishedMessagePayload(type=%r, removed_image_urls=%r, product_projection=%r, scope=%r)"
            % fields
        )
class ProductRemovedFromCategoryMessage(Message):
    """Message with the fixed type ``"ProductRemovedFromCategory"``.

    Carries the category reference and the ``staged`` flag on top of the
    common fields inherited from :class:`Message`.
    """

    #: :class:`commercetools.types.CategoryReference`
    category: "CategoryReference"
    #: :class:`bool`
    staged: bool
    def __init__(
        self,
        *,
        id: str = None,
        version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None,
        resource: "Reference" = None,
        resource_version: int = None,
        type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        category: "CategoryReference" = None,
        staged: bool = None
    ) -> None:
        """Store the message-specific fields, then forward the common
        message attributes to :class:`Message` with the fixed ``type``."""
        # NOTE: the ``type`` argument is ignored; the discriminator is
        # hard-coded in the super() call below.
        self.category = category
        self.staged = staged
        super().__init__(
            id=id,
            version=version,
            created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by,
            created_by=created_by,
            sequence_number=sequence_number,
            resource=resource,
            resource_version=resource_version,
            type="ProductRemovedFromCategory",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )
    def __repr__(self) -> str:
        """Return a constructor-style debug representation of all fields."""
        return (
            "ProductRemovedFromCategoryMessage(id=%r, version=%r, created_at=%r, last_modified_at=%r, last_modified_by=%r, created_by=%r, sequence_number=%r, resource=%r, resource_version=%r, type=%r, resource_user_provided_identifiers=%r, category=%r, staged=%r)"
            % (
                self.id,
                self.version,
                self.created_at,
                self.last_modified_at,
                self.last_modified_by,
                self.created_by,
                self.sequence_number,
                self.resource,
                self.resource_version,
                self.type,
                self.resource_user_provided_identifiers,
                self.category,
                self.staged,
            )
        )
class ProductRemovedFromCategoryMessagePayload(MessagePayload):
    """Payload carried by a "ProductRemovedFromCategory" message."""

    #: :class:`commercetools.types.CategoryReference`
    category: "CategoryReference"
    #: :class:`bool`
    staged: bool

    def __init__(
        self, *, type: str = None, category: "CategoryReference" = None,
        staged: bool = None
    ) -> None:
        self.category = category
        self.staged = staged
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ProductRemovedFromCategory")

    def __repr__(self) -> str:
        fields = ("type", "category", "staged")
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in fields)
        return f"ProductRemovedFromCategoryMessagePayload({rendered})"
class ProductRevertedStagedChangesMessage(Message):
    """Message emitted when a product's staged changes are reverted."""

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        removed_image_urls: list = None
    ) -> None:
        self.removed_image_urls = removed_image_urls
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version,
            type="ProductRevertedStagedChanges",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
            "removed_image_urls",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ProductRevertedStagedChangesMessage({rendered})"
class ProductRevertedStagedChangesMessagePayload(MessagePayload):
    """Payload carried by a "ProductRevertedStagedChanges" message."""

    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(self, *, type: str = None, removed_image_urls: list = None) -> None:
        self.removed_image_urls = removed_image_urls
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ProductRevertedStagedChanges")

    def __repr__(self) -> str:
        return (
            f"ProductRevertedStagedChangesMessagePayload(type={self.type!r}, "
            f"removed_image_urls={self.removed_image_urls!r})"
        )
class ProductSlugChangedMessage(Message):
    """Message emitted when a product's slug changes."""

    #: :class:`commercetools.types.LocalizedString`
    slug: "LocalizedString"

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        slug: "LocalizedString" = None
    ) -> None:
        self.slug = slug
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version, type="ProductSlugChanged",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
            "slug",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ProductSlugChangedMessage({rendered})"
class ProductSlugChangedMessagePayload(MessagePayload):
    """Payload carried by a "ProductSlugChanged" message."""

    #: :class:`commercetools.types.LocalizedString`
    slug: "LocalizedString"

    def __init__(self, *, type: str = None, slug: "LocalizedString" = None) -> None:
        self.slug = slug
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ProductSlugChanged")

    def __repr__(self) -> str:
        return f"ProductSlugChangedMessagePayload(type={self.type!r}, slug={self.slug!r})"
class ProductStateTransitionMessage(Message):
    """Message emitted when a product transitions to another state."""

    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        state: "StateReference" = None, force: bool = None
    ) -> None:
        self.state = state
        self.force = force
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version, type="ProductStateTransition",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
            "state", "force",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ProductStateTransitionMessage({rendered})"
class ProductStateTransitionMessagePayload(MessagePayload):
    """Payload carried by a "ProductStateTransition" message."""

    #: :class:`commercetools.types.StateReference`
    state: "StateReference"
    #: :class:`bool`
    force: bool

    def __init__(
        self, *, type: str = None, state: "StateReference" = None, force: bool = None
    ) -> None:
        self.state = state
        self.force = force
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ProductStateTransition")

    def __repr__(self) -> str:
        fields = ("type", "state", "force")
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in fields)
        return f"ProductStateTransitionMessagePayload({rendered})"
class ProductUnpublishedMessage(Message):
    """Message emitted when a product is unpublished (no extra fields)."""

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None
    ) -> None:
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version, type="ProductUnpublished",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ProductUnpublishedMessage({rendered})"
class ProductUnpublishedMessagePayload(MessagePayload):
    """Payload carried by a "ProductUnpublished" message (no extra fields)."""

    def __init__(self, *, type: str = None) -> None:
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ProductUnpublished")

    def __repr__(self) -> str:
        return f"ProductUnpublishedMessagePayload(type={self.type!r})"
class ProductVariantDeletedMessage(Message):
    """Message emitted when a product variant is deleted."""

    #: :class:`commercetools.types.ProductVariant`
    variant: "ProductVariant"
    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        variant: "ProductVariant" = None, removed_image_urls: list = None
    ) -> None:
        self.variant = variant
        self.removed_image_urls = removed_image_urls
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version, type="ProductVariantDeleted",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
            "variant", "removed_image_urls",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ProductVariantDeletedMessage({rendered})"
class ProductVariantDeletedMessagePayload(MessagePayload):
    """Payload carried by a "ProductVariantDeleted" message."""

    #: :class:`commercetools.types.ProductVariant`
    variant: "ProductVariant"
    #: :class:`list` `(Named` ``removedImageUrls`` `in Commercetools)`
    removed_image_urls: list

    def __init__(
        self, *, type: str = None, variant: "ProductVariant" = None,
        removed_image_urls: list = None
    ) -> None:
        self.variant = variant
        self.removed_image_urls = removed_image_urls
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ProductVariantDeleted")

    def __repr__(self) -> str:
        fields = ("type", "variant", "removed_image_urls")
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in fields)
        return f"ProductVariantDeletedMessagePayload({rendered})"
class ReviewCreatedMessage(Message):
    """Message emitted when a review is created."""

    #: :class:`commercetools.types.Review`
    review: "Review"

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        review: "Review" = None
    ) -> None:
        self.review = review
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version, type="ReviewCreated",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
            "review",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ReviewCreatedMessage({rendered})"
class ReviewCreatedMessagePayload(MessagePayload):
    """Payload carried by a "ReviewCreated" message."""

    #: :class:`commercetools.types.Review`
    review: "Review"

    def __init__(self, *, type: str = None, review: "Review" = None) -> None:
        self.review = review
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ReviewCreated")

    def __repr__(self) -> str:
        return f"ReviewCreatedMessagePayload(type={self.type!r}, review={self.review!r})"
class ReviewRatingSetMessage(Message):
    """Message emitted when a review's rating is set or changed."""

    #: Optional :class:`int` `(Named` ``oldRating`` `in Commercetools)`
    old_rating: typing.Optional[int]
    #: Optional :class:`int` `(Named` ``newRating`` `in Commercetools)`
    new_rating: typing.Optional[int]
    #: :class:`bool` `(Named` ``includedInStatistics`` `in Commercetools)`
    included_in_statistics: bool
    #: Optional :class:`commercetools.types.Reference`
    target: typing.Optional["Reference"]

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        old_rating: typing.Optional[int] = None,
        new_rating: typing.Optional[int] = None,
        included_in_statistics: bool = None,
        target: typing.Optional["Reference"] = None
    ) -> None:
        self.old_rating = old_rating
        self.new_rating = new_rating
        self.included_in_statistics = included_in_statistics
        self.target = target
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version, type="ReviewRatingSet",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
            "old_rating", "new_rating", "included_in_statistics", "target",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ReviewRatingSetMessage({rendered})"
class ReviewRatingSetMessagePayload(MessagePayload):
    """Payload carried by a "ReviewRatingSet" message."""

    #: Optional :class:`int` `(Named` ``oldRating`` `in Commercetools)`
    old_rating: typing.Optional[int]
    #: Optional :class:`int` `(Named` ``newRating`` `in Commercetools)`
    new_rating: typing.Optional[int]
    #: :class:`bool` `(Named` ``includedInStatistics`` `in Commercetools)`
    included_in_statistics: bool
    #: Optional :class:`commercetools.types.Reference`
    target: typing.Optional["Reference"]

    def __init__(
        self, *, type: str = None,
        old_rating: typing.Optional[int] = None,
        new_rating: typing.Optional[int] = None,
        included_in_statistics: bool = None,
        target: typing.Optional["Reference"] = None
    ) -> None:
        self.old_rating = old_rating
        self.new_rating = new_rating
        self.included_in_statistics = included_in_statistics
        self.target = target
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ReviewRatingSet")

    def __repr__(self) -> str:
        fields = (
            "type", "old_rating", "new_rating", "included_in_statistics", "target"
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in fields)
        return f"ReviewRatingSetMessagePayload({rendered})"
class ReviewStateTransitionMessage(Message):
    """Message emitted when a review transitions to another state."""

    #: :class:`commercetools.types.StateReference` `(Named` ``oldState`` `in Commercetools)`
    old_state: "StateReference"
    #: :class:`commercetools.types.StateReference` `(Named` ``newState`` `in Commercetools)`
    new_state: "StateReference"
    #: :class:`bool` `(Named` ``oldIncludedInStatistics`` `in Commercetools)`
    old_included_in_statistics: bool
    #: :class:`bool` `(Named` ``newIncludedInStatistics`` `in Commercetools)`
    new_included_in_statistics: bool
    #: :class:`commercetools.types.Reference`
    target: "Reference"
    #: :class:`bool`
    force: bool

    def __init__(
        self, *, id: str = None, version: int = None,
        created_at: datetime.datetime = None,
        last_modified_at: datetime.datetime = None,
        last_modified_by: typing.Optional["LastModifiedBy"] = None,
        created_by: typing.Optional["CreatedBy"] = None,
        sequence_number: int = None, resource: "Reference" = None,
        resource_version: int = None, type: str = None,
        resource_user_provided_identifiers: typing.Optional[
            "UserProvidedIdentifiers"
        ] = None,
        old_state: "StateReference" = None,
        new_state: "StateReference" = None,
        old_included_in_statistics: bool = None,
        new_included_in_statistics: bool = None,
        target: "Reference" = None,
        force: bool = None
    ) -> None:
        self.old_state = old_state
        self.new_state = new_state
        self.old_included_in_statistics = old_included_in_statistics
        self.new_included_in_statistics = new_included_in_statistics
        self.target = target
        self.force = force
        # The `type` discriminator is fixed for this message class.
        super().__init__(
            id=id, version=version, created_at=created_at,
            last_modified_at=last_modified_at,
            last_modified_by=last_modified_by, created_by=created_by,
            sequence_number=sequence_number, resource=resource,
            resource_version=resource_version, type="ReviewStateTransition",
            resource_user_provided_identifiers=resource_user_provided_identifiers,
        )

    def __repr__(self) -> str:
        attrs = (
            "id", "version", "created_at", "last_modified_at",
            "last_modified_by", "created_by", "sequence_number", "resource",
            "resource_version", "type", "resource_user_provided_identifiers",
            "old_state", "new_state", "old_included_in_statistics",
            "new_included_in_statistics", "target", "force",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in attrs)
        return f"ReviewStateTransitionMessage({rendered})"
class ReviewStateTransitionMessagePayload(MessagePayload):
    """Payload carried by a "ReviewStateTransition" message."""

    #: :class:`commercetools.types.StateReference` `(Named` ``oldState`` `in Commercetools)`
    old_state: "StateReference"
    #: :class:`commercetools.types.StateReference` `(Named` ``newState`` `in Commercetools)`
    new_state: "StateReference"
    #: :class:`bool` `(Named` ``oldIncludedInStatistics`` `in Commercetools)`
    old_included_in_statistics: bool
    #: :class:`bool` `(Named` ``newIncludedInStatistics`` `in Commercetools)`
    new_included_in_statistics: bool
    #: :class:`commercetools.types.Reference`
    target: "Reference"
    #: :class:`bool`
    force: bool

    def __init__(
        self, *, type: str = None,
        old_state: "StateReference" = None,
        new_state: "StateReference" = None,
        old_included_in_statistics: bool = None,
        new_included_in_statistics: bool = None,
        target: "Reference" = None,
        force: bool = None
    ) -> None:
        self.old_state = old_state
        self.new_state = new_state
        self.old_included_in_statistics = old_included_in_statistics
        self.new_included_in_statistics = new_included_in_statistics
        self.target = target
        self.force = force
        # `type` is accepted for interface parity but fixed for this payload.
        super().__init__(type="ReviewStateTransition")

    def __repr__(self) -> str:
        fields = (
            "type", "old_state", "new_state", "old_included_in_statistics",
            "new_included_in_statistics", "target", "force",
        )
        rendered = ", ".join(f"{name}={getattr(self, name)!r}" for name in fields)
        return f"ReviewStateTransitionMessagePayload({rendered})"
|
from django.urls import path, include
from .user_admin import urls as user_admin_urls
# Application namespace used when reversing these routes
# (e.g. reverse("baserow_premium.api:admin_user:...")).
app_name = "baserow_premium.api"
# Mount the user-admin sub-URLconf under the admin/user/ prefix.
urlpatterns = [
    path("admin/user/", include(user_admin_urls, namespace="admin_user")),
]
|
# Redis key names shared by the indexing code.
# Latest block number as reported by the chain.
latest_block_redis_key = 'latest_block_from_chain'
# Hash of that latest chain block.
latest_block_hash_redis_key = 'latest_blockhash_from_chain'
# Highest block number already written to the database.
most_recent_indexed_block_redis_key = 'most_recently_indexed_block_from_db'
# Hash of the most recently indexed database block.
most_recent_indexed_block_hash_redis_key = 'most_recently_indexed_block_hash_from_db'
|
from django.core.management.base import BaseCommand
from parkings.importers import ParkingAreaImporter
class Command(BaseCommand):
    """Management command that runs :class:`ParkingAreaImporter`."""

    help = 'Uses the ParkingAreaImporter to import parking areas.'

    def add_arguments(self, parser):
        """Register the command-line arguments for the import."""
        # Required positional: path of the GeoJSON file to import.
        parser.add_argument('geojson_file_path')
        # Optional URL passed through to the importer alongside the path.
        parser.add_argument('--geojson_file_url', type=str, default=None)
        # Optional spatial reference id; importer default applies when None.
        parser.add_argument('--srid', '-s', type=int, default=None)
        # Optional domain code used as the importer's default domain.
        parser.add_argument('--domain', '-d', type=str, default=None)

    def handle(self, *, geojson_file_path, geojson_file_url=None,
               srid=None, domain=None, **kwargs):
        """Build the importer from the parsed options and run the import."""
        importer = ParkingAreaImporter(srid=srid, default_domain_code=domain)
        importer.import_parking_areas(geojson_file_path, geojson_file_url)
|
# Generated by Django 2.1.15 on 2021-09-05 00:18
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the `id_number` and `role` fields to the core `user` model."""

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='id_number',
            # 4-digit unique identifier (1000-9999). preserve_default=False:
            # the default 2222 only backfills existing rows during migration.
            field=models.PositiveIntegerField(default=2222, unique=True, validators=[django.core.validators.MinValueValidator(1000), django.core.validators.MaxValueValidator(9999)]),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='user',
            name='role',
            # NOTE(review): default 'admin' is not one of the declared choices
            # ('Employee'/'Admin'); it is only used to backfill existing rows
            # (preserve_default=False) — confirm the casing is intended.
            field=models.CharField(choices=[('Employee', 'Employee'), ('Admin', 'Admin')], default='admin', max_length=20),
            preserve_default=False,
        ),
    ]
|
from . import views
from django.conf import settings
from django.urls import path, re_path
from django.conf.urls.static import static
from rest_framework.authtoken.views import ObtainAuthToken
# API route table. The same class-based views serve list/create, update and
# delete endpoints; the HTTP method plus the optional <pk> select the action.
urlpatterns=[
    #post and get urls
    path('api/hood/',views.NeighborhoodList.as_view(),name='neighbor'),
    path('api/business/',views.BusinessList.as_view(),name='business'),
    path('api/users/',views.UserList.as_view(),name='users'),
    path('api/profile/',views.ProfileList.as_view(),name='profiles'),
    path('api/post/',views.PostList.as_view(),name='post'),
    #update urls
    path('api/update/profile/<int:pk>/',views.ProfileList.as_view(),name='update_profile'),
    path('api/update/users/<int:pk>/',views.UserList.as_view(),name='update_users'),
    re_path('api/update/business/(?P<pk>[0-9]+)/',views.BusinessList.as_view(),name='update_business'),
    path('api/update/hood/<int:pk>/',views.NeighborhoodList.as_view(),name='update_neighbors'),
    path('api/update/post/<int:pk>/',views.PostList.as_view(),name='update_post'),
    #delete urls
    path('api/delete/users/<int:pk>/',views.UserList.as_view(),name='delete_users'),
    re_path('api/delete/hood/(?P<pk>[0-9]+)/',views.NeighborhoodList.as_view(),name='delete_neighbors'),
    path('api/delete/business/<int:pk>/',views.BusinessList.as_view(),name='delete_business'),
    path('api/delete/post/<int:pk>/',views.PostList.as_view(),name='delete_post'),
    #search urls
    path('api/business/list/<name>',views.BusinessSearchList.as_view(),name='search'),
    # path('loginuser/', views.LoginUser.as_view(), name="loginuser"),
    # DRF's stock token endpoint: POST username/password, get an auth token.
    path('authlogin/', ObtainAuthToken.as_view(), name="authlogin"),
    #singleitems
    path('api/single-hood/<int:pk>/',views.singleHood.as_view()),
    path('api/single-business/<int:pk>/',views.singleBusiness.as_view()),
    path('api/single-post/<int:pk>/',views.singlePost.as_view()),
]
# Serve uploaded media from Django itself in development only.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL,
                          document_root=settings.MEDIA_ROOT)
|
#!/usr/bin/env python3
import time
import re
import os
import logging
import glob
import argparse
import change_dir
import abunpack
import acb2wav
__version__ = "2.2.8"
def main():
    """Extract every given *.ab / *.acb.bytes target.

    Directory targets are expanded (non-recursively) to their *.ab and
    *.acb.bytes members; any other file is reported as unknown.
    """
    # CLI setup
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-o", "--output_dir", help="Master output dir", type=str, default="./")
    arg_parser.add_argument("target", help="*.ab or *.acb.bytes file or folder's path", type=str, nargs="+")
    args = arg_parser.parse_args()
    output_dir = args.output_dir
    file_list = args.target

    # Logging setup: root logger, INFO level, single console handler.
    logger = logging.getLogger("")
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter('%(levelname)s | %(message)s')
    stream_handler = logging.StreamHandler()  # fixed typo: was `stream_hander`
    stream_handler.setFormatter(formatter)
    logger.addHandler(stream_handler)
    # Optional file handler, enabled via the [logger] config section.
    if change_dir.config.getboolean("logger", "logger_save"):
        file_handler = logging.FileHandler('log.txt', encoding='utf-8')
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    # Start-of-extraction banner
    logger.info(f"girlsfrontline-resources-extractor: {__version__}")
    logger.info(f"Start Extracting: {time.strftime('%Y-%m-%d %I:%M:%S')}")

    # Anchor the extension patterns at the end of the name: the previous
    # unanchored patterns also matched names like "foo.ab.bak".
    re_ab = re.compile(r".+\.ab$")
    re_acb = re.compile(r".+\.acb\.bytes$")

    for file_dir in file_list:
        # A directory target expands to the supported files it contains.
        if os.path.isdir(file_dir):
            file_dirs = glob.glob(f"{file_dir}/*.ab") + glob.glob(f"{file_dir}/*.acb.bytes")
        else:
            file_dirs = [file_dir]
        for fd in file_dirs:
            if re_ab.match(fd):
                # AssetBundle file (*.ab)
                logger.info(f"\n=== AssetBundle File: {os.path.split(fd)[1]} ===")
                abunpack.abunpack(fd, output_dir)
            elif re_acb.match(fd):
                # ACB audio container (*.acb.bytes)
                logger.info(f"=== ACB File: {os.path.split(fd)[1]} ===")
                acb2wav.acb2wav(fd, output_dir)
            else:
                # Neither extension matched — warn and skip.
                logger.warning(f"=== Unknown file: {os.path.split(fd)[1]}===")

    # The original used an equivalent (break-less) `for/else`; plain
    # post-loop code expresses the same thing more clearly.
    logger.info(f"Finish Extracting : {time.strftime('%Y-%m-%d %I:%M:%S')}\n\n")
    return
if __name__ == "__main__":
    # Record start time to measure total run time.
    start_time = time.time()
    # Run the extractor.
    main()
    # Report elapsed time (message text is user-facing Korean; left as-is).
    print("=== 소모시간 : %s초 ===" % (time.time() - start_time))
    #os.system('pause')
|
# MIT License
#
# Copyright (c) 2018-2019 Red Hat, Inc.
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from typing import Optional
import gitlab
from ogr.abstract import GitUser
from ogr.exceptions import GitlabAPIException
from ogr.factory import use_for_service
from ogr.services.base import BaseGitService
from ogr.services.gitlab.project import GitlabProject
from ogr.services.gitlab.user import GitlabUser
@use_for_service("gitlab")
class GitlabService(BaseGitService):
    """ogr service implementation for GitLab (gitlab.com or self-hosted)."""

    name = "gitlab"

    def __init__(self, token=None, instance_url=None, ssl_verify=True):
        """
        :param token: private token used to authenticate API calls (optional)
        :param instance_url: base URL of the GitLab instance; gitlab.com by default
        :param ssl_verify: whether to verify TLS certificates
        """
        super().__init__(token=token)
        self.instance_url = instance_url or "https://gitlab.com"
        self.token = token
        self.ssl_verify = ssl_verify
        # Created lazily by the `gitlab_instance` property.
        self._gitlab_instance = None

    @property
    def gitlab_instance(self) -> gitlab.Gitlab:
        """Lazily construct (and, when a token is set, authenticate) the client."""
        if not self._gitlab_instance:
            self._gitlab_instance = gitlab.Gitlab(
                url=self.instance_url,
                private_token=self.token,
                ssl_verify=self.ssl_verify,
            )
            if self.token:
                self._gitlab_instance.auth()
        return self._gitlab_instance

    @property
    def user(self) -> GitUser:
        """User the configured token belongs to."""
        return GitlabUser(service=self)

    def __str__(self) -> str:
        # Only the first and last character of the token are shown.
        token_str = (
            f", token='{self.token[:1]}***{self.token[-1:]}'" if self.token else ""
        )
        ssl_str = ", ssl_verify=False" if not self.ssl_verify else ""
        str_result = (
            f"GitlabService(instance_url='{self.instance_url}'"
            f"{token_str}"
            f"{ssl_str})"
        )
        return str_result

    def __eq__(self, o: object) -> bool:
        if not issubclass(o.__class__, GitlabService):
            return False
        return (
            self.token == o.token  # type: ignore
            and self.instance_url == o.instance_url  # type: ignore
            and self.ssl_verify == o.ssl_verify  # type: ignore
        )

    def __hash__(self) -> int:
        return hash(str(self))

    def get_project(
        self, repo=None, namespace=None, is_fork=False, **kwargs
    ) -> "GitlabProject":
        """Return a project handle; with is_fork=True, the current user's fork."""
        if is_fork:
            namespace = self.user.get_username()
        return GitlabProject(repo=repo, namespace=namespace, service=self, **kwargs)

    def get_project_from_project_id(self, iid: int) -> "GitlabProject":
        """Resolve a numeric project id to a GitlabProject handle."""
        gitlab_repo = self.gitlab_instance.projects.get(iid)
        return GitlabProject(
            repo=gitlab_repo.attributes["path"],
            namespace=gitlab_repo.attributes["namespace"]["full_path"],
            service=self,
            gitlab_repo=gitlab_repo,
        )

    def change_token(self, new_token: str) -> None:
        """Switch to a new token; the cached client is rebuilt on next use."""
        self.token = new_token
        self._gitlab_instance = None

    def project_create(
        self,
        repo: str,
        namespace: Optional[str] = None,
        description: Optional[str] = None,
    ) -> "GitlabProject":
        """Create a new project, optionally inside a group namespace.

        :raises GitlabAPIException: when the given group namespace is not found
        """
        data = {"name": repo}
        if namespace:
            try:
                group = self.gitlab_instance.groups.get(namespace)
            except gitlab.GitlabGetError as ex:
                # Chain the original error so the API failure is not lost.
                raise GitlabAPIException(f"Group {namespace} not found.") from ex
            data["namespace_id"] = group.id
        if description:
            data["description"] = description
        new_project = self.gitlab_instance.projects.create(data)
        return GitlabProject(
            repo=repo, namespace=namespace, service=self, gitlab_repo=new_project
        )
|
#! /usr/bin/python
import sys, os
import pytest
from utils import *
from dynaconfig.render import *
def test_requirements_simple():
    """Expressions referencing absolute (/size) and parent (../size) keys render."""
    logging.info("multi-pass")
    tree = {
        'size': 100,
        'x': {'min': -1., 'max': 2., 'dx': '$( (${max} - ${min}) / ${/size} )'},
        'y': {'min': 1., 'max': 5., 'dy': '$( (${max} - ${min}) / ${../size} )'},
        'layers': [
            {'thickness': 0.1},
            {'thickness': 0.2},
        ],
        'depth': '$(${layers/0/thickness} + ${layers/1/thickness})',
    }

    result = render_tree(tree)

    assert result['x']['dx'] == (2. - (-1.)) / 100
    assert result['y']['dy'] == (5. - 1.) / 100
    assert type(result['x']['dx']) == float
    assert type(result['y']['dy']) == float
@pytest.mark.skip(reason="not working with switch to fspathtree.")
def test_misdirection():
    """Forward references across nesting levels resolve through several passes."""
    logging.info("multi-pass")
    tree = {
        'x': 5,
        'y': '$(2*{x})',
        'z': '$(2*{y})',
        'nested': {
            'x': '$(2*${y})',
            'y': '$(2*${z})',
            'z': 15,
        },
    }

    result = render_tree(tree)

    assert result['x'] == 5
    assert result['y'] == 10
    assert result['z'] == 20
    assert type(result['x']) == int
    assert type(result['y']) == int
    assert type(result['z']) == int
    assert result['nested']['x'] == 60
    assert result['nested']['y'] == 30
    assert result['nested']['z'] == 15
def test_circular_dependency_detection():
    """Mutually-referencing keys raise CircularDependency."""
    logging.info("circular dependency")
    tree = {
        'x': 5,
        'y': '$(2*${z})',
        'z': '$(2*${y})',
    }
    with pytest.raises(CircularDependency):
        render_tree(tree)
def test_replacement_types():
    '''Test that the type of expression replacements are as expected.'''
    logging.info("types")
    config = {
        'x': 5,
        'y': '$(2*${x})',
        'z': '$(2.*${x})',
        'w': ' $(2*${x})',
        'v': '$(2*${x}) & $(3*${x})',
        'u': '2x = $(2*${x}) and 3x = $(3*${x}).',
    }
    result = render_tree(config)
    # a bare $(...) keeps the evaluated type; any surrounding text (even a
    # single leading space) keeps the whole value a string
    assert type(result['x']) == int
    assert type(result['y']) == int
    assert type(result['z']) == float
    assert type(result['w']) == str
    assert type(result['v']) == str
    assert type(result['u']) == str
    assert result['x'] == 5
    assert result['y'] == 10
    assert result['z'] == 10.
    assert result['w'] == ' 10'
    assert result['v'] == '10 & 15'
    assert result['u'] == '2x = 10 and 3x = 15.'
def test_math():
    '''Test math module functions.'''
    logging.info("math")
    config = {
        'x': 5,
        'y': '$(sin(${x}))',
        'z': '$(pi)',
        'w': -2,
        'v': '$(abs(${w}))',
    }
    result = render_tree(config)
    # math-module names (sin, pi, abs, ...) are usable inside $(...) expressions
    assert result['x'] == 5
    assert result['y'] == Approx(math.sin(5))
    assert result['z'] == Approx(3.1415)
    assert result['w'] == -2
    assert result['v'] == 2
@pytest.mark.skip(reason="filters have been removed for now. not sure if we need/will bring them back")
def test_expression_filters():
    '''Test expression filters.'''
    logging.info("filters")
    config = {
        'x': 1.2345,
        'y': '$(${x} | int )',
        'z': '$(${x} | ceil | int )',
        'w': '$(25 | mod 4)',
    }
    result = render_tree(config)
    assert result['x'] == Approx(1.2345)
    assert result['y'] == 1
    assert result['z'] == 2
    assert result['w'] == 1
|
import yaml
import os.path
import re
import sys
kwarg_pattern = re.compile('<([a-zA-Z0-9_]+=?[a-zA-Z0-9_, \(\)\'\"]*)>')
def to_string(value):
    """Convert *value* to its SCPI textual form.

    Lists/tuples become comma-joined strings, ``None`` becomes the empty
    string, everything else is ``str()``-converted.
    """
    # isinstance (rather than `type(value) in (...)`) also accepts subclasses
    # of list/tuple, e.g. collections-derived sequence types.
    if isinstance(value, (list, tuple)):
        return ",".join(map(str, value))
    if value is None:
        return ""
    return str(value)
def indent(text, levels, pad="    "):
    """Indent every line of *text* by *levels* copies of *pad*.

    Used to push generated method bodies to the right depth inside the
    emitted SCPI class.
    """
    padding = pad * levels  # simpler than "".join([pad] * levels)
    return padding + text.replace("\n", "\n" + padding)
def isnumeric(value):
    """Return True when *value* parses as a float (int, decimal or exponent form)."""
    try:
        float(value)
    except ValueError:
        return False
    return True
def process_kwarg_default(value):
    """Normalize a placeholder default into valid Python default-value source.

    List/tuple literals and explicitly quoted strings pass through; numeric
    text passes through unquoted; any other bare word is wrapped in quotes so
    it is a legal keyword-argument default in the generated code.
    """
    if not value:
        # e.g. "<arg=>": the original indexed value[0] and raised IndexError
        # on an empty default; treat it as an empty string default instead.
        return '""'
    if value[0] + value[-1] in ("()", "[]"):
        return value  # default is a list or tuple, assume values were entered correctly
    if value[0] + value[-1] in ('""', "''"):
        return value  # value is an explicit string, return as is
    if isnumeric(value):
        return str(value)
    return '"{:}"'.format(value)  # treat as string, must have quotes to use as a kwarg default value
def parse_command_string(command_string):
    """Split a templated SCPI command into kwarg-signature and command source.

    Returns (kwargs_string, scpi_command): the extra keyword parameters to
    append to the generated method signature, and the Python expression that
    produces the final SCPI string.
    """
    placeholders = kwarg_pattern.findall(command_string)
    kwargs = []
    for placeholder in placeholders:
        if "=" in placeholder:
            name, default = placeholder.split("=")
            default = process_kwarg_default(default)
        else:
            name = placeholder
            default = '""'
        kwargs.append([name, default])
    kwargs_string = "".join(', ' + name + "=" + default for name, default in kwargs)
    if placeholders:
        # Replace each <...> with a format slot and route through scpi_preprocess.
        command_base = kwarg_pattern.sub("{:}", command_string)
        args_string = ", ".join(name for name, _ in kwargs)
        scpi_command = 'scpi_preprocess("{:}", {:})'.format(command_base, args_string)
    else:
        scpi_command = '"{:}"'.format(command_string)
    return kwargs_string, scpi_command
def parse_write_values_string(command_string):
    """
    parse the command string for the write_values scpi command which is a little different than the others:
    the final placeholder is the data payload, defaults to None, and is removed
    from the command template (it is passed to write_*_values separately).
    Parameters
    ----------
    command_string : str
        the input string that will be parsed for keyword arguments
    Returns
    -------
    tuple
        (kwargs_string, scpi_command, data_variable_name)
    """
    # NOTE(review): assumes at least one <placeholder> exists in the string;
    # kwargs[-1] / args[-1] below raise IndexError otherwise -- confirm callers.
    args = kwarg_pattern.findall(command_string)
    kwargs = list()
    for arg in args:
        if "=" in arg:
            kwarg, val = arg.split("=")
            val = process_kwarg_default(val)
        else:
            kwarg = arg
            val = '""'
        kwargs.append([kwarg, val])
    kwargs[-1][1] = "None" # data_values will be set to None as default
    kwargs_string = "".join([', ' + kwarg + "=" + val for kwarg, val in kwargs])
    # Drop the data placeholder from the command template entirely.
    command_string = command_string.replace("<{:}>".format(args[-1]), "")
    command_base = kwarg_pattern.sub("{:}", command_string)
    args_string = ", ".join(kwarg for kwarg, val in kwargs[:-1]) # last arg is the data we pass in
    scpi_command = 'scpi_preprocess("{:}", {:})'.format(command_base, args_string)
    return kwargs_string, scpi_command, kwargs[-1][0]
def generate_set_string(command, command_root):
    """Render the source of a ``set_<name>`` method for one SCPI command spec."""
    raw_command = " ".join((command_root, to_string(command["set"]))).strip()
    kwargs_string, scpi_command = parse_command_string(raw_command)
    # Fall back to placeholder help text; expand tabs so the generated
    # docstring indents cleanly.
    command.setdefault('help', 'no help available')
    command['help'] = command['help'].replace('\t', '    ')
    return """def set_{:s}(self{:}):
    \"\"\"{:s}\"\"\"
    scpi_command = {:}
    self.write(scpi_command)""".format(command['name'], kwargs_string, command['help'], scpi_command)
def generate_set_values_string(command, command_root):
    """Render ``set_<name>_ascii`` and ``set_<name>_binary`` method sources.

    Both variants share the same SCPI template; the trailing data placeholder
    (extracted by parse_write_values_string) is forwarded to the VISA
    write_ascii_values / write_binary_values call.
    """
    command_string = " ".join((command_root, to_string(command["set_values"]))).strip()
    kwargs_string, scpi_command, data_variable = parse_write_values_string(command_string)
    if 'help' not in command:
        command['help'] = 'no help available'
    command['help'] = command['help'].replace('\t', '    ')
    function_string = \
        """def set_{name:s}_ascii(self{args:}):
    \"\"\"{help:s}\"\"\"
    scpi_command = {scpi:}
    self.write_ascii_values(scpi_command, {data:})
def set_{name:s}_binary(self{args:}):
    \"\"\"{help:s}\"\"\"
    scpi_command = {scpi:}
    self.write_binary_values(scpi_command, {data:})
""".format(
        name=command['name'], args=kwargs_string, help=command['help'], scpi=scpi_command, data=data_variable)
    return function_string
def generate_query_string(command, command_root):
    """Render the source of a ``query_<name>`` method for one SCPI command spec.

    The spec may request post-processing of the raw reply via the optional
    keys ``returns`` (str/int/float/bool), ``csv`` and ``strip_outer_quotes``.
    Raises ValueError when ``returns`` names an unsupported converter.
    """
    command_string = "? ".join((command_root, to_string(command["query"]))).strip()
    kwargs_string, scpi_command = parse_command_string(command_string)
    if 'help' not in command:
        command['help'] = 'no help available'
    command['help'] = command['help'].replace('\t', '    ')
    converter = command.get('returns', "str")
    valid_converters = ("int", "str", "float", "bool")
    if converter not in valid_converters:
        raise ValueError("""error in processing command {:}
        returns value '{:}' is invalid
        must be one of {:}
        """.format(command_string, converter, ", ".join(valid_converters)))
    # Only emit a process_query() line when some post-processing is needed.
    pre_line = ""
    strip_outer_quotes = bool(command.get("strip_outer_quotes", True))
    csv = bool(command.get('csv', False))
    if csv or strip_outer_quotes or converter != "str":
        pre_line = \
            "\n    value = process_query(value, csv={:}, strip_outer_quotes={:}, returns='{:}')".format(
                csv, strip_outer_quotes, converter
            )
    function_string = \
        """def query_{:s}(self{:}):
    \"\"\"{:s}\"\"\"
    scpi_command = {:}
    value = self.query(scpi_command){:}
    return value""".format(command['name'], kwargs_string, command['help'], scpi_command, pre_line)
    return function_string
def generate_query_values_string(command, command_root):
    """Render ``query_<name>_ascii`` and ``query_<name>_binary`` method sources.

    Both variants issue the same templated query; they differ only in the
    VISA value-transfer call used to read the reply.
    """
    command_string = "? ".join((command_root, to_string(command["query_values"]))).strip()
    kwargs_string, scpi_command = parse_command_string(command_string)
    if 'help' not in command:
        command['help'] = 'no help available'
    command['help'] = command['help'].replace('\t', '    ')
    function_string = \
        """def query_{name:s}_ascii(self{args:}):
    \"\"\"{help:s}\"\"\"
    scpi_command = {scpi:}
    return self.query_ascii_values(scpi_command)
def query_{name:s}_binary(self{args:}):
    \"\"\"{help:s}\"\"\"
    scpi_command = {scpi:}
    return self.query_binary_values(scpi_command)
""".format(
        name=command['name'], args=kwargs_string, help=command['help'], scpi=scpi_command)
    return function_string
def parse_branch(branch, set_strings=None, query_strings=None, query_value_strings=None, root=""):
    """Recursively walk a YAML command-tree branch, collecting method sources.

    A node is a command when it has a "name" key, a command-plus-subtree when
    it has "command"/"branch" keys, and a plain subtree otherwise.  Returns
    ``(set_strings, query_strings)``.

    ``query_value_strings`` is accepted for backward compatibility but is
    never populated: query_values commands are appended to ``query_strings``.
    """
    if set_strings is None:
        set_strings = list()
    if query_strings is None:
        query_strings = list()
    if query_value_strings is None:
        query_value_strings = list()
    for key, value in branch.items():
        command_root = root + ":" + key
        command = None
        sub_branch = None  # renamed: the original rebound `branch`, shadowing the argument
        try:
            if "name" in value.keys():
                command = value
            elif "command" in value.keys():
                command = value["command"]
                sub_branch = value["branch"]
            else:
                sub_branch = value
        except Exception:
            # Malformed node (e.g. a scalar where a mapping was expected):
            # show where it happened, then re-raise with the original
            # type/traceback instead of wrapping it in a bare Exception.
            print(key, value)
            raise
        if command:
            if "set" in command.keys():
                set_strings.append(generate_set_string(command, command_root))
            if "set_values" in command.keys():
                set_strings.append(generate_set_values_string(command, command_root))
            if "query" in command.keys():
                query_strings.append(generate_query_string(command, command_root))
            if "query_values" in command.keys():
                query_strings.append(generate_query_values_string(command, command_root))
        if sub_branch:
            parse_branch(sub_branch, set_strings, query_strings, query_value_strings, command_root)
    return set_strings, query_strings
header_string = """import re
null_parameter = re.compile(",{2,}") # detect optional null parameter as two consecutive commas, and remove
converters = {
"str": str,
"int": int,
"float": float,
"bool": lambda x: bool(int(x)),
}"""
string_converter = """def to_string(value):
tval = type(value)
if tval is str:
return value
elif tval is bool:
return str(int(value))
elif tval in (list, tuple):
return ",".join(map(to_string, value))
elif value is None:
return ""
else:
return str(value)"""
scpi_preprocessor = """def scpi_preprocess(command_string, *args):
args = list(args)
for i, arg in enumerate(args):
args[i] = to_string(arg)
cmd = command_string.format(*args)
return null_parameter.sub(",", cmd)"""
query_processor = """def process_query(query, csv=False, strip_outer_quotes=True, returns="str"):
if strip_outer_quotes is True:
if query[0] + query[-1] in ('""', "''"):
query = query[1:-1]
if csv is True:
query = query.split(",")
converter = None if returns == "str" else converters.get(returns, None)
if converter:
if csv is True:
query = list(map(converter, query))
else:
query = converter(query)
return query"""
class_header = """class SCPI(object):
def __init__(self, resource):
self.resource = resource
self.echo = False # print scpi command string to scpi out
def write(self, scpi, *args, **kwargs):
if self.echo:
print(scpi)
self.resource.write(scpi, *args, **kwargs)
def query(self, scpi, *args, **kwargs):
if self.echo:
print(scpi)
return self.resource.query(scpi, *args, **kwargs)
def write_ascii_values(self, scpi, *args, **kwargs):
if self.echo:
print(scpi)
self.resource.write_ascii_values(scpi, *args, **kwargs)
def write_binary_values(self, scpi, *args, **kwargs):
if self.echo:
print(scpi)
self.resource.write_binary_values(scpi, *args, **kwargs)
def query_ascii_values(self, scpi, *args, **kwargs):
if self.echo:
print(scpi)
return self.resource.query_ascii_values(scpi, *args, **kwargs)
def query_binary_values(self, scpi, *args, **kwargs):
if self.echo:
print(scpi)
return self.resource.query_binary_values(scpi, *args, **kwargs)
"""
def parse_yaml_file(driver_yaml_file):
    """Generate a Python SCPI driver next to *driver_yaml_file*.

    Reads the YAML command tree under the "COMMAND_TREE" key, renders all
    set_*/query_* methods, and writes "<same basename>.py" beside the input.
    """
    driver = os.path.splitext(driver_yaml_file)[0] + ".py"
    # (removed redundant `driver_template = None` pre-initialization)
    with open(driver_yaml_file, 'r', encoding='utf-8') as yaml_file:
        driver_template = yaml.safe_load(yaml_file)
    sets, queries = parse_branch(driver_template["COMMAND_TREE"])
    # Helper functions first, then the SCPI class with its generated methods
    # sorted case-insensitively for stable, readable output.
    driver_str = "\n\n\n".join((header_string, string_converter, scpi_preprocessor, query_processor)) + "\n\n\n"
    driver_str += class_header
    for s in sorted(sets, key=str.lower):
        driver_str += "\n" + indent(s, 1) + "\n"
    for q in sorted(queries, key=str.lower):
        driver_str += "\n" + indent(q, 1) + "\n"
    # encoding spelling normalized to 'utf-8' (was 'utf8') for consistency.
    with open(driver, 'w', encoding='utf-8') as scpi_driver:
        scpi_driver.write(driver_str)
if __name__ == "__main__":
    # Usage: python <this_script>.py <driver>.yaml
    driver_yaml_file = os.path.abspath(sys.argv[1])
    parse_yaml_file(driver_yaml_file)
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
class NodeLintIntegrationTest(PantsRunIntegrationTest):
    """Integration tests for the `lint` goal on node/javascript example targets."""
    def test_lint_success(self):
        # The stock hello example is expected to be lint-clean.
        command = ['lint',
                   'contrib/node/examples/src/node/hello::']
        pants_run = self.run_pants(command=command)
        self.assert_success(pants_run)
    def test_lint_success_with_target_level_ignore(self):
        # index.js is covered by the target's ignore configuration, so the
        # double-quote style violation must not fail the build.
        path = 'contrib/node/examples/src/node/javascriptstyle-empty/index.js'
        content = 'const console = require(\'console\');\nconsole.log("Double Quotes");\n'
        with self.temporary_file_content(path, content, binary_mode=False):
            command = ['lint',
                       'contrib/node/examples/src/node/javascriptstyle-empty']
            pants_run = self.run_pants(command=command)
            self.assert_success(pants_run)
    def test_lint_failure_without_target_level_ignore(self):
        # Same content in a file that is NOT ignored must fail linting.
        path = 'contrib/node/examples/src/node/javascriptstyle-empty/not_ignored_index.js'
        content = 'const console = require(\'console\');\nconsole.log("Double Quotes");\n'
        with self.temporary_file_content(path, content, binary_mode=False):
            command = ['lint',
                       'contrib/node/examples/src/node/javascriptstyle-empty']
            pants_run = self.run_pants(command=command)
            self.assert_failure(pants_run)
|
'''Immutable dict
'''
try:
    from collections.abc import Mapping  # Python 3.3+; mandatory from 3.10
except ImportError:  # pragma: no cover - Python 2 fallback
    from collections import Mapping
__all__ = ['FrozenDict']
class FrozenDict(Mapping):
    '''An immutable, hashable dict.

    Wraps a shallow copy of the given items; hashable only when all values
    are hashable, and equal FrozenDicts hash equal.
    '''
    def __init__(self, items):
        # dict() accepts a mapping OR an iterable of (key, value) pairs and
        # makes a shallow copy, so later mutation of `items` cannot leak in.
        self._m = dict(items)
        self._h = None  # lazily-computed hash cache
    def __len__(self):
        return len(self._m)
    def __iter__(self):
        return self._m.__iter__()
    def __contains__(self, key):
        return self._m.__contains__(key)
    def __eq__(self, other):
        return self._m.__eq__(other)
    def __ne__(self, other):
        return self._m.__ne__(other)
    def __getitem__(self, key):
        return self._m.__getitem__(key)
    def __hash__(self):
        # The original implementation called bytes('frzd'), which raises
        # TypeError on Python 3, so hashing always crashed.  Hash an
        # order-insensitive frozenset of items instead: equal mappings
        # produce equal hashes, as required by the __eq__/__hash__ contract.
        if self._h is None:
            self._h = hash(frozenset(self._m.items()))
        return self._h
    def keys(self):
        return self._m.keys()
    def items(self):
        return self._m.items()
    def values(self):
        return self._m.values()
    def get(self, key, default=None):
        return self._m.get(key, default)
|
import pygame
# Board geometry: square window, 8x8 checkers grid.
WIDTH, HEIGHT = 800, 800
ROWS, COLS = 8, 8
SQUARE_SIZE = WIDTH//COLS  # pixel size of one board square
# RGB colours
RED = (255, 0, 0)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
BLUE = (0, 0, 255)
GREY = (128, 128, 128)
# Display names for the two sides.
WHITE_STRING = "White"
RED_STRING = "Red"
# King-marker sprite, scaled to fit inside a piece; loaded at import time,
# so importing this module requires the asset file to exist.
CROWN = pygame.transform.scale(pygame.image.load('checkers/assets/crown.png'), (44, 25))
|
"""
Use: "python ...\Tools\visitor_edit.py string rootdir?".
Add auto-editor startup to SearchVisitor in an external subclass component;
Automatically pops up an editor on each file containing string as it traverses;
can also use editor='edit' or 'notepad' on Windows; to use texteditor from
later in the book, try r'python Gui\TextEditor\textEditor.py'; could also
send a search command to go to the first match on start in some editors;
"""
import os, sys
from visitor import SearchVisitor
class EditVisitor(SearchVisitor):
    """
    edit files at and below startDir having string: pops up the configured
    editor on each matching file as the tree is traversed
    """
    editor = r'C:\cygwin\bin\vim-nox.exe' # ymmv!
    def visitmatch(self, fpathname, text):
        # Blocks until the editor exits before visiting the next match.
        # NOTE(review): fpathname is interpolated into a shell command
        # unquoted -- paths containing spaces or shell metacharacters will
        # break (or execute); consider subprocess.run with an argument list.
        os.system('%s %s' % (self.editor, fpathname))
if __name__ == '__main__':
    # argv[1] = search string; argv[2] = optional root directory (default '.')
    visitor = EditVisitor(sys.argv[1])
    visitor.run('.' if len(sys.argv) < 3 else sys.argv[2])
    print('Edited %d files, visited %d' % (visitor.scount, visitor.fcount))
|
from tkinter import *
import time
from pygame import mixer
import datetime
root=Tk()  # main application window
"""Define the count down function"""
def click(event):
    """Handle a keypad button press.

    "Start" counts down from the number currently in the display, "C"
    clears the display, and any digit button appends its digit.  Every
    press is also logged to history.txt with a timestamp.
    """
    global t
    text=event.widget.cget('text')
    if text=="Start":
        try:
            # Count down to -1; -1 shows the "Times Up" message for two
            # seconds and then clears the display.
            # NOTE(review): time.sleep blocks the Tk event loop; the UI is
            # kept alive only by the explicit e.update() calls.
            for i in range (int(t.get()),-2,-1):
                if i==-1:
                    t.set("Times Up... \U0001F923")
                    e.update()
                    time.sleep(2)
                    t.set("")
                    break
                time.sleep(1)
                t.set(f"{i} Sec..")
                e.update()
        except Exception as err:
            # e.g. the display does not contain an integer
            t.set(err)
    elif text=="C":
        t.set("")
    else:
        # Digit button: append it to the current display value.
        # (removed the original unused `k=` binding -- t.set returns None)
        t.set(t.get()+str(text))
    e.update()
    # Log every interaction (including Start/C presses) to the history file.
    with open("history.txt","a") as g:
        g.write(f"\nAt time {time.asctime(time.localtime(time.time()))}\nAlarm for {t.get()}seconds ")
"""define privious fn"""
def privious():
j=Toplevel(root)
with open("history.txt","r") as o:
Label(j,text=str(o.read()),font="arial 20 bold",bg="red").pack()
t=StringVar()  # the countdown display value, shared with click()
# Fixed-size, non-resizable window.
root.geometry("300x500")
root.maxsize(300,500)
root.title("Stop Watch")
root.configure(background="pink")
"""menu part"""
mainmenu=Menu(root,)
m1=Menu(mainmenu,tearoff=0)
m1.add_command(label="See Previous set time",command=privious)
m1.add_command(label="exit",command=exit,activebackground="black")
mainmenu.add_cascade(label="Options",menu=m1)
mainmenu.add_cascade(label="Exit",command=quit)
root.configure(menu=mainmenu)
"""entry"""
# Read-only display; updated programmatically via the StringVar.
e=Entry(root,textvariable=t,font="comiSansms 30 bold",bd=2,fg="#2F4F4F",state=DISABLED)
e.pack(side=TOP,fill=X,pady=2)
"""frame works"""
# One frame per keypad row (1-3, 4-6, 7-9, bottom row).
f1=Frame(root,bg="pink")
f1.pack(pady=10)
f2=Frame(root,bg="pink")
f2.pack(pady=10)
f3=Frame(root,bg="pink")
f3.pack(pady=10)
f4=Frame(root,bg="pink")
f4.pack(pady=10)
"""button"""
# Digit buttons 1-9, three per row; all share the click() handler.
for i in range(1,10,1):
    if i<4:
        b=Button(f1,text=i,width=7,height=5,font="lucida 10 bold",bg="#F0FFF0")
        b.pack(side=LEFT,padx=10,pady=3)
        b.bind("<Button-1>",click)
    elif i<7:
        b=Button(f2,text=i,width=7,height=5,font="lucida 10 bold",bg="#FFF5EE")
        b.pack(side=LEFT,padx=10,pady=3)
        b.bind("<Button-1>", click)
    elif i<10:
        b=Button(f3,text=i,width=7,height=5,font="lucida 10 bold",bg="#FFF0F5")
        b.pack(side=LEFT,padx=10,pady=3)
        b.bind("<Button-1>", click)
# Bottom row: clear, zero and start.
b=Button(f4,text="C",width=7,height=5,font="lucida 10 bold",bg="red")
b.pack(side=LEFT, padx=10, pady=3)
b.bind("<Button-1>",click)
b=Button(f4,text="0",width=7,height=5,font="lucida 10 bold")
b.pack(side=LEFT, padx=10, pady=3)
b.bind("<Button-1>",click)
b=Button(f4,text="Start",width=7,height=5,font="lucida 10 bold",bg="green")
b.pack(side=LEFT, padx=10, pady=3)
b.bind("<Button-1>",click)
root.mainloop()
|
import os
import sklearn.metrics
import numpy as np
import sys
import time
from . import sentence_encoder
from . import data_loader
import torch
from torch import autograd, optim, nn
from torch.autograd import Variable
from torch.nn import functional as F
# from pytorch_pretrained_bert import BertAdam
from transformers import AdamW, get_linear_schedule_with_warmup
def warmup_linear(global_step, warmup_step):
    """Linear warm-up factor: ramps from 0 to 1 over ``warmup_step`` steps."""
    if global_step >= warmup_step:
        return 1.0
    return global_step / warmup_step
class FewShotREModel(nn.Module):
    """Base class for few-shot relation-extraction models.

    Subclasses implement ``forward``; ``loss`` and ``accuracy`` are shared.
    Override ``self.cost`` to use a custom loss function.
    """
    def __init__(self, sentence_encoder):
        """sentence_encoder: module that embeds support/query sentences."""
        super(FewShotREModel, self).__init__()
        # Wrap for multi-GPU data parallelism (no-op on a single device).
        self.sentence_encoder = nn.DataParallel(sentence_encoder)
        self.cost = nn.CrossEntropyLoss()
    def forward(self, support, query, N, K, Q):
        """Compute (logits, pred) for one few-shot episode.

        support/query: inputs of the support/query sets; N: number of
        classes; K/Q: instances per class in the support/query sets.
        """
        raise NotImplementedError
    def loss(self, logits, label):
        """Cross-entropy between flattened logits (..., class_num) and labels."""
        num_classes = logits.size(-1)
        flat_logits = logits.view(-1, num_classes)
        return self.cost(flat_logits, label.view(-1))
    def accuracy(self, pred, label):
        """Fraction of predictions equal to labels, as a scalar FloatTensor."""
        correct = (pred.view(-1) == label.view(-1)).type(torch.FloatTensor)
        return correct.mean()
class FewShotREFramework:
    """Training/evaluation driver for FewShotREModel instances, with optional
    adversarial domain adaptation via a discriminator network."""
    def __init__(self, train_data_loader, val_data_loader, test_data_loader, adv_data_loader=None, adv=False, d=None):
        '''
        train_data_loader: DataLoader for training.
        val_data_loader: DataLoader for validating.
        test_data_loader: DataLoader for testing.
        adv_data_loader: DataLoader of target-domain data (adversarial mode only).
        adv: enable adversarial domain-discriminator training.
        d: discriminator network; moved to GPU when adv is True.
        '''
        self.train_data_loader = train_data_loader
        self.val_data_loader = val_data_loader
        self.test_data_loader = test_data_loader
        self.adv_data_loader = adv_data_loader
        self.adv = adv
        if adv:
            self.adv_cost = nn.CrossEntropyLoss()
            self.d = d
            self.d.cuda()  # adversarial mode assumes CUDA is available
    def __load_model__(self, ckpt):
        '''
        ckpt: Path of the checkpoint
        return: Checkpoint dict
        '''
        if os.path.isfile(ckpt):
            checkpoint = torch.load(ckpt)
            print("Successfully loaded checkpoint '%s'" % ckpt)
            return checkpoint
        else:
            raise Exception("No checkpoint found at '%s'" % ckpt)
    def item(self, x):
        '''
        PyTorch before and after 0.4: scalar tensors were indexed pre-0.4,
        and use .item() afterwards.
        '''
        torch_version = torch.__version__.split('.')
        if int(torch_version[0]) == 0 and int(torch_version[1]) < 4:
            return x[0]
        else:
            return x.item()
    def train(self,
              model,
              model_name,
              B, N_for_train, N_for_eval, K, Q,
              na_rate=0,
              learning_rate=1e-1,
              lr_step_size=20000,
              weight_decay=1e-5,
              train_iter=30000,
              val_iter=1000,
              val_step=2000,
              test_iter=3000,
              load_ckpt=None,
              save_ckpt=None,
              pytorch_optim=optim.SGD,
              bert_optim=False,
              warmup=True,
              warmup_step=300,
              grad_iter=1,
              fp16=False,
              pair=False,
              adv_dis_lr=1e-1,
              adv_enc_lr=1e-1):
        '''
        model: a FewShotREModel instance
        model_name: Name of the model
        B: Batch size
        N: Num of classes for each batch
        K: Num of instances for each class in the support set
        Q: Num of instances for each class in the query set
        ckpt_dir: Directory of checkpoints
        learning_rate: Initial learning rate
        lr_step_size: Decay learning rate every lr_step_size steps
        weight_decay: Rate of decaying weight
        train_iter: Num of iterations of training
        val_iter: Num of iterations of validating
        val_step: Validate every val_step steps
        test_iter: Num of iterations of testing
        pair: inputs arrive as joined (support, query) pairs in one batch
        grad_iter: accumulate gradients over this many iterations per step
        '''
        print("Start training...")
        # Init: BERT-style AdamW + linear warmup, or a plain optimizer + StepLR.
        if bert_optim:
            print('Use bert optim!')
            parameters_to_optimize = list(model.named_parameters())
            # No weight decay on biases and LayerNorm parameters.
            no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
            parameters_to_optimize = [
                {'params': [p for n, p in parameters_to_optimize
                    if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
                {'params': [p for n, p in parameters_to_optimize
                    if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
            ]
            optimizer = AdamW(parameters_to_optimize, lr=2e-5, correct_bias=False)
            if self.adv:
                optimizer_encoder = AdamW(parameters_to_optimize, lr=1e-5, correct_bias=False)
            scheduler = get_linear_schedule_with_warmup(optimizer, num_warmup_steps=warmup_step, num_training_steps=train_iter)
        else:
            optimizer = pytorch_optim(model.parameters(),
                    learning_rate, weight_decay=weight_decay)
            if self.adv:
                optimizer_encoder = pytorch_optim(model.parameters(), lr=adv_enc_lr)
            scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=lr_step_size)
        if self.adv:
            optimizer_dis = pytorch_optim(self.d.parameters(), lr=adv_dis_lr)
        if load_ckpt:
            # Copy only the parameters whose names exist in the current model.
            state_dict = self.__load_model__(load_ckpt)['state_dict']
            own_state = model.state_dict()
            for name, param in state_dict.items():
                if name not in own_state:
                    continue
                own_state[name].copy_(param)
            start_iter = 0
        else:
            start_iter = 0
        if fp16:
            from apex import amp
            model, optimizer = amp.initialize(model, optimizer, opt_level='O1')
        model.train()
        if self.adv:
            self.d.train()
        # Training
        best_acc = 0
        not_best_count = 0 # Stop training after several epochs without improvement.
        iter_loss = 0.0
        iter_loss_dis = 0.0
        iter_right = 0.0
        iter_right_dis = 0.0
        iter_sample = 0.0
        for it in range(start_iter, start_iter + train_iter):
            if pair:
                batch, label = next(self.train_data_loader)
                if torch.cuda.is_available():
                    for k in batch:
                        batch[k] = batch[k].cuda()
                    label = label.cuda()
                logits, pred = model(batch, N_for_train, K,
                        Q * N_for_train + na_rate * Q)
            else:
                support, query, label = next(self.train_data_loader)
                if torch.cuda.is_available():
                    for k in support:
                        support[k] = support[k].cuda()
                    for k in query:
                        query[k] = query[k].cuda()
                    label = label.cuda()
                logits, pred = model(support, query,
                        N_for_train, K, Q * N_for_train + na_rate * Q)
            # Scale the loss so gradient accumulation averages over grad_iter steps.
            loss = model.loss(logits, label) / float(grad_iter)
            right = model.accuracy(pred, label)
            if fp16:
                with amp.scale_loss(loss, optimizer) as scaled_loss:
                    scaled_loss.backward()
                    # torch.nn.utils.clip_grad_norm_(amp.master_params(optimizer), 10)
            else:
                loss.backward()
                # torch.nn.utils.clip_grad_norm_(model.parameters(), 10)
            if it % grad_iter == 0:
                optimizer.step()
                scheduler.step()
                optimizer.zero_grad()
            # Adv part: train the discriminator to tell source from target
            # features, then train the encoder to fool it (flipped labels).
            if self.adv:
                support_adv = next(self.adv_data_loader)
                if torch.cuda.is_available():
                    for k in support_adv:
                        support_adv[k] = support_adv[k].cuda()
                features_ori = model.sentence_encoder(support)
                features_adv = model.sentence_encoder(support_adv)
                features = torch.cat([features_ori, features_adv], 0)
                total = features.size(0)
                # First half = source domain (label 0), second half = target (label 1).
                dis_labels = torch.cat([torch.zeros((total//2)).long().cuda(),
                    torch.ones((total//2)).long().cuda()], 0)
                dis_logits = self.d(features)
                loss_dis = self.adv_cost(dis_logits, dis_labels)
                _, pred = dis_logits.max(-1)
                right_dis = float((pred == dis_labels).long().sum()) / float(total)
                loss_dis.backward(retain_graph=True)
                optimizer_dis.step()
                optimizer_dis.zero_grad()
                optimizer_encoder.zero_grad()
                # Encoder objective: inverted domain labels (confuse the discriminator).
                loss_encoder = self.adv_cost(dis_logits, 1 - dis_labels)
                loss_encoder.backward(retain_graph=True)
                optimizer_encoder.step()
                optimizer_dis.zero_grad()
                optimizer_encoder.zero_grad()
                iter_loss_dis += self.item(loss_dis.data)
                iter_right_dis += right_dis
            iter_loss += self.item(loss.data)
            iter_right += self.item(right.data)
            iter_sample += 1
            if self.adv:
                sys.stdout.write('step: {0:4} | loss: {1:2.6f}, accuracy: {2:3.2f}%, dis_loss: {3:2.6f}, dis_acc: {4:2.6f}'
                    .format(it + 1, iter_loss / iter_sample,
                        100 * iter_right / iter_sample,
                        iter_loss_dis / iter_sample,
                        100 * iter_right_dis / iter_sample) +'\r')
            else:
                if ((it+1) % 100 == 0):
                    print('step: {0:4} | loss: {1:2.6f}, accuracy: {2:3.2f}%'.format(it + 1, iter_loss / iter_sample, 100 * iter_right / iter_sample) +'\r')
            #sys.stdout.flush()
            if (it + 1) % val_step == 0:
                print('-------------------------------')
                print('Evaluation with evaluation data')
                acc = self.eval(model, B, N_for_eval, K, Q, val_iter,
                        na_rate=na_rate, pair=pair)
                print(' Evaluation accuracy: %f' % acc)
                model.train()
                if acc > best_acc:
                    print('Best checkpoint -> Saving checkpoint with accuracy of %f' % acc)
                    torch.save({'state_dict': model.state_dict()}, save_ckpt)
                    best_acc = acc
                # Reset running averages after each validation round.
                iter_loss = 0.
                iter_loss_dis = 0.
                iter_right = 0.
                iter_right_dis = 0.
                iter_sample = 0.
        print("\n####################\n")
        print("Finish training " + model_name)
    def eval(self,
             model,
             B, N, K, Q,
             eval_iter,
             na_rate=0,
             pair=False,
             ckpt=None):
        '''
        model: a FewShotREModel instance
        B: Batch size
        N: Num of classes for each batch
        K: Num of instances for each class in the support set
        Q: Num of instances for each class in the query set
        eval_iter: Num of iterations
        ckpt: Checkpoint path. Set as None if using current model parameters
              (validation split); otherwise the checkpoint is loaded and the
              test split is used.
        return: Accuracy
        '''
        #print("")
        model.eval()
        if ckpt is None:
            eval_dataset = self.val_data_loader
        else:
            state_dict = self.__load_model__(ckpt)['state_dict']
            own_state = model.state_dict()
            for name, param in state_dict.items():
                if name not in own_state:
                    continue
                own_state[name].copy_(param)
            eval_dataset = self.test_data_loader
        iter_right = 0.0
        iter_sample = 0.0
        with torch.no_grad():
            for it in range(eval_iter):
                if pair:
                    batch, label = next(eval_dataset)
                    if torch.cuda.is_available():
                        for k in batch:
                            batch[k] = batch[k].cuda()
                        label = label.cuda()
                    logits, pred = model(batch, N, K, Q * N + Q * na_rate)
                else:
                    support, query, label = next(eval_dataset)
                    if torch.cuda.is_available():
                        for k in support:
                            support[k] = support[k].cuda()
                        for k in query:
                            query[k] = query[k].cuda()
                        label = label.cuda()
                    logits, pred = model(support, query, N, K, Q * N + Q * na_rate)
                right = model.accuracy(pred, label)
                iter_right += self.item(right.data)
                iter_sample += 1
                if ((it+1) % 100 == 0):
                    print('[EVAL] step: {0:4} | accuracy: {1:3.2f}%'.format(it + 1, 100 * iter_right / iter_sample) +'\r')
                #sys.stdout.flush()
            #print("")
        return iter_right / iter_sample
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
# Public surface of this generated module.
__all__ = [
    'GetPolicyAssignmentResult',
    'AwaitableGetPolicyAssignmentResult',
    'get_policy_assignment',
]
@pulumi.output_type
class GetPolicyAssignmentResult:
    """
    The policy assignment.
    """
    def __init__(__self__, display_name=None, id=None, name=None, policy_definition_id=None, scope=None, type=None):
        # Validate each field is a str (when provided), then store it via
        # pulumi.set so the @pulumi.output_type machinery tracks the values.
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if policy_definition_id and not isinstance(policy_definition_id, str):
            raise TypeError("Expected argument 'policy_definition_id' to be a str")
        pulumi.set(__self__, "policy_definition_id", policy_definition_id)
        if scope and not isinstance(scope, str):
            raise TypeError("Expected argument 'scope' to be a str")
        pulumi.set(__self__, "scope", scope)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """
        The display name of the policy assignment.
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter
    def id(self) -> Optional[str]:
        """
        The ID of the policy assignment.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def name(self) -> Optional[str]:
        """
        The name of the policy assignment.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="policyDefinitionId")
    def policy_definition_id(self) -> Optional[str]:
        """
        The ID of the policy definition.
        """
        return pulumi.get(self, "policy_definition_id")
    @property
    @pulumi.getter
    def scope(self) -> Optional[str]:
        """
        The scope for the policy assignment.
        """
        return pulumi.get(self, "scope")
    @property
    @pulumi.getter
    def type(self) -> Optional[str]:
        """
        The type of the policy assignment.
        """
        return pulumi.get(self, "type")
class AwaitableGetPolicyAssignmentResult(GetPolicyAssignmentResult):
    """Awaitable variant: lets callers ``await`` the result in async code
    while remaining usable as a plain (already-resolved) result object."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable `yield` makes this a generator so __await__ is
        # valid; awaiting immediately returns a copy of the resolved result.
        if False:
            yield self
        return GetPolicyAssignmentResult(
            display_name=self.display_name,
            id=self.id,
            name=self.name,
            policy_definition_id=self.policy_definition_id,
            scope=self.scope,
            type=self.type)
def get_policy_assignment(policy_assignment_name: Optional[str] = None,
                          scope: Optional[str] = None,
                          opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPolicyAssignmentResult:
    """
    The policy assignment.
    :param str policy_assignment_name: The name of the policy assignment to get.
    :param str scope: The scope of the policy assignment.
    :return: An awaitable result describing the policy assignment.
    """
    __args__ = dict()
    __args__['policyAssignmentName'] = policy_assignment_name
    __args__['scope'] = scope
    # Default the invoke options and pin the provider version if unset.
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:authorization/v20151001preview:getPolicyAssignment', __args__, opts=opts, typ=GetPolicyAssignmentResult).value
    return AwaitableGetPolicyAssignmentResult(
        display_name=__ret__.display_name,
        id=__ret__.id,
        name=__ret__.name,
        policy_definition_id=__ret__.policy_definition_id,
        scope=__ret__.scope,
        type=__ret__.type)
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ..featuredetection import DilateMask
def test_DilateMask_inputs():
    """Verify the declared input-trait metadata of DilateMask matches the spec."""
    input_map = dict(
        args=dict(argstr="%s",),
        environ=dict(nohash=True, usedefault=True,),
        inputBinaryVolume=dict(argstr="--inputBinaryVolume %s", extensions=None,),
        inputVolume=dict(argstr="--inputVolume %s", extensions=None,),
        lowerThreshold=dict(argstr="--lowerThreshold %f",),
        outputVolume=dict(argstr="--outputVolume %s", hash_files=False,),
        sizeStructuralElement=dict(argstr="--sizeStructuralElement %d",),
    )
    inputs = DilateMask.input_spec()
    # Every expected (trait, metadata) pair must be present on the spec.
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(inputs.traits()[key], metakey) == value
def test_DilateMask_outputs():
    """Verify the declared output-trait metadata of DilateMask matches the spec."""
    output_map = dict(outputVolume=dict(extensions=None,),)
    outputs = DilateMask.output_spec()
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            assert getattr(outputs.traits()[key], metakey) == value
|
from tkinter import *
from speedtest import Speedtest
#Function for checking the speed
def update_text():
    """Run a full speed test and write the results into the three labels.

    Blocks the Tk event loop for the duration of the test (network-bound).
    """
    speed_test = Speedtest()
    # Raw results are bits/second; convert to Mbps with two decimals.
    download = speed_test.download()
    upload = speed_test.upload()
    download_speed = round(download / (10**6), 2)
    upload_speed = round(upload / (10**6), 2)
    # NOTE(review): get_servers is called after the download/upload runs;
    # presumably it refreshes results.ping here -- confirm against the
    # speedtest-cli API before reordering.
    speed_test.get_servers([])
    ping=speed_test.results.ping
    down_label.config(text= "Download Speed - " + str(download_speed) + "Mbps")
    up_label.config(text= "Upload Speed - " + str(upload_speed) + "Mbps")
    ping_label.config(text="Ping - " + str(ping) + "ms")
root = Tk()
#Title Of Application
root.title("Internet Speed Tracker")
#Dimension of application window (fixed size, not resizable)
root.geometry('350x350')
root.resizable(0,0)
root.config(bg="cyan")
# Shared fonts for the result labels and the helper text.
font1=("Times",14,"bold")
font2=("Times",12,"bold","italic")
#Added the logo to application (loaded at startup; file must exist)
l=PhotoImage(file="Media/logo.gif")
root.iconphoto(False,l)
#Functions for HoverButton
def on_enterbutton(r):
button.config(background="light blue",foreground="black")
def on_leavebutton(r):
button.config(background="SystemButtonFace",foreground="black")
button = Button(root, text="Get Speed", width=30,command=update_text)
button.pack(padx=20,pady=20)
button.bind('<Enter>',on_enterbutton)
button.bind('<Leave>',on_leavebutton)
labelA=Label(root,text="Click the button to check speed!!",bg="cyan",font=font2,padx=5,pady=5)
labelA.pack()
labelB=Label(root,text="Checking the speed takes time..Please wait..",bg="cyan",font=font2,padx=10,pady=10)
labelB.pack()
down_label = Label(root, text="",bg="cyan",padx=20,pady=20,font=font1)
down_label.pack()
up_label = Label(root, text="",bg="cyan",padx=20,pady=20,font=font1)
up_label.pack()
ping_label=Label(root,text="",bg="cyan",padx=20,pady=20,font=font1)
ping_label.pack()
root.mainloop()
|
from enum import Enum
from typing import List
from pydantic import BaseModel
class UpdateStatus(Enum):
    """Possible relationships between a local package and its remote copy."""

    upToDate = "upToDate"
    mismatch = "mismatch"
    remoteAhead = "remoteAhead"
    localAhead = "localAhead"
class PackageVersionInfo(BaseModel):
    """A package version string paired with its fingerprint."""
    version: str  # version string of the package
    fingerprint: str  # opaque identifier of the exact build -- presumably a content hash; confirm with producer
class CheckUpdateResponse(BaseModel):
    """Update-check result for a single package: local vs. remote version
    plus the resulting UpdateStatus."""
    packageName: str  # name of the package that was checked
    localVersionInfo: PackageVersionInfo  # version/fingerprint currently installed
    remoteVersionInfo: PackageVersionInfo  # version/fingerprint available remotely
    status: UpdateStatus  # comparison outcome (upToDate / mismatch / remoteAhead / localAhead)
class CheckUpdatesResponse(BaseModel):
    """Batch response: one CheckUpdateResponse per checked package."""
    updates: List[CheckUpdateResponse]
class DownloadPackageBody(BaseModel):
    """Request body identifying one package version to download."""
    packageName: str  # package to download
    version: str  # specific version requested
class DownloadPackagesBody(BaseModel):
    """Batch download request: a list of package/version pairs."""
    packages: List[DownloadPackageBody]
class DownloadedPackageResponse(BaseModel):
    """Response describing a package that was downloaded, including the
    fingerprint of the delivered build."""
    packageName: str  # package that was downloaded
    version: str  # delivered version
    fingerprint: str  # fingerprint of the delivered build
|
"""
Deploy code snips into swig interface files
(c) Hex-Rays
"""
from __future__ import print_function
import sys, re, os, glob
# Python version triple (kept for reference; only argparse availability matters below).
major, minor, micro, _, _ = sys.version_info
try:
    from argparse import ArgumentParser
except:
    print("Failed to import module 'argparse'. Upgrade to Python 2.7, copy argparse.py to this directory or try 'apt-get install python-argparse'")
    raise
# Command-line interface: template/output/module paths plus feature toggles.
parser = ArgumentParser()
parser.add_argument("-t", "--template", required=True)
parser.add_argument("-o", "--output", required=True)
parser.add_argument("-m", "--module", required=True)
parser.add_argument("-w", "--pywraps", required=True)
parser.add_argument("-d", "--interface-dependencies", type=str, required=True)
parser.add_argument("-l", "--lifecycle-aware", default=False, action="store_true")
parser.add_argument("-v", "--verbose", default=False, action="store_true")
parser.add_argument("-b", "--bc695", default=False, action="store_true")
parser.add_argument("-x", "--xml-doc-directory", required=True)
args = parser.parse_args()
# Make the script's own directory importable so doxygen_utils resolves.
this_dir, _ = os.path.split(__file__)
sys.path.append(this_dir)
import doxygen_utils
# SWIG %typemap(check) snippets generated below; consumed later by deploy().
typemaps = []
# generate typemaps that will have to be injected for additional checks
xml_tree = doxygen_utils.load_xml_for_module(args.xml_doc_directory, args.module, or_dummy=False)
if xml_tree is not None:
    all_functions = doxygen_utils.get_toplevel_functions(xml_tree)
    for fun_node in all_functions:
        fun_name = doxygen_utils.get_single_child_element_text_contents(fun_node, "name")
        params = []
        # Collect (name, type, description) tuples for every parameter.
        def reg_param(*args):
            params.append(args)
        doxygen_utils.for_each_param(fun_node, reg_param)
        # A parameter needs a NULL-check typemap when its doc says it must
        # "not be null" -- except qstring-typed ones, which are handled elsewhere.
        def relevant_and_non_null(ptyp, desc):
            if ptyp.strip().startswith("qstring"):
                return False
            return (desc or "").lower().find("not be null") > -1
        for name, ptyp, desc in params:
            if relevant_and_non_null(ptyp, desc):
                # generate 'check' typemap
                signature = []
                body = []
                for idx, tpl in enumerate(params):
                    name, ptyp, desc = tpl
                    signature.append("%s %s" % (ptyp, name or ""))
                    if relevant_and_non_null(ptyp, desc):
                        body.append("if ( $%d == NULL )" % (idx+1))
                        body.append("""  SWIG_exception_fail(SWIG_ValueError, "invalid null reference in method '$symname', argument $argnum of type '$%d_type'");""" % (idx+1))
                    pass
                typemaps.append("%%typemap(check) (%s)" % ", ".join(signature))
                typemaps.append("{")
                typemaps.extend(body)
                typemaps.append("}")
                # One typemap covers the whole signature; stop after the first hit.
                break
else:
    # Only the two bootstrap modules are allowed to lack doxygen XML.
    if args.module not in ["idaapi", "idc"]:
        raise Exception("Missing XML file for module '%s'" % args.module)
# creates a regular expression
# creates a regular expression
def make_re(tag, module, prefix):
    """Build the (pattern-string, compiled-regex) pair that captures text
    between ``<tag(module)>`` ... ``</tag(module)>`` markers, each marker
    introduced by ``prefix`` (``#`` for Python snippets, ``//`` for C++).

    The capture group is everything between the markers (DOTALL, so it
    spans newlines, non-greedy).

    Fix: the pattern is now a raw string -- the original used ``\\(`` and
    ``\\)`` inside a normal string literal, which are invalid escape
    sequences and raise SyntaxWarning (an error in future Pythons). The
    resulting pattern text is identical.
    """
    s = r'%(p)s<%(tag)s\(%(m)s\)>(.+?)%(p)s</%(tag)s\(%(m)s\)>' % {'m': module, 'tag': tag, 'p': prefix}
    return (s, re.compile(s, re.DOTALL))
def convert_path(path_in):
    """Translate a '/'-separated path into the platform's native separator."""
    return os.sep.join(path_in.split('/'))
def apply_tags(template_str, input_str, tags, verbose, path):
    """Splice tagged snippets from ``input_str`` into ``template_str``.

    For each (description, (pattern_string, compiled_regex)) in ``tags``,
    every match captured in ``input_str`` replaces the corresponding tagged
    region of ``template_str``. The special ``pycode_BC695`` tag is instead
    appended at the end as a ``%pythoncode`` block guarded by ``if _BC695:``
    (only when the global ``args.bc695`` flag is set).

    Returns the updated template string. Raises if a tag found in the input
    has no matching destination region in the template.
    """
    for desc, (expr_str, expr) in tags:
        # find source pattern
        matches = expr.findall(input_str)
        if not matches:
            if verbose:
                print("Failed to match <%s> source expression against '%s', skipping...!" % (desc, expr_str))
            continue
        is_695_bwcompat = desc == "pycode_BC695"
        if not is_695_bwcompat:
            # find pattern in destination
            dest = expr.search(template_str)
            if not dest:
                raise Exception("Found <%s> for module '%s' in input (%s), but failed to match in destination" % (
                    desc, expr_str, path))
        # accumulate all the strings to be replaced
        replaces = []
        for src in matches:
            replaces.append(src)
        if is_695_bwcompat:
            # Backward-compat code is appended, not spliced in place; each
            # snippet line is re-indented one level under "if _BC695:".
            if args.bc695:
                r2 = []
                for r in replaces:
                    rlines = r.split("\n")
                    rlines = map(lambda l: " %s" % l, filter(lambda l: len(l.strip()), rlines))
                    r2.append("\n".join(rlines))
                replaces = ["", "if _BC695:"] + r2 + ["\n"]
                template_str = template_str + "%pythoncode %{" + "\n".join(replaces) + "%}"
        else:
            # Replace the destination's capture group (group 1) in place.
            template_str = template_str[:dest.start(1)] + "\n".join(replaces) + template_str[dest.end(1):]
    return template_str
def deploy(module, template, output, pywraps, iface_deps, lifecycle_aware, verbose):
    """Generate the final .i SWIG interface file for ``module``.

    Reads the template, splices in every tagged snippet found in the
    pywraps ``py_<module>*.*`` files, injects the globally generated
    NULL-check ``typemaps``, then writes the output prefixed with the
    %module declaration, dependency #defines, and (optionally) module
    lifecycle registration code.
    """
    template = convert_path(template)
    output = convert_path(output)
    # read template file
    with open(template) as fin:
        template_str = fin.read()
    # read input file(s)
    all_files = glob.glob(os.path.join(pywraps, "py_%s.*" % module)) + \
                glob.glob(os.path.join(pywraps, "py_%s_*.*" % module))
    for path in all_files:
        fname = os.path.basename(path)
        tagname, _ = os.path.splitext(fname)
        if verbose:
            print("Considering file: '%s' (tagname: '%s')" % (path, tagname))
        # create regular expressions
        tags = (
            ('pycode', make_re('pycode', tagname, '#')),
            ('code', make_re('code', tagname, '//')),
            ('inline', make_re('inline', tagname, '//')),
            ('decls', make_re('decls', tagname, '//')),
            ('init', make_re('init', tagname, '//')),
            ('pycode_BC695', make_re('pycode_BC695', tagname, '#')),
        )
        with open(path) as fin:
            input_str = fin.read()
        template_str = apply_tags(template_str, input_str, tags, verbose, path)
    # synthetic tags
    # Wrap the generated typemaps in <typemaps(module)> markers so the same
    # apply_tags machinery can splice them into the template.
    if typemaps:
        typemaps_str = "\n".join([
            "//<typemaps(%s)>" % module,
            "\n".join(typemaps),
            "//</typemaps(%s)>" % module,
        ])
        synth_tags = (
            ('typemaps', make_re('typemaps', module, '//')),
        )
        template_str = apply_tags(template_str, typemaps_str, synth_tags, verbose, "[generated]")
    # write output file
    with open(output, 'w') as f:
        # f.write("""%module(docstring="IDA Plugin SDK API wrapper: {0}",directors="1",threads="1") {1}\n""".format(
        #     module,
        #     module if module == "idaapi" else "_ida_%s" % module))
        f.write("""%module(docstring="IDA Plugin SDK API wrapper: {0}",directors="1",threads="1") {1}\n""".format(
            module, "ida_%s" % module))
        # Guard so only the first processed module claims IDA_MODULE_DEFINED.
        f.write("#ifndef IDA_MODULE_DEFINED\n")
        f.write("""  #define IDA_MODULE_%s\n""" % module.upper())
        f.write("#define IDA_MODULE_DEFINED\n")
        f.write("#endif // IDA_MODULE_DEFINED\n")
        # Declare interface dependencies (including a self-dependency).
        for dep in [module] + iface_deps.split(","):
            if len(dep):
                f.write("#ifndef HAS_DEP_ON_INTERFACE_%s\n" % dep.upper())
                f.write("  #define HAS_DEP_ON_INTERFACE_%s\n" % dep.upper())
                f.write("#endif\n")
        f.write("%include \"header.i\"\n")
        f.write(template_str)
        if lifecycle_aware:
            # Register per-module closebase/term hooks with the kernel.
            f.write("""
%%init %%{
{
module_callbacks_t module_lfc;
module_lfc.closebase = ida_%s_closebase;
module_lfc.term = ida_%s_term;
register_module_lifecycle_callbacks(module_lfc);
}
%%}
""" % (module, module))
# Entry point: generate the interface file using the parsed CLI arguments.
deploy(
    args.module,
    args.template,
    args.output,
    args.pywraps,
    args.interface_dependencies,
    args.lifecycle_aware,
    args.verbose)
|
import os
import collections
import gym
import numpy as np
import joblib
import tensorflow.compat.v1 as tf
import sonnet as snt
from baselines.common.input import observation_placeholder, encode_observation
from baselines.common.policies import PolicyWithValue
from gfootball.env import football_action_set
from gfootball.env import observation_preprocessing
from gfootball.env import player_base
from gfootball.env import wrappers
def gfootball_impala_cnn_network_fn(frame):
  """IMPALA-style CNN torso over stacked SMM frames.

  Scales pixel input from [0, 255] to [0, 1], applies four
  conv + 3x3 max-pool (stride 2) stages each followed by residual blocks,
  then flattens and projects to a 256-dim ReLU feature vector.
  """
  # Convert to floats.
  frame = tf.to_float(frame)
  frame /= 255
  with tf.variable_scope('convnet'):
    conv_out = frame
    # (output channels, number of residual blocks) per downscaling stage.
    conv_layers = [(16, 2), (32, 2), (32, 2), (32, 2)]
    for i, (num_ch, num_blocks) in enumerate(conv_layers):
      # Downscale.
      conv_out = snt.Conv2D(num_ch, 3, stride=1, padding='SAME')(conv_out)
      conv_out = tf.nn.pool(
          conv_out,
          window_shape=[3, 3],
          pooling_type='MAX',
          padding='SAME',
          strides=[2, 2])
      # Residual block(s).
      for j in range(num_blocks):
        with tf.variable_scope('residual_%d_%d' % (i, j)):
          block_input = conv_out
          conv_out = tf.nn.relu(conv_out)
          conv_out = snt.Conv2D(num_ch, 3, stride=1, padding='SAME')(conv_out)
          conv_out = tf.nn.relu(conv_out)
          conv_out = snt.Conv2D(num_ch, 3, stride=1, padding='SAME')(conv_out)
          conv_out += block_input
  # Final head: ReLU -> flatten -> 256-dim linear -> ReLU.
  conv_out = tf.nn.relu(conv_out)
  conv_out = snt.BatchFlatten()(conv_out)
  conv_out = snt.Linear(256)(conv_out)
  conv_out = tf.nn.relu(conv_out)
  return conv_out
class Player(player_base.PlayerBase):
  """An agent loaded from PPO2 cnn model checkpoint."""
  def __init__(self, checkpoint_path):
    """Build the policy graph, restore weights from ``checkpoint_path``
    (a baselines joblib checkpoint), and re-save as a TF checkpoint."""
    player_base.PlayerBase.__init__(self)
    self._action_set = 'default'
    self._player_prefix = 'player_0'
    config = tf.ConfigProto()
    config.gpu_options.allow_growth = True
    self._sess = tf.Session(config=config)
    # Number of consecutive SMM frames stacked along the channel axis.
    stacking = 4
    self._stacker = ObservationStacker(stacking)
    with tf.variable_scope(self._player_prefix):
      with tf.variable_scope('ppo2_model'):
        env = DummyEnv(self._action_set, stacking)
        ob_space = env.observation_space
        X = observation_placeholder(ob_space, batch_size=1)
        extra_tensors = {}
        encoded_x = X
        encoded_x = encode_observation(ob_space, encoded_x)
        with tf.variable_scope('pi', reuse=tf.AUTO_REUSE):
          policy_latent = gfootball_impala_cnn_network_fn(encoded_x)
        self._policy = PolicyWithValue(
            env=env,
            observations=X,
            latent=policy_latent,
            vf_latent=policy_latent,
            sess=self._sess,
            estimate_q=False,
            **extra_tensors
        )
    _load_variables(checkpoint_path, self._sess, prefix=self._player_prefix + '/')
    saver = tf.train.Saver()
    # NOTE(review): hard-coded absolute personal path -- leftover debug/export
    # code; this will fail on any other machine. Consider parameterizing.
    saver.save(self._sess, "/home/alex/Dropbox/projects/python/kaggle/football/saved_models/11_vs_11_easy_stochastic_v2/11_vs_11_easy_stochastic_v2")
  def __del__(self):
    self._sess.close()
  def take_action(self, observation):
    """Return a one-element list with the action chosen for ``observation``."""
    assert len(observation) == 1, 'Multiple players control is not supported'
    # Render raw observation to the SMM representation, then stack frames.
    observation = observation_preprocessing.generate_smm(observation)
    observation = self._stacker.get(observation)
    action = self._policy.step(observation)[0][0]
    actions = [action]  #[football_action_set.action_set_dict[self._action_set][action]]
    return actions
  def reset(self):
    # Drop stacked history between episodes.
    self._stacker.reset()
def _load_variables(load_path, sess, prefix='', remove_prefix=True):
  """Loads variables from checkpoint of policy trained by baselines.

  Only variables whose name starts with ``prefix`` are restored; when
  ``remove_prefix`` is true the prefix is stripped before looking the
  variable up in the joblib-pickled parameter dict.
  """
  # Forked from address below since we needed loading from different var names:
  # https://github.com/openai/baselines/blob/master/baselines/common/tf_util.py
  variables = [v for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)
               if v.name.startswith(prefix)]
  loaded_params = joblib.load(load_path)
  restores = []
  for v in variables:
    v_name = v.name[len(prefix):] if remove_prefix else v.name
    restores.append(v.assign(loaded_params[v_name]))
  # Run all assign ops in one session call.
  sess.run(restores)
class ObservationStacker(object):
  """Produces observations stacked along the last (channel) axis.

  Keeps the ``stacking`` most recent frames; before any history exists,
  the first frame is replicated to fill the whole stack.
  """

  def __init__(self, stacking):
    self._stacking = stacking
    self._data = []

  def get(self, observation):
    """Append ``observation`` to the history and return the stacked array."""
    if not self._data:
      # First frame after construction/reset: replicate it to fill the stack.
      self._data = [observation] * self._stacking
    else:
      self._data.append(observation)
      # Keep only the most recent `stacking` frames.
      del self._data[:-self._stacking]
    return np.concatenate(self._data, axis=-1)

  def reset(self):
    """Clear the frame history (e.g. at episode boundaries)."""
    del self._data[:]
class DummyEnv(object):
  # We need env object to pass to build_policy, however real environment
  # is not there yet.
  # Exposes only the action/observation spaces the policy builder reads:
  # a discrete action space sized to the chosen action set, and a
  # 72x96 SMM image box with 4 channels per stacked frame.
  def __init__(self, action_set, stacking):
    self.action_space = gym.spaces.Discrete(
        len(football_action_set.action_set_dict[action_set]))
    self.observation_space = gym.spaces.Box(
        0, 255, shape=[72, 96, 4 * stacking], dtype=np.uint8)
|
"""Packaging script for the grafana-snapshots tool."""
from grafana_snapshots.constants import (PKG_NAME, PKG_VERSION)
from setuptools import setup, find_packages

# Global variables
name = PKG_NAME
version = PKG_VERSION
# Runtime dependencies installed alongside the package.
requires = [
    'grafana-api',
    'jinja2'
]

# Fix: read the long description via a context manager with an explicit
# encoding -- the original open('README.md', 'r').read() leaked the file
# handle and decoded with the platform default encoding.
with open('README.md', 'r', encoding='utf-8') as readme_file:
    long_description = readme_file.read()

setup(
    name=name,
    version=version,
    description='A Python-based application to build Grafana snapshots using the Grafana API and grafana-api python interface',
    long_description_content_type='text/markdown',
    long_description=long_description,
    author="author",
    author_email="jfpik78@gmail.com",
    url="https://github.com/peekjef72/grafana-snapshots-tool",
    entry_points={
        'console_scripts': [
            'grafana-snapshots = grafana_snapshots.cli:main'
        ]
    },
    packages=find_packages(),
    install_requires=requires,
    # Ship bundled configuration files with the package.
    package_data={'': ['conf/*']},
)
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import InvalidNonce
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.precise import Precise
class aofex(Exchange):
    def describe(self):
        """Return the static AOFEX exchange metadata (capabilities, API
        endpoints, timeframes, fees, error-code mapping, options) merged
        over the base Exchange defaults."""
        return self.deep_extend(super(aofex, self).describe(), {
            'id': 'aofex',
            'name': 'AOFEX',
            'countries': ['GB'],
            'rateLimit': 1000,
            'has': {
                'fetchMarkets': True,
                'fetchCurrencies': False,
                'fetchOrderBook': True,
                'fetchTrades': True,
                'fetchTicker': True,
                'fetchTickers': True,
                'fetchOHLCV': True,
                'fetchBalance': True,
                'createOrder': True,
                'cancelOrder': True,
                'cancelAllOrders': True,
                'fetchOpenOrders': True,
                'fetchClosedOrders': True,
                'fetchClosedOrder': True,
                'fetchOrderTrades': True,
                'fetchTradingFee': True,
            },
            'timeframes': {
                '1m': '1min',
                '5m': '5min',
                '15m': '15min',
                '30m': '30min',
                '1h': '1hour',
                '6h': '6hour',
                '12h': '12hour',
                '1d': '1day',
                '1w': '1week',
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/51840849/77670271-056d1080-6f97-11ea-9ac2-4268e9ed0c1f.jpg',
                'api': {
                    'public': 'https://openapi.aofex.com/openApi',
                    'private': 'https://openapi.aofex.com/openApi',
                },
                'www': 'https://aofex.com',
                'doc': 'https://aofex.zendesk.com/hc/en-us/sections/360005576574-API',
                'fees': 'https://aofex.zendesk.com/hc/en-us/articles/360025814934-Fees-on-AOFEX',
                'referral': 'https://aofex.com/#/register?key=9763840',
            },
            'api': {
                'public': {
                    'get': [
                        'market/symbols',
                        'market/trade',
                        'market/depth',
                        'market/kline',
                        'market/precision',
                        'market/24kline',
                        'market/gears_depth',
                        'market/detail',
                    ],
                },
                'private': {
                    'get': [
                        'entrust/currentList',
                        'entrust/historyList',
                        'entrust/rate',
                        'wallet/list',
                        'entrust/detail',
                    ],
                    'post': [
                        'entrust/add',
                        'entrust/cancel',
                    ],
                },
            },
            'fees': {
                'trading': {
                    'maker': 0.0019,
                    'taker': 0.002,
                },
            },
            'exceptions': {
                'exact': {
                    '20001': ExchangeError,  # request failure
                    '20401': PermissionDenied,  # no permission
                    '20500': ExchangeError,  # system error
                    '20501': BadSymbol,  # base symbol error
                    '20502': ExchangeError,  # base currency error
                    '20503': ExchangeError,  # base date error
                    '20504': InsufficientFunds,  # account frozen balance insufficient error
                    '20505': BadRequest,  # bad argument
                    '20506': AuthenticationError,  # api signature not valid
                    '20507': ExchangeError,  # gateway internal error
                    '20508': InvalidAddress,  # ad ethereum addresss
                    '20509': InsufficientFunds,  # order accountbalance error
                    '20510': InvalidOrder,  # order limitorder price error
                    '20511': InvalidOrder,  # order limitorder amount error
                    '20512': InvalidOrder,  # order orderprice precision error
                    '20513': InvalidOrder,  # order orderamount precision error
                    '20514': InvalidOrder,  # order marketorder amount error
                    '20515': InvalidOrder,  # order queryorder invalid
                    '20516': InvalidOrder,  # order orderstate error
                    '20517': InvalidOrder,  # order datelimit error
                    '50518': InvalidOrder,  # order update error
                    '20519': InvalidNonce,  # the nonce has been used
                    '20520': InvalidNonce,  # nonce expires, please verify server time
                    '20521': BadRequest,  # incomplete header parameters
                    '20522': ExchangeError,  # not getting the current user
                    '20523': AuthenticationError,  # please authenticate
                    '20524': PermissionDenied,  # btc account lockout
                    '20525': AuthenticationError,  # get API Key error
                    '20526': PermissionDenied,  # no query permission
                    '20527': PermissionDenied,  # no deal permission
                    '20528': PermissionDenied,  # no withdrawal permission
                    '20529': AuthenticationError,  # API Key expired
                    '20530': PermissionDenied,  # no permission
                },
                'broad': {
                },
            },
            'options': {
                'fetchBalance': {
                    'show_all': '0',  # '1' to show zero balances
                },
            },
            'commonCurrencies': {
                'CPC': 'Consensus Planet Coin',
            },
        })
    def fetch_markets(self, params={}):
        """Fetch all tradable markets and their precision/limits, returning
        a list of unified ccxt market structures."""
        markets = self.publicGetMarketSymbols(params)
        #
        #     {
        #         errno: 0,
        #         errmsg: 'success',
        #         result: [
        #             {
        #                 id: 2,
        #                 symbol: 'BTC-USDT',
        #                 base_currency: 'BTC',
        #                 quote_currency: 'USDT',
        #                 min_size: 0.00008,
        #                 max_size: 1300,
        #                 min_price: 1000,
        #                 max_price: 110000,
        #                 maker_fee: 1,
        #                 taker_fee: 1,
        #                 isHot: null,
        #                 isNew: null,
        #                 crown: null
        #             },
        #         ]
        #     }
        #
        precisions = self.publicGetMarketPrecision()
        #
        #     {
        #         errno: 0,
        #         errmsg: 'success',
        #         result: {
        #             'MANA-USDT': {
        #                 amount: '2',
        #                 minQuantity: '32',
        #                 maxQuantity: '46000000',
        #                 price: '4',
        #                 minPrice: '0.003',
        #                 maxPrice: '0.35'
        #             },
        #         }
        #     }
        #
        precisions = self.safe_value(precisions, 'result', {})
        markets = self.safe_value(markets, 'result', [])
        result = []
        for i in range(0, len(markets)):
            market = markets[i]
            id = self.safe_string(market, 'symbol')
            baseId = self.safe_string(market, 'base_currency')
            quoteId = self.safe_string(market, 'quote_currency')
            base = self.safe_currency_code(baseId)
            quote = self.safe_currency_code(quoteId)
            symbol = base + '/' + quote
            numericId = self.safe_integer(market, 'id')
            # Precision info lives in a separate endpoint keyed by market id.
            precision = self.safe_value(precisions, id, {})
            makerFee = self.safe_number(market, 'maker_fee')
            takerFee = self.safe_number(market, 'taker_fee')
            result.append({
                'id': id,
                'numericId': numericId,
                'symbol': symbol,
                'baseId': baseId,
                'quoteId': quoteId,
                'base': base,
                'quote': quote,
                'active': None,
                # Exchange reports fees scaled by 1000 (1 -> 0.001).
                # NOTE(review): crashes if maker_fee/taker_fee is absent (None / 1000).
                'maker': makerFee / 1000,
                'taker': takerFee / 1000,
                'precision': {
                    'amount': self.safe_integer(precision, 'amount'),
                    'price': self.safe_integer(precision, 'price'),
                },
                'limits': {
                    'amount': {
                        'min': self.safe_number(market, 'min_size'),
                        'max': self.safe_number(market, 'max_size'),
                    },
                    'price': {
                        'min': self.safe_number(market, 'min_price'),
                        'max': self.safe_number(market, 'max_price'),
                    },
                    'cost': {
                        'min': None,
                        'max': None,
                    },
                },
                'info': market,
            })
        return result
def parse_ohlcv(self, ohlcv, market=None):
#
# {
# id: 1584950100,
# amount: "329.196",
# count: 81,
# open: "0.021155",
# close: "0.021158",
# low: "0.021144",
# high: "0.021161",
# vol: "6.963557767"
# }
#
return [
self.safe_timestamp(ohlcv, 'id'),
self.safe_number(ohlcv, 'open'),
self.safe_number(ohlcv, 'high'),
self.safe_number(ohlcv, 'low'),
self.safe_number(ohlcv, 'close'),
self.safe_number(ohlcv, 'amount'),
]
    def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
        """Fetch candles for ``symbol`` and parse them into unified OHLCV rows.

        Note: ``since`` is not supported by the endpoint; only ``size``
        (``limit``) is sent, and filtering happens client-side in
        parse_ohlcvs.
        """
        self.load_markets()
        market = self.market(symbol)
        if limit is None:
            limit = 150  # default 150, max 2000
        request = {
            'symbol': market['id'],
            'period': self.timeframes[timeframe],
            'size': limit,  # default 150, max 2000
        }
        response = self.publicGetMarketKline(self.extend(request, params))
        #
        #     {
        #         errno: 0,
        #         errmsg: "success",
        #         result: {
        #             ts: 1584950139003,
        #             symbol: "ETH-BTC",
        #             period: "1min",
        #             data: [
        #                 {
        #                     id: 1584950100,
        #                     amount: "329.196",
        #                     count: 81,
        #                     open: "0.021155",
        #                     close: "0.021158",
        #                     low: "0.021144",
        #                     high: "0.021161",
        #                     vol: "6.963557767"
        #                 },
        #                 {
        #                     id: 1584950040,
        #                     amount: "513.265",
        #                     count: 151,
        #                     open: "0.021165",
        #                     close: "0.021155",
        #                     low: "0.021151",
        #                     high: "0.02118",
        #                     vol: "10.862806573"
        #                 },
        #             ]
        #         }
        #     }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'data', [])
        return self.parse_ohlcvs(data, market, since, limit)
    def fetch_balance(self, params={}):
        """Fetch wallet balances and return the unified balance structure.

        The 'show_all' flag (from options['fetchBalance']) controls whether
        zero balances are included in the exchange response.
        """
        self.load_markets()
        options = self.safe_value(self.options, 'fetchBalance', {})
        showAll = self.safe_value(options, 'show_all', '0')
        request = {
            # 'currency': 'BTC',
            'show_all': showAll,  # required to show zero balances
        }
        response = self.privateGetWalletList(self.extend(request, params))
        #
        #     {
        #         "errno": 0,
        #         "errmsg": "success",
        #         "result": [
        #             {"available": "0", "frozen": "0", "currency": "BTC"}
        #         ]
        #     }
        #
        result = {'info': response}
        balances = self.safe_value(response, 'result', [])
        for i in range(0, len(balances)):
            balance = balances[i]
            currencyId = self.safe_string(balance, 'currency')
            code = self.safe_currency_code(currencyId)
            account = self.account()
            account['free'] = self.safe_string(balance, 'available')
            account['used'] = self.safe_string(balance, 'frozen')
            result[code] = account
        return self.parse_balance(result, False)
    def fetch_trading_fee(self, symbol, params={}):
        """Fetch the maker/taker trading fee rates for a single symbol."""
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        response = self.privateGetEntrustRate(self.extend(request, params))
        #
        #     {
        #         "errno":0,
        #         "errmsg":"success",
        #         "result": {
        #             "toFee":"0.002","fromFee":"0.002"
        #         }
        #     }
        #
        result = self.safe_value(response, 'result', {})
        return {
            'info': response,
            'symbol': symbol,
            # fromFee -> maker, toFee -> taker per the endpoint's semantics.
            'maker': self.safe_number(result, 'fromFee'),
            'taker': self.safe_number(result, 'toFee'),
        }
    def fetch_order_book(self, symbol, limit=None, params={}):
        """Fetch the order book for ``symbol``.

        Note: the endpoint does not take a depth argument, so ``limit`` is
        effectively ignored (truncation happens in parse_order_book).
        """
        self.load_markets()
        request = {
            'symbol': self.market_id(symbol),
        }
        response = self.publicGetMarketDepth(self.extend(request, params))
        #
        #     {
        #         errno: 0,
        #         errmsg: "success",
        #         result: {
        #             buyType: 1,
        #             sellType: 1,
        #             ts: 1584950701050,
        #             symbol: "ETH-BTC",
        #             asks: [
        #                 ["0.021227", "0.182"],
        #                 ["0.021249", "0.035"],
        #                 ["0.021253", "0.058"],
        #             ],
        #             bids: [
        #                 ["0.021207", "0.039"],
        #                 ["0.021203", "0.051"],
        #                 ["0.02117", "2.326"],
        #             ]
        #         }
        #     }
        #
        result = self.safe_value(response, 'result', {})
        timestamp = self.safe_integer(result, 'ts')
        return self.parse_order_book(result, timestamp)
    def parse_ticker(self, ticker, market=None):
        """Convert a raw 24h kline summary into a unified ticker structure.

        The raw payload has no bid/ask data, so those fields are None.
        """
        #
        # fetchTicker
        #
        #     {
        #         id: 1584890087,
        #         amount: '150032.919',
        #         count: 134538,
        #         open: '0.021394',
        #         close: '0.021177',
        #         low: '0.021053',
        #         high: '0.021595',
        #         vol: '3201.72451442'
        #     }
        #
        timestamp = self.safe_timestamp(ticker, 'id')
        symbol = None
        if market:
            symbol = market['symbol']
        open = self.safe_number(ticker, 'open')
        last = self.safe_number(ticker, 'close')
        change = None
        # Round the change to market precision only when a symbol is known.
        if symbol is not None:
            change = float(self.price_to_precision(symbol, last - open))
        else:
            change = last - open
        average = self.sum(last, open) / 2
        percentage = change / open * 100
        baseVolume = self.safe_number(ticker, 'amount')
        quoteVolume = self.safe_number(ticker, 'vol')
        vwap = self.vwap(baseVolume, quoteVolume)
        if vwap is not None:
            # NOTE(review): unlike the `change` branch above, this path does not
            # guard against symbol being None -- price_to_precision(None, ...)
            # would fail if called without a market; confirm callers always pass one.
            vwap = float(self.price_to_precision(symbol, vwap))
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': self.safe_number(ticker, 'high'),
            'low': self.safe_number(ticker, 'low'),
            'bid': None,
            'bidVolume': None,
            'ask': None,
            'askVolume': None,
            'vwap': vwap,
            'open': open,
            'close': last,
            'last': last,
            'previousClose': None,
            'change': change,
            'percentage': percentage,
            'average': average,
            'baseVolume': baseVolume,
            'quoteVolume': quoteVolume,
            'info': ticker,
        }
    def fetch_tickers(self, symbols=None, params={}):
        """Fetch 24h tickers for all markets, or only for ``symbols`` when
        given (sent as a comma-joined list of market ids)."""
        self.load_markets()
        request = {}
        if symbols is not None:
            ids = self.market_ids(symbols)
            request['symbol'] = ','.join(ids)
        response = self.publicGetMarket24kline(self.extend(request, params))
        #
        #     {
        #         errno: 0,
        #         errmsg: "success",
        #         result: [
        #             {
        #                 symbol: "HB-AQ",
        #                 data: {
        #                     id: 1584893403,
        #                     amount: "4753751.243400354852648809",
        #                     count: 4724,
        #                     open: "6.3497",
        #                     close: "6.3318",
        #                     low: "6.011",
        #                     high: "6.5",
        #                     vol: "29538384.7873528796542891343493"
        #                 }
        #             },
        #         ]
        #     }
        #
        tickers = self.safe_value(response, 'result', [])
        result = {}
        for i in range(0, len(tickers)):
            marketId = self.safe_string(tickers[i], 'symbol')
            market = self.safe_market(marketId, None, '-')
            symbol = market['symbol']
            data = self.safe_value(tickers[i], 'data', {})
            result[symbol] = self.parse_ticker(data, market)
        return self.filter_by_array(result, 'symbol', symbols)
    def fetch_ticker(self, symbol, params={}):
        """Fetch the 24h ticker for a single symbol."""
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        response = self.publicGetMarketDetail(self.extend(request, params))
        #
        #     {
        #         errno: 0,
        #         errmsg: 'success',
        #         result: {
        #             id: 1584890087,
        #             amount: '150032.919',
        #             count: 134538,
        #             open: '0.021394',
        #             close: '0.021177',
        #             low: '0.021053',
        #             high: '0.021595',
        #             vol: '3201.72451442'
        #         }
        #     }
        #
        result = self.safe_value(response, 'result', {})
        return self.parse_ticker(result, market)
    def parse_trade(self, trade, market=None):
        """Convert a raw public trade or private fill into a unified trade.

        Handles two payload shapes (see samples below): public trades carry
        'ts'/'amount'/'direction'; private order fills carry
        'ctime'/'number'/'total_price'/'fee' and get their side injected by
        parse_order.
        """
        #
        # fetchTrades(public)
        #
        #     {
        #         id: 1584948803298490,
        #         amount: "2.737",
        #         price: "0.021209",
        #         direction: "sell",
        #         ts: 1584948803
        #     }
        #
        # fetchOrder trades
        #
        #     {
        #         "id":null,
        #         "ctime":"2020-03-23 20:07:17",
        #         "price":"123.9",
        #         "number":"0.010688626311541565",
        #         "total_price":"1.324320799999999903",
        #         "fee":"0.000021377252623083"
        #     }
        #
        id = self.safe_string(trade, 'id')
        ctime = self.parse8601(self.safe_string(trade, 'ctime'))
        # Prefer 'ts', fall back to parsed 'ctime'; shift exchange-local time to UTC.
        # NOTE(review): if both 'ts' and 'ctime' are missing this raises a
        # TypeError (None - int); the visible payloads always carry one of them.
        timestamp = self.safe_timestamp(trade, 'ts', ctime) - 28800000  # 8 hours, adjust to UTC
        symbol = None
        if (symbol is None) and (market is not None):
            symbol = market['symbol']
        side = self.safe_string(trade, 'direction')
        priceString = self.safe_string(trade, 'price')
        amountString = self.safe_string_2(trade, 'amount', 'number')
        price = self.parse_number(priceString)
        amount = self.parse_number(amountString)
        cost = self.safe_number(trade, 'total_price')
        if cost is None:
            # Public trades have no cost field: derive it with exact string math.
            cost = self.parse_number(Precise.string_mul(priceString, amountString))
        feeCost = self.safe_number(trade, 'fee')
        fee = None
        if feeCost is not None:
            feeCurrencyCode = None
            if market is not None:
                # Fee is charged in the received currency: base for buys, quote for sells.
                if side == 'buy':
                    feeCurrencyCode = market['base']
                elif side == 'sell':
                    feeCurrencyCode = market['quote']
            fee = {
                'cost': feeCost,
                'currency': feeCurrencyCode,
            }
        return {
            'id': id,
            'info': trade,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'order': None,
            'type': None,
            'side': side,
            'takerOrMaker': None,
            'price': price,
            'amount': amount,
            'cost': cost,
            'fee': fee,
        }
    def fetch_trades(self, symbol, since=None, limit=None, params={}):
        """Fetch recent public trades for ``symbol``.

        The endpoint takes no paging arguments; ``since``/``limit`` are
        applied client-side by parse_trades.
        """
        self.load_markets()
        market = self.market(symbol)
        request = {
            'symbol': market['id'],
        }
        response = self.publicGetMarketTrade(self.extend(request, params))
        #
        #     {
        #         errno: 0,
        #         errmsg: "success",
        #         result: {
        #             symbol: "ETH-BTC",
        #             ts: 1584948805439,
        #             data: [
        #                 {
        #                     id: 1584948803300883,
        #                     amount: "0.583",
        #                     price: "0.021209",
        #                     direction: "buy",
        #                     ts: 1584948803
        #                 },
        #                 {
        #                     id: 1584948803298490,
        #                     amount: "2.737",
        #                     price: "0.021209",
        #                     direction: "sell",
        #                     ts: 1584948803
        #                 },
        #             ]
        #         }
        #     }
        #
        result = self.safe_value(response, 'result', {})
        data = self.safe_value(result, 'data', [])
        return self.parse_trades(data, market, since, limit)
def parse_order_status(self, status):
statuses = {
'1': 'open',
'2': 'open', # partially filled
'3': 'closed',
'4': 'canceled', # canceling
'5': 'canceled', # partially canceled
'6': 'canceled',
}
return self.safe_string(statuses, status, status)
    def parse_order(self, order, market=None):
        """Convert a raw AOFEX order (from createOrder, fetchOpenOrders/
        fetchClosedOrders, or fetchOrder) into a unified order structure.

        For filled orders the fills injected under 'trades' (by
        fetch_closed_order) are aggregated to derive filled/cost/fee/
        average; market buys report amount in quote-derived terms
        ('deal_number'), limit orders in base units ('number').
        """
        #
        # createOrder
        #
        #     {order_sn: 'BM7442641584965237751ZMAKJ5'}
        #
        # fetchOpenOrders, fetchClosedOrders
        #
        #     {
        #         "order_sn": "BL74426415849672087836G48N1",
        #         "symbol": "ETH-USDT",
        #         "ctime": "2020-03-23 20:40:08",
        #         "type": 2,
        #         "side": "buy",
        #         "price": "90",  # None for market orders
        #         "number": "0.1",
        #         "total_price": "9.0",  # 0 for market orders
        #         "deal_number": null,
        #         "deal_price": null,
        #         "status": 1,
        #     }
        #
        # fetchOrder
        #
        #     {
        #         order_sn: 'BM7442641584965237751ZMAKJ5',
        #         symbol: 'ETH-USDT',
        #         ctime: '2020-03-23 20:07:17',
        #         type: 1,
        #         side: 'buy',
        #         price: '0',
        #         number: '10',
        #         total_price: '0',
        #         deal_number: '0.080718626311541565',
        #         deal_price: '123.890000000000000000',
        #         status: 3,
        #         # the trades field is injected by fetchOrder
        #         trades: [
        #             {
        #                 id: null,
        #                 ctime: '2020-03-23 20:07:17',
        #                 price: '123.9',
        #                 number: '0.010688626311541565',
        #                 total_price: '1.324320799999999903',
        #                 fee: '0.000021377252623083'
        #             }
        #         ]
        #     }
        #
        id = self.safe_string(order, 'order_sn')
        orderStatus = self.safe_string(order, 'status')
        status = self.parse_order_status(orderStatus)
        marketId = self.safe_string(order, 'symbol')
        market = self.safe_market(marketId, market, '-')
        timestamp = self.parse8601(self.safe_string(order, 'ctime'))
        if timestamp is not None:
            timestamp -= 28800000  # 8 hours, adjust to UTC
        orderType = self.safe_string(order, 'type')
        type = 'limit' if (orderType == '2') else 'market'
        side = self.safe_string(order, 'side')
        # amount = self.safe_number(order, 'number')
        # price = self.safe_number(order, 'price')
        cost = None
        price = None
        amount = None
        average = None
        number = self.safe_number(order, 'number')
        totalPrice = self.safe_number(order, 'total_price')
        if type == 'limit':
            amount = number
            price = self.safe_number(order, 'price')
        else:
            # Market orders: average fill price comes from 'deal_price';
            # for buys the filled base amount is 'deal_number'.
            average = self.safe_number(order, 'deal_price')
            if side == 'buy':
                amount = self.safe_number(order, 'deal_number')
            else:
                amount = number
        fee = None
        trades = None
        filled = None
        feeCost = None
        remaining = None
        lastTradeTimestamp = None
        # all orders except new orders and canceled orders
        if (orderStatus != '1') and (orderStatus != '6'):
            rawTrades = self.safe_value(order, 'trades')
            if rawTrades is not None:
                # Raw fills carry no side -- propagate the order's side first.
                for i in range(0, len(rawTrades)):
                    rawTrades[i]['direction'] = side
                trades = self.parse_trades(rawTrades, market, None, None, {
                    'symbol': market['symbol'],
                    'order': id,
                    'side': side,
                    'type': type,
                })
                tradesLength = len(trades)
                if tradesLength > 0:
                    # Seed the accumulators with the first fill, then fold in the rest.
                    firstTrade = trades[0]
                    feeCost = firstTrade['fee']['cost']
                    lastTradeTimestamp = firstTrade['timestamp']
                    filled = firstTrade['amount']
                    cost = firstTrade['cost']
                    for i in range(1, len(trades)):
                        trade = trades[i]
                        feeCost = self.sum(feeCost, trade['fee']['cost'])
                        filled = self.sum(filled, trade['amount'])
                        cost = self.sum(cost, trade['cost'])
                        lastTradeTimestamp = max(lastTradeTimestamp, trade['timestamp'])
                    if amount is not None:
                        # Guard against summed fills slightly exceeding the order amount.
                        filled = min(amount, filled)
                    if filled > 0:
                        average = cost / filled
                    if feeCost is not None:
                        feeCurrencyCode = market['base'] if (side == 'buy') else market['quote']
                        fee = {
                            'cost': feeCost,
                            'currency': feeCurrencyCode,
                        }
                else:
                    filled = 0
                    cost = 0
        if cost is None:
            if type == 'limit':
                cost = totalPrice
            elif side == 'buy':
                cost = number
        if filled is None:
            # A fully-filled limit order without fill data: filled == amount.
            if (type == 'limit') and (orderStatus == '3'):
                filled = amount
        if filled is not None:
            if amount is not None:
                remaining = max(amount - filled, 0)
        return {
            'info': order,
            'id': id,
            'clientOrderId': None,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': lastTradeTimestamp,
            'status': status,
            'symbol': market['symbol'],
            'type': type,
            'timeInForce': None,
            'postOnly': None,
            'side': side,
            'price': price,
            'stopPrice': None,
            'cost': cost,
            'average': average,
            'amount': amount,
            'filled': filled,
            'remaining': remaining,
            'trades': trades,
            'fee': fee,
        }
def fetch_closed_order(self, id, symbol=None, params={}):
        """Fetch a single closed order by its exchange order id.

        Queries the private entrust-detail endpoint and merges the returned
        trade list into the order structure before parsing it.

        :param str id: the exchange order id('order_sn')
        :param str symbol: unused, kept for the unified API signature
        :param dict params: extra parameters forwarded to the endpoint
        :returns: a unified order structure
        """
        self.load_markets()
        #
        # {
        #     "errno": 0,
        #     "errmsg": "success",
        #     "result": {
        #         "trades": [
        #             {"id":null, "ctime":"2020-03-23 20:07:17", "price":"123.9",
        #              "number":"0.010688626311541565", "total_price":"1.324320799999999903",
        #              "fee":"0.000021377252623083"},
        #         ],
        #         "entrust": {
        #             "order_sn":"BM7442641584965237751ZMAKJ5", "symbol":"ETH-USDT",
        #             "ctime":"2020-03-23 20:07:17", "type":1, "side":"buy",
        #             "price":"0", "number":"10", "total_price":"0",
        #             "deal_number":"0.080718626311541565",
        #             "deal_price":"123.890000000000000000", "status":3
        #         }
        #     }
        # }
        #
        response = self.privateGetEntrustDetail(self.extend({'order_sn': id}, params))
        result = self.safe_value(response, 'result', {})
        order = self.safe_value(result, 'entrust', {})
        # attach the raw trades so parse_order can compute fills and fees
        order['trades'] = self.safe_value(result, 'trades', [])
        return self.parse_order(order)
def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
        """Fetch the trades that filled a given order.

        Delegates to fetch_closed_order() and extracts its parsed 'trades'
        list; `since` and `limit` are accepted for API uniformity only.
        """
        order = self.fetch_closed_order(id, symbol, params)
        trades = self.safe_value(order, 'trades', [])
        return trades
def fetch_orders_with_method(self, method, symbol=None, since=None, limit=None, params={}):
        """Shared implementation behind fetch_open_orders / fetch_closed_orders.

        :param str method: name of the implicit API method to call
        :param str symbol: unified market symbol to filter by(optional)
        :param int since: earliest timestamp, applied client-side by parse_orders
        :param int limit: max number of orders(default 20, max 100)
        :param dict params: extra parameters merged into the request
        :returns: a list of unified order structures
        """
        self.load_markets()
        request = {
            # 'from': 'BM7442641584965237751ZMAKJ5', # query start order_sn
            'direct': 'prev',  # 'prev' pages backwards, 'next' forwards
        }
        market = None
        if symbol is not None:
            market = self.market(symbol)
            request['symbol'] = market['id']
        if limit is not None:
            request['limit'] = limit  # default 20, max 100
        # dispatch to the concrete endpoint by name
        response = getattr(self, method)(self.extend(request, params))
        #
        # {
        #     "errno": 0,
        #     "errmsg": "success",
        #     "result": [
        #         {
        #             "order_sn": "BL74426415849672087836G48N1",
        #             "symbol": "ETH-USDT",
        #             "ctime": "2020-03-23 20:40:08",
        #             "type": 2,
        #             "side": "buy",
        #             "price": "90",
        #             "number": "0.1",
        #             "total_price": "9.0",
        #             "deal_number": null,
        #             "deal_price": null,
        #             "status": 1,
        #         }
        #     ]
        # }
        #
        result = self.safe_value(response, 'result', [])
        return self.parse_orders(result, market, since, limit)
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch all currently open orders via the current-entrust-list endpoint."""
        return self.fetch_orders_with_method('privateGetEntrustCurrentList', symbol, since, limit, params)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch historical(closed) orders via the history-entrust-list endpoint."""
        return self.fetch_orders_with_method('privateGetEntrustHistoryList', symbol, since, limit, params)
def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Create an order on the exchange.

        :param str symbol: unified market symbol
        :param str type: 'limit' or 'market'
        :param str side: 'buy' or 'sell'
        :param float amount: order amount; for market buys this is converted
            to a quote-currency cost(amount * price) unless
            options['createMarketBuyOrderRequiresPrice'] is disabled
        :param float price: required for limit orders and, by default, for
            market buys(to derive the cost)
        :param dict params: extra parameters merged into the request
        :returns: a unified order structure
        :raises InvalidOrder: for a market buy without a price while
            createMarketBuyOrderRequiresPrice(default True) is enabled
        """
        self.load_markets()
        market = self.market(symbol)
        # the exchange expects a combined type like 'buy-limit'
        orderType = side + '-' + type
        request = {
            'symbol': market['id'],
            'type': orderType,
        }
        if type == 'limit':
            request['amount'] = self.amount_to_precision(symbol, amount)
            request['price'] = self.price_to_precision(symbol, price)
        elif type == 'market':
            # for market buy it requires the amount of quote currency to spend
            if side == 'buy':
                createMarketBuyOrderRequiresPrice = self.safe_value(self.options, 'createMarketBuyOrderRequiresPrice', True)
                cost = amount
                if createMarketBuyOrderRequiresPrice:
                    if price is not None:
                        cost = amount * price
                    else:
                        raise InvalidOrder(self.id + " createOrder() requires the price argument with market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False and supply the total cost value in the 'amount' argument")
                # NOTE(review): the quote-denominated cost is truncated with the
                # PRICE precision -- looks intentional, confirm against API docs
                precision = market['precision']['price']
                request['amount'] = self.decimal_to_precision(cost, TRUNCATE, precision, self.precisionMode)
            else:
                request['amount'] = self.amount_to_precision(symbol, amount)
        response = self.privatePostEntrustAdd(self.extend(request, params))
        #
        # {
        #     errno: 0,
        #     errmsg: 'success',
        #     result: {order_sn: 'BM7442641584965237751ZMAKJ5'}
        # }
        #
        result = self.safe_value(response, 'result', {})
        order = self.parse_order(result, market)
        timestamp = self.milliseconds()
        # the endpoint only returns the order id, so fill in what we know locally
        return self.extend(order, {
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'amount': amount,
            'price': price,
            'type': type,
            'side': side,
        })
def cancel_order(self, id, symbol=None, params={}):
        """Cancel a single open order by its exchange id.

        :raises OrderNotFound: when the exchange does not list the id among
            the successfully canceled orders
        :returns: a minimal unified order structure with status 'canceled'
        """
        self.load_markets()
        #
        # {
        #     "errno": 0,
        #     "errmsg": "success",
        #     "result": {
        #         "success": ["avl12121", "bl3123123"],
        #         "failed": ["sd24564", "sdf6564564"]
        #     }
        # }
        #
        response = self.privatePostEntrustCancel(self.extend({'order_ids': id}, params))
        result = self.safe_value(response, 'result', {})
        succeeded = self.safe_value(result, 'success', [])
        if not self.in_array(id, succeeded):
            raise OrderNotFound(self.id + ' order id ' + id + ' not found in successfully canceled orders: ' + self.json(response))
        return {
            'info': response,
            'id': id,
            'timestamp': None,
            'datetime': self.iso8601(None),
            'lastTradeTimestamp': None,
            'status': 'canceled',
            'symbol': symbol,
            'type': None,
            'side': None,
            'price': None,
            'cost': None,
            'average': None,
            'amount': None,
            'filled': None,
            'remaining': None,
            'trades': None,
            'fee': None,
            'clientOrderId': None,
        }
def cancel_all_orders(self, symbol=None, params={}):
        """Cancel every open order on a single market.

        :param str symbol: unified market symbol(required -- the endpoint
            cancels per market)
        :raises ArgumentsRequired: when no symbol is given
        :returns: the raw exchange response
        """
        if symbol is None:
            raise ArgumentsRequired(self.id + ' cancelAllOrders() requires a symbol argument')
        self.load_markets()
        market = self.market(symbol)
        #
        # {
        #     "errno": 0,
        #     "errmsg": "success",
        #     "result": {
        #         "success": ["avl12121", "bl3123123"],
        #         "failed": ["sd24564", "sdf6564564"]
        #     }
        # }
        #
        return self.privatePostEntrustCancel(self.extend({'symbol': market['id']}, params))
def nonce(self):
        """Return a monotonically increasing nonce(current time in milliseconds)."""
        return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the URL, headers and body for an API request.

        Public endpoints get a plain query string.  Private endpoints are
        signed: a SHA1 hash is computed over the lexicographically sorted
        concatenation of the api key, the secret, a nonce string and every
        'key=value' parameter pair, and sent via Nonce/Token/Signature headers.
        """
        url = self.urls['api'][api] + '/' + path
        keys = list(params.keys())
        keysLength = len(keys)
        if api == 'public':
            if keysLength > 0:
                url += '?' + self.urlencode(params)
        else:
            # nonce string is '<milliseconds>_<5 random uuid chars>'
            nonce = str(self.nonce())
            uuid = self.uuid()
            randomString = uuid[0:5]
            nonceString = nonce + '_' + randomString
            # each entry keyed by itself so keysort orders key/secret/nonce
            # together with the parameter pairs before concatenation
            auth = {}
            auth[self.apiKey] = self.apiKey
            auth[self.secret] = self.secret
            auth[nonceString] = nonceString
            for i in range(0, keysLength):
                key = keys[i]
                auth[key] = key + '=' + params[key]
            keysorted = self.keysort(auth)
            stringToSign = ''
            keys = list(keysorted.keys())
            for i in range(0, len(keys)):
                key = keys[i]
                stringToSign += keysorted[key]
            signature = self.hash(self.encode(stringToSign), 'sha1')
            headers = {
                'Nonce': nonceString,
                'Token': self.apiKey,
                'Signature': signature,
            }
            # POST sends the params as a form body, GET keeps them in the URL
            if method == 'POST':
                headers['Content-Type'] = 'application/x-www-form-urlencoded'
                if keysLength > 0:
                    body = self.urlencode(params)
            else:
                if keysLength > 0:
                    url += '?' + self.urlencode(params)
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
        """Translate exchange error payloads into ccxt exceptions.

        A non-zero 'errno' marks an error and 'errmsg' carries the text;
        unknown codes fall through to a generic ExchangeError.
        """
        if response is None:
            return
        #
        # {"errno":20501,"errmsg":"base symbol error"}
        #
        error = self.safe_string(response, 'errno')
        if (error is not None) and (error != '0'):
            message = self.safe_string(response, 'errmsg')
            feedback = self.id + ' ' + body
            # prefer an exact match on the numeric code, then a broad text match
            self.throw_exactly_matched_exception(self.exceptions['exact'], error, feedback)
            self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
            raise ExchangeError(feedback)  # unknown message
|
import pytest
import tardis.montecarlo.montecarlo_numba.numba_interface as numba_interface
import numpy.testing as npt
import numpy as np
@pytest.mark.parametrize(
    ["current_shell_id", "nu"],
    [(0, 0.6), (1, 0.4)]
)
def test_continuum_opacities(
        verysimple_continuum, current_shell_id, nu):
    """Smoke test: continuum opacity calculation runs without raising.

    Renamed from the misspelled ``test_coninuum_opacities``; the old name
    was also redefined by a later function in this module, so this test
    was silently shadowed and never collected by pytest.
    """
    continuum = verysimple_continuum
    continuum.calculate(nu, current_shell_id)
    print(current_shell_id, nu)
    print(continuum.chi_bf_tot)
    print(continuum.chi_bf_contributions)
    print(continuum.current_continuua)
    print(continuum.x_sect_bfs)
    print(continuum.chi_ff)
@pytest.mark.parametrize(
    "current_shell_id",
    [0, 1, 2]
)
def test_continuum_free_free_sampler(
        verysimple_continuum, current_shell_id):
    """Smoke test: sampling a free-free frequency for each shell should not raise."""
    continuum = verysimple_continuum
    nu = continuum.sample_nu_free_free(current_shell_id)
    print(nu)
@pytest.mark.parametrize(
    ["current_shell_id", "continuum_id"],
    [(0, 0), (1, 0)]
)
def test_continuum_free_bound_sampler(
        verysimple_continuum, current_shell_id, continuum_id):
    """Smoke test: sampling a free-bound frequency should not raise.

    Renamed: this function was previously defined as
    ``test_coninuum_opacities``, duplicating the opacity test's name and
    silently replacing it at import time -- and the name did not describe
    what is tested here(the free-bound sampler).
    """
    continuum = verysimple_continuum
    nu = continuum.sample_nu_free_bound(current_shell_id, continuum_id)
    print(nu)
@pytest.mark.parametrize("input_params", ["scatter", "macroatom", "downbranch"])
def test_numba_plasma_initialize(nb_simulation_verysimple, input_params):
    """Check numba_plasma_initialize mirrors the plasma state for each
    line interaction type.

    For 'scatter' the macro-atom arrays must be size-1 placeholders; for
    'macroatom'/'downbranch' they must match the plasma's atomic data.
    """
    line_interaction_type = input_params
    plasma = nb_simulation_verysimple.plasma
    actual = numba_interface.numba_plasma_initialize(
        plasma, line_interaction_type
    )
    # fields copied verbatim for every interaction type
    npt.assert_allclose(
        actual.electron_density, plasma.electron_densities.values
    )
    npt.assert_allclose(actual.line_list_nu, plasma.atomic_data.lines.nu.values)
    npt.assert_allclose(actual.tau_sobolev, plasma.tau_sobolevs.values)
    if line_interaction_type == "scatter":
        # scatter mode carries dummy 1-element macro-atom arrays
        empty = np.zeros(1, dtype=np.int64)
        npt.assert_allclose(
            actual.transition_probabilities, np.zeros((1, 1), dtype=np.float64)
        )
        npt.assert_allclose(actual.line2macro_level_upper, empty)
        npt.assert_allclose(actual.macro_block_references, empty)
        npt.assert_allclose(actual.transition_type, empty)
        npt.assert_allclose(actual.destination_level_id, empty)
        npt.assert_allclose(actual.transition_line_id, empty)
    else:
        npt.assert_allclose(
            actual.transition_probabilities,
            plasma.transition_probabilities.values,
        )
        npt.assert_allclose(
            actual.line2macro_level_upper,
            plasma.atomic_data.lines_upper2macro_reference_idx,
        )
        npt.assert_allclose(
            actual.macro_block_references,
            plasma.atomic_data.macro_atom_references["block_references"].values,
        )
        npt.assert_allclose(
            actual.transition_type,
            plasma.atomic_data.macro_atom_data["transition_type"].values,
        )
        npt.assert_allclose(
            actual.destination_level_id,
            plasma.atomic_data.macro_atom_data["destination_level_idx"].values,
        )
        npt.assert_allclose(
            actual.transition_line_id,
            plasma.atomic_data.macro_atom_data["lines_idx"].values,
        )
@pytest.mark.xfail(reason="To be implemented")
def test_configuration_initialize():
    """Placeholder: configuration initialization test not written yet."""
    assert False
def test_VPacketCollection_set_properties(verysimple_3vpacket_collection):
    """Insert four v-packets into a capacity-3 collection and verify that
    every per-packet array stores the inserted values in order.
    """
    assert verysimple_3vpacket_collection.length == 0
    nus = [3.0e15, 0.0, 1e15, 1e5]
    energies = [0.4, 0.1, 0.6, 1e10]
    initial_mus = [.1, 0, 1, .9]
    initial_rs = [3e42, 4.5e45, 0, 9.0e40]
    last_interaction_in_nus = np.array(
        [3.0e15, 0.0, 1e15, 1e5], dtype=np.float64
    )
    last_interaction_types = np.array([1, 1, 3, 2], dtype=np.int64)
    last_interaction_in_ids = np.array([100, 0, 1, 1000], dtype=np.int64)
    last_interaction_out_ids = np.array([1201, 123, 545, 1232], dtype=np.int64)
    # the 4th insert exceeds the initial capacity of 3, forcing a grow
    for (
        nu,
        energy,
        initial_mu,
        initial_r,
        last_interaction_in_nu,
        last_interaction_type,
        last_interaction_in_id,
        last_interaction_out_id,
    ) in zip(
        nus,
        energies,
        initial_mus,
        initial_rs,
        last_interaction_in_nus,
        last_interaction_types,
        last_interaction_in_ids,
        last_interaction_out_ids,
    ):
        verysimple_3vpacket_collection.set_properties(
            nu,
            energy,
            initial_mu,
            initial_r,
            last_interaction_in_nu,
            last_interaction_type,
            last_interaction_in_id,
            last_interaction_out_id,
        )
    # only the first `idx` entries are populated; slice before comparing
    npt.assert_array_equal(
        verysimple_3vpacket_collection.nus[
            : verysimple_3vpacket_collection.idx
        ],
        nus,
    )
    npt.assert_array_equal(
        verysimple_3vpacket_collection.energies[
            : verysimple_3vpacket_collection.idx
        ],
        energies,
    )
    npt.assert_array_equal(
        verysimple_3vpacket_collection.initial_mus[
            : verysimple_3vpacket_collection.idx
        ],
        initial_mus,
    )
    npt.assert_array_equal(
        verysimple_3vpacket_collection.initial_rs[
            : verysimple_3vpacket_collection.idx
        ],
        initial_rs,
    )
    npt.assert_array_equal(
        verysimple_3vpacket_collection.last_interaction_in_nu[
            : verysimple_3vpacket_collection.idx
        ],
        last_interaction_in_nus,
    )
    npt.assert_array_equal(
        verysimple_3vpacket_collection.last_interaction_type[
            : verysimple_3vpacket_collection.idx
        ],
        last_interaction_types,
    )
    npt.assert_array_equal(
        verysimple_3vpacket_collection.last_interaction_in_id[
            : verysimple_3vpacket_collection.idx
        ],
        last_interaction_in_ids,
    )
    npt.assert_array_equal(
        verysimple_3vpacket_collection.last_interaction_out_id[
            : verysimple_3vpacket_collection.idx
        ],
        last_interaction_out_ids,
    )
    # NOTE(review): 9 appears to be the capacity after auto-growth from 3;
    # confirm the growth factor if the collection implementation changes
    assert verysimple_3vpacket_collection.length == 9
|
# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
"""
Image augmentation functions
"""
import math
import random
import cv2
import numpy as np
from utils.general import LOGGER, check_version, colorstr, resample_segments, segment2box
from utils.metrics import bbox_ioa
class Albumentations:
    """YOLOv5 Albumentations wrapper(optional, only used if package is installed).

    If the albumentations package is missing or fails version checks,
    self.transform stays None and __call__ becomes a no-op.
    """

    def __init__(self):
        self.transform = None
        try:
            import albumentations as A
            check_version(A.__version__, '1.0.3', hard=True)  # version requirement
            # mild photometric transforms; entries with p=0.0 are disabled defaults
            self.transform = A.Compose([
                A.Blur(p=0.01),
                A.MedianBlur(p=0.01),
                A.ToGray(p=0.01),
                A.CLAHE(p=0.01),
                A.RandomBrightnessContrast(p=0.0),
                A.RandomGamma(p=0.0),
                A.ImageCompression(quality_lower=75, p=0.0)],
                bbox_params=A.BboxParams(format='yolo', label_fields=['class_labels']))
            LOGGER.info(colorstr('albumentations: ') + ', '.join(f'{x}' for x in self.transform.transforms if x.p))
        except ImportError:  # package not installed, skip
            pass
        except Exception as e:
            LOGGER.info(colorstr('albumentations: ') + f'{e}')

    def __call__(self, im, labels, p=1.0):
        # Apply the pipeline with probability p; labels rows are [cls, x, y, w, h]
        if self.transform and random.random() < p:
            new = self.transform(image=im, bboxes=labels[:, 1:], class_labels=labels[:, 0])  # transformed
            im, labels = new['image'], np.array([[c, *b] for c, b in zip(new['class_labels'], new['bboxes'])])
        return im, labels
def augment_hsv(im, hgain=0.5, sgain=0.5, vgain=0.5):
    # HSV color-space augmentation: randomly scales hue/sat/value of `im` in place
    if hgain or sgain or vgain:
        # per-channel random gains in [1 - gain, 1 + gain]
        r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1  # random gains
        hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV))
        dtype = im.dtype  # uint8
        # build 256-entry lookup tables once instead of scaling every pixel
        x = np.arange(0, 256, dtype=r.dtype)
        lut_hue = ((x * r[0]) % 180).astype(dtype)  # OpenCV 8-bit hue range is 0-179
        lut_sat = np.clip(x * r[1], 0, 255).astype(dtype)
        lut_val = np.clip(x * r[2], 0, 255).astype(dtype)
        im_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val)))
        cv2.cvtColor(im_hsv, cv2.COLOR_HSV2BGR, dst=im)  # no return needed
def hist_equalize(im, clahe=True, bgr=False):
    # Equalize the luma of a 3-channel image (values 0-255) in YUV space so
    # colors are preserved; `bgr` selects the input/output channel order.
    src_code = cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV
    dst_code = cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB
    yuv = cv2.cvtColor(im, src_code)
    luma = yuv[:, :, 0]
    if clahe:
        # contrast-limited adaptive equalization avoids over-amplifying noise
        yuv[:, :, 0] = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)).apply(luma)
    else:
        yuv[:, :, 0] = cv2.equalizeHist(luma)
    return cv2.cvtColor(yuv, dst_code)
def replicate(im, labels):
    # Replicate labels: copy the smaller half of the boxes to random free
    # positions in the image (in place) and append matching label rows.
    h, w = im.shape[:2]
    boxes = labels[:, 1:].astype(int)
    x1, y1, x2, y2 = boxes.T
    s = ((x2 - x1) + (y2 - y1)) / 2  # side length (pixels)
    for i in s.argsort()[:round(s.size * 0.5)]:  # smallest indices
        x1b, y1b, x2b, y2b = boxes[i]
        bh, bw = y2b - y1b, x2b - x1b
        # random top-left corner that keeps the copied box inside the image
        yc, xc = int(random.uniform(0, h - bh)), int(random.uniform(0, w - bw))  # offset x, y
        x1a, y1a, x2a, y2a = [xc, yc, xc + bw, yc + bh]
        im[y1a:y2a, x1a:x2a] = im[y1b:y2b, x1b:x2b]  # im4[ymin:ymax, xmin:xmax]
        labels = np.append(labels, [[labels[i, 0], x1a, y1a, x2a, y2a]], axis=0)
    return im, labels
def letterbox(im, new_shape=(640, 640), color=(114, 114, 114), auto=True, scaleFill=False, scaleup=True, stride=32):
    """Resize and pad `im` to `new_shape` while meeting stride-multiple constraints.

    :param im: HWC image array
    :param new_shape: target(height, width), or an int for a square target
    :param color: padding color
    :param auto: pad only up to the next multiple of `stride`(minimum rectangle)
    :param scaleFill: stretch to fill instead of padding
    :param scaleup: allow upscaling(disable for better val mAP)
    :returns: (padded image, (w_ratio, h_ratio), (dw, dh) padding per side)
    """
    shape = im.shape[:2]  # current shape [height, width]
    if isinstance(new_shape, int):
        new_shape = (new_shape, new_shape)
    # Scale ratio (new / old)
    r = min(new_shape[0] / shape[0], new_shape[1] / shape[1])
    if not scaleup:  # only scale down, do not scale up (for better val mAP)
        r = min(r, 1.0)
    # Compute padding
    ratio = r, r  # width, height ratios
    new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r))
    dw, dh = new_shape[1] - new_unpad[0], new_shape[0] - new_unpad[1]  # wh padding
    if auto:  # minimum rectangle
        dw, dh = np.mod(dw, stride), np.mod(dh, stride)  # wh padding
    elif scaleFill:  # stretch
        dw, dh = 0.0, 0.0
        new_unpad = (new_shape[1], new_shape[0])
        ratio = new_shape[1] / shape[1], new_shape[0] / shape[0]  # width, height ratios
    dw /= 2  # divide padding into 2 sides
    dh /= 2
    if shape[::-1] != new_unpad:  # resize
        im = cv2.resize(im, new_unpad, interpolation=cv2.INTER_LINEAR)
    # the 0.1 offsets split odd padding one pixel asymmetrically
    top, bottom = int(round(dh - 0.1)), int(round(dh + 0.1))
    left, right = int(round(dw - 0.1)), int(round(dw + 0.1))
    im = cv2.copyMakeBorder(im, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color)  # add border
    return im, ratio, (dw, dh)
def random_perspective(im, targets=(), segments=(), degrees=10, translate=.1, scale=.1, shear=10, perspective=0.0,
                       border=(0, 0)):
    """Apply a random perspective/affine warp to `im` and its targets.

    Composes center, perspective, rotation+scale, shear and translation
    matrices, warps the image, then transforms the boxes(or segments) and
    filters out degenerate candidates.

    :param targets: nx5 array [cls, x1, y1, x2, y2]
    :param segments: optional per-target polygon segments
    :param border: extra border(mosaic) added to each side; negative crops
    :returns: (warped image, surviving targets)
    """
    # torchvision.transforms.RandomAffine(degrees=(-10, 10), translate=(0.1, 0.1), scale=(0.9, 1.1), shear=(-10, 10))
    # targets = [cls, xyxy]
    height = im.shape[0] + border[0] * 2  # shape(h,w,c)
    width = im.shape[1] + border[1] * 2
    # Center
    C = np.eye(3)
    C[0, 2] = -im.shape[1] / 2  # x translation (pixels)
    C[1, 2] = -im.shape[0] / 2  # y translation (pixels)
    # Perspective
    P = np.eye(3)
    P[2, 0] = random.uniform(-perspective, perspective)  # x perspective (about y)
    P[2, 1] = random.uniform(-perspective, perspective)  # y perspective (about x)
    # Rotation and Scale
    R = np.eye(3)
    a = random.uniform(-degrees, degrees)
    # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations
    s = random.uniform(1 - scale, 1 + scale)
    # s = 2 ** random.uniform(-scale, scale)
    R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s)
    # Shear
    S = np.eye(3)
    S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180)  # x shear (deg)
    S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180)  # y shear (deg)
    # Translation
    T = np.eye(3)
    T[0, 2] = random.uniform(0.5 - translate, 0.5 + translate) * width  # x translation (pixels)
    T[1, 2] = random.uniform(0.5 - translate, 0.5 + translate) * height  # y translation (pixels)
    # Combined rotation matrix
    M = T @ S @ R @ P @ C  # order of operations (right to left) is IMPORTANT
    if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any():  # image changed
        if perspective:
            im = cv2.warpPerspective(im, M, dsize=(width, height), borderValue=(114, 114, 114))
        else:  # affine
            im = cv2.warpAffine(im, M[:2], dsize=(width, height), borderValue=(114, 114, 114))
    # Visualize
    # import matplotlib.pyplot as plt
    # ax = plt.subplots(1, 2, figsize=(12, 6))[1].ravel()
    # ax[0].imshow(im[:, :, ::-1]) # base
    # ax[1].imshow(im2[:, :, ::-1]) # warped
    # Transform label coordinates
    n = len(targets)
    if n:
        use_segments = any(x.any() for x in segments)
        new = np.zeros((n, 4))
        if use_segments:  # warp segments
            segments = resample_segments(segments)  # upsample
            for i, segment in enumerate(segments):
                # homogeneous coordinates for the matrix transform
                xy = np.ones((len(segment), 3))
                xy[:, :2] = segment
                xy = xy @ M.T  # transform
                xy = xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2]  # perspective rescale or affine
                # clip
                new[i] = segment2box(xy, width, height)
        else:  # warp boxes
            # all 4 corners of each box, so rotation/shear are handled correctly
            xy = np.ones((n * 4, 3))
            xy[:, :2] = targets[:, [1, 2, 3, 4, 1, 4, 3, 2]].reshape(n * 4, 2)  # x1y1, x2y2, x1y2, x2y1
            xy = xy @ M.T  # transform
            xy = (xy[:, :2] / xy[:, 2:3] if perspective else xy[:, :2]).reshape(n, 8)  # perspective rescale or affine
            # create new boxes
            x = xy[:, [0, 2, 4, 6]]
            y = xy[:, [1, 3, 5, 7]]
            new = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T
            # clip
            new[:, [0, 2]] = new[:, [0, 2]].clip(0, width)
            new[:, [1, 3]] = new[:, [1, 3]].clip(0, height)
        # filter candidates
        i = box_candidates(box1=targets[:, 1:5].T * s, box2=new.T, area_thr=0.01 if use_segments else 0.10)
        targets = targets[i]
        targets[:, 1:5] = new[i]
    return im, targets
def copy_paste(im, labels, segments, p=0.5):
    # Implement Copy-Paste augmentation https://arxiv.org/abs/2012.07177, labels as nx5 np.array(cls, xyxy)
    # Pastes horizontally-mirrored copies of ~p*n segment instances back onto
    # `im` when the mirrored box obscures existing labels by less than 30%.
    n = len(segments)
    if p and n:
        h, w, c = im.shape  # height, width, channels
        im_new = np.zeros(im.shape, np.uint8)
        for j in random.sample(range(n), k=round(p * n)):
            l, s = labels[j], segments[j]
            # mirror the box across the vertical image axis
            box = w - l[3], l[2], w - l[1], l[4]
            ioa = bbox_ioa(box, labels[:, 1:5])  # intersection over area
            if (ioa < 0.30).all():  # allow 30% obscuration of existing labels
                labels = np.concatenate((labels, [[l[0], *box]]), 0)
                segments.append(np.concatenate((w - s[:, 0:1], s[:, 1:2]), 1))
                cv2.drawContours(im_new, [segments[j].astype(np.int32)], -1, (255, 255, 255), cv2.FILLED)
        # mask out the selected instances, flip them, and paste onto `im`
        result = cv2.bitwise_and(src1=im, src2=im_new)
        result = cv2.flip(result, 1)  # augment segments (flip left-right)
        i = result > 0  # pixels to replace
        # i[:, :] = result.max(2).reshape(h, w, 1)  # act over ch
        im[i] = result[i]  # cv2.imwrite('debug.jpg', im)  # debug
    return im, labels, segments
def cutout(im, labels, p=0.5):
    # Applies image cutout augmentation https://arxiv.org/abs/1708.04552
    # Mutates `im` in place with randomly colored rectangles and returns only
    # the labels that stay sufficiently unobscured (note: im is not returned).
    if random.random() < p:
        h, w = im.shape[:2]
        scales = [0.5] * 1 + [0.25] * 2 + [0.125] * 4 + [0.0625] * 8 + [0.03125] * 16  # image size fraction
        for s in scales:
            mask_h = random.randint(1, int(h * s))  # create random masks
            mask_w = random.randint(1, int(w * s))
            # box
            xmin = max(0, random.randint(0, w) - mask_w // 2)
            ymin = max(0, random.randint(0, h) - mask_h // 2)
            xmax = min(w, xmin + mask_w)
            ymax = min(h, ymin + mask_h)
            # apply random color mask
            im[ymin:ymax, xmin:xmax] = [random.randint(64, 191) for _ in range(3)]
            # return unobscured labels
            if len(labels) and s > 0.03:
                box = np.array([xmin, ymin, xmax, ymax], dtype=np.float32)
                ioa = bbox_ioa(box, labels[:, 1:5])  # intersection over area
                labels = labels[ioa < 0.60]  # remove >60% obscured labels
    return labels
def mixup(im, labels, im2, labels2):
    # MixUp augmentation (https://arxiv.org/pdf/1710.09412.pdf):
    # blend the two images with a Beta(32, 32) ratio and pool their labels.
    ratio = np.random.beta(32.0, 32.0)  # concentrated around 0.5
    blended = (im * ratio + im2 * (1 - ratio)).astype(np.uint8)
    merged_labels = np.concatenate((labels, labels2), 0)
    return blended, merged_labels
def box_candidates(box1, box2, wh_thr=2, ar_thr=100, area_thr=0.1, eps=1e-16):  # box1(4,n), box2(4,n)
    """Vectorized filter for boxes that survive augmentation.

    box1 holds boxes before augmentation, box2 after; both are (4, n) arrays
    of x1, y1, x2, y2.  A candidate must exceed wh_thr pixels per dimension,
    retain more than area_thr of its original area, and keep its aspect
    ratio below ar_thr.
    """
    width_before = box1[2] - box1[0]
    height_before = box1[3] - box1[1]
    width_after = box2[2] - box2[0]
    height_after = box2[3] - box2[1]
    # symmetric aspect ratio; eps guards against division by zero
    aspect = np.maximum(width_after / (height_after + eps), height_after / (width_after + eps))
    area_ratio = width_after * height_after / (width_before * height_before + eps)
    big_enough = (width_after > wh_thr) & (height_after > wh_thr)
    return big_enough & (area_ratio > area_thr) & (aspect < ar_thr)  # candidates
|
import numpy
from scipy.integrate import trapz
from skipi.function import Function, evaluate
from skipi.domain import Domain
class FourierTransform(Function):
    """Fourier transform of a sampled function via trapezoidal quadrature.

    Computes F[f](w) = integral of f(t) * exp(-i*w*t) dt for every w in the
    requested frequency domain.
    """
    @classmethod
    def to_function(cls, domain, feval, frequency_domain):
        """Transform `feval`(callable or samples on `domain`) and return a
        Function defined on `frequency_domain`."""
        #TODO: frequency_domain as Domain?
        #domain = Domain.from_domain(domain)
        #frequency_domain = Domain.from_domain(frequency_domain)
        #dom = domain.get()
        #freq_dom = frequency_domain.get()
        # frequency column times time row -> (len(w), len(t)) phase matrix
        w = numpy.array(frequency_domain).reshape((len(frequency_domain), 1))
        feval = evaluate(domain, feval)
        t_domain = numpy.array(domain).reshape((1, len(domain)))
        integrand = feval * numpy.exp(- 1j * numpy.dot(w, t_domain))
        # integrate each row over t with the trapezoidal rule
        F = trapz(integrand, dx=Domain.get_dx(domain))
        return Function.to_function(frequency_domain, F)

    @classmethod
    def from_function(cls, frequency_domain, fun: Function):
        """Convenience overload accepting a Function instead of (domain, feval)."""
        return cls.to_function(fun.get_domain(), fun.get_function(), frequency_domain)
class InverseFourierTransform(Function):
    """Inverse Fourier transform via trapezoidal quadrature.

    Computes F^-1[R](x) = 1/(2*pi) * integral of R(w) * exp(+i*x*w) dw for
    every x in the requested x domain.
    """
    @classmethod
    def to_function(cls, frequency_domain, feval, x_domain):
        """Inverse-transform `feval`(known on `frequency_domain`) and return a
        Function defined on `x_domain`."""
        # TODO: frequency_domain as Domain?
        w = numpy.array(x_domain).reshape((len(x_domain), 1))
        domain = numpy.array(frequency_domain).reshape((1, len(frequency_domain)))
        feval = evaluate(domain, feval)
        integrand = feval * numpy.exp(1j * numpy.dot(w, domain))
        # 1/(2*pi) normalization of the inverse transform
        F = 1 / (2 * numpy.pi) * trapz(integrand, dx=Domain.get_dx(frequency_domain))
        return Function.to_function(x_domain, F)

    @classmethod
    def from_function(cls, x_domain, fun: Function):
        """Convenience overload accepting a Function instead of (domain, feval)."""
        return cls.to_function(fun.get_domain(), fun.get_function(), x_domain)
class InverseCosineTransform(InverseFourierTransform):
    """Inverse cosine transform: 1/pi * integral of R(w) * cos(x*w) dw.

    Real-valued variant of the inverse Fourier transform; note the 1/pi
    (not 1/(2*pi)) normalization.
    """
    @classmethod
    def to_function(cls, frequency_domain, feval, x_domain):
        """Inverse-cosine-transform `feval` and return a Function on `x_domain`."""
        dx = Domain.get_dx(frequency_domain)
        w = numpy.array(x_domain).reshape((len(x_domain), 1))
        domain = numpy.array(frequency_domain).reshape((1, len(frequency_domain)))
        feval = evaluate(domain, feval)
        F = 1 / (numpy.pi) * trapz(feval * numpy.cos(numpy.dot(w, domain)), dx=dx)
        return Function.to_function(x_domain, F)
class CosineTransform(FourierTransform):
    """Forward cosine transform: integral of f(w) * cos(x*w) dw (no prefactor).

    NOTE(review): the parameter names (frequency_domain, feval, x_domain)
    mirror the inverse transform even though this is the forward variant,
    unlike FourierTransform.to_function(domain, feval, frequency_domain) --
    confirm callers pass arguments in this order.
    """
    @classmethod
    def to_function(cls, frequency_domain, feval, x_domain):
        """Cosine-transform `feval`(known on `frequency_domain`) and return a
        Function defined on `x_domain`."""
        dx = Domain.get_dx(frequency_domain)
        w = numpy.array(x_domain).reshape((len(x_domain), 1))
        domain = numpy.array(frequency_domain).reshape((1, len(frequency_domain)))
        feval = evaluate(domain, feval)
        F = trapz(feval * numpy.cos(numpy.dot(w, domain)), dx=dx)
        return Function.to_function(x_domain, F)
def fourier_matrix(t_space, f_space):
    """Return the matrix A with A[i, j] = w_j * exp(-1j * f_space[i] * t_space[j]),

    where w_j are trapezoidal quadrature weights (dt, halved at both ends),
    so that numpy.dot(A, g) approximates the Fourier transform of g
    (sampled on t_space) at the frequencies in f_space.

    :param t_space: equidistant time grid where the function is sampled
    :param f_space: frequencies at which the transform is evaluated
    :return: complex matrix of shape (len(f_space), len(t_space))
    :raises RuntimeError: if the spacing of t_space is zero
    """
    # copy, so the in-place scaling below never touches the caller's array
    times = numpy.array(t_space)
    dt = times[1] - times[0]
    if dt == 0:
        raise RuntimeError("Given t_space has an incorrect format")
    freqs = numpy.array(f_space).reshape((len(f_space), 1))
    row_times = times.reshape((1, len(times)))
    kernel = numpy.exp(-1j * numpy.dot(freqs, row_times))
    # trapezoidal rule: the end points carry half weight
    kernel[:, 0] *= 0.5
    kernel[:, -1] *= 0.5
    return kernel * dt
def invfourier_matrix(f_space, t_space):
    r"""Build the matrix of the discretized inverse Fourier transform.

    For a function :math:`R` known at the equidistant frequencies in
    ``f_space``, the returned matrix A satisfies

    ..math::
        F^{-1}[R](t_i) \approx (A \cdot R)[i], \qquad
        F^{-1}[R](t) = \frac{1}{2\pi} \int_{RR} e^{itf} R(f) \, df

    for every :math:`t_i` in ``t_space``, with the integral approximated by
    the trapezoidal rule (the two end columns carry half weight).

    :Example:
    >>> x_space = numpy.linspace(0, 200, 200)
    >>> k_space = numpy.linspace(-0.1, 0.1, 400)
    >>> # R ... reflection evaluated at k_space, i.e. R = R_function(k_space)
    >>> A = invfourier_matrix(k_space, x_space)
    >>> V = numpy.dot(A, R)  # V evaluated at x_space

    :param f_space: frequency space where the function is known; must be
        equidistantly spaced
    :param t_space: points where the inverse transform shall be evaluated
    :return: complex matrix of shape (len(t_space), len(f_space))
    :raises RuntimeError: if the spacing of f_space is zero (df = 0)
    """
    # copy, so the in-place column scaling never touches the caller's array
    freqs = numpy.array(f_space)
    df = freqs[1] - freqs[0]
    if df == 0:
        raise RuntimeError("Given f_space has an incorrect format")
    col_times = numpy.array(t_space).reshape((len(t_space), 1))
    row_freqs = freqs.reshape((1, len(freqs)))
    kernel = numpy.exp(1j * numpy.dot(col_times, row_freqs))
    # trapezoidal rule: the end points carry half weight
    kernel[:, 0] *= 0.5
    kernel[:, -1] *= 0.5
    return 1 / (2 * numpy.pi) * kernel * df
|
#!/usr/bin/env python3
class Solution(object):
    def search(self, nums, target):
        """Search `target` in a rotated sorted array of distinct values.

        Binary search adapted to rotation: at every step at least one of the
        two halves is sorted; test whether target falls inside that sorted
        half to pick a direction.  O(log n) time, O(1) space.

        :type nums: List[int]
        :type target: int
        :rtype: int  -- index of target, or -1 if absent
        """
        if not nums:
            return -1
        low = 0
        high = len(nums) - 1
        while low <= high:
            # bug fix: '/' is true division in Python 3 and yields a float,
            # crashing on nums[mid]; '//' keeps mid an integer
            mid = (low + high) // 2
            if target == nums[mid]:
                return mid
            if nums[low] <= nums[mid]:  # left half [low, mid] is sorted
                if nums[low] <= target <= nums[mid]:
                    high = mid - 1
                else:
                    low = mid + 1
            else:  # right half [mid, high] is sorted
                if nums[mid] <= target <= nums[high]:
                    low = mid + 1
                else:
                    high = mid - 1
        return -1
if __name__ == "__main__":
    # quick manual check: 0 sits at index 4 in the rotated array
    solver = Solution()
    print(solver.search([4, 5, 6, 7, 0, 1, 2], 0))
|
from qtpy.QtWidgets import QWidget, QSizePolicy
class FormBaseWidget(QWidget):
    """Base widget for form panels: fixed 400 px width, minimal vertical size."""
    def __init__(self):
        super().__init__()
        # pin the width to exactly 400 px (min == max)
        self.setMaximumWidth(400)
        self.setMinimumWidth(400)
        # keep the horizontal policy, but let layouts shrink the widget
        # vertically to its size hint
        sp = self.sizePolicy()
        sp.setVerticalPolicy(QSizePolicy.Minimum)
        self.setSizePolicy(sp)
|
#: Max number of units of distance a ship can travel in a turn
MAX_SPEED = 7.0
#: Radius of a ship
SHIP_RADIUS = 0.5
#: Starting health of ship, also its max
MAX_SHIP_HEALTH = 255
#: Health a ship spawns with (equal to the maximum)
BASE_SHIP_HEALTH = 255
#: Number of turns it takes to destroy a ship (name-based; this constant was
#: mislabeled as ship health -- confirm against the game rules)
TURNS_TO_DESTROY_SHIP = 4
#: Weapon cooldown period
WEAPON_COOLDOWN = 1
#: Weapon damage radius
WEAPON_RADIUS = 5.0
#: Weapon damage
WEAPON_DAMAGE = 64
#: Radius in which explosions affect other entities
EXPLOSION_RADIUS = 10.0
#: Distance from the edge of the planet at which ships can try to dock
DOCK_RADIUS = 4.0
#: Number of turns it takes to dock a ship
DOCK_TURNS = 5
#: Number of production units per turn contributed by each docked ship
BASE_PRODUCTIVITY = 6
#: Number of production units to build a ship
PRODUCTION_UNITS_PER_SHIP = 72
#: Number of turns to build one ship by a single ship
BASE_PRODUCTIVITY_TURNS = 12
#: Distance from the planets edge at which new ships are created
SPAWN_RADIUS = 2.0
#: Distance between ships under which check for collisions
COLLISION_VICINITY = 5.0
|
import os
import sys
import yaml
import warnings
import cftime
import calendar
import numpy as np
import pandas as pd
import xarray as xr
import matplotlib.pyplot as plt
import util
# Analysis window applied to all station records
start_date = '1998-12-01'
end_date = '2020-03-01'
# Southern Ocean station codes (commented-out entries are excluded)
southern_ocean_stn_list = ['CRZ', 'MQA', 'DRP', 'PSA', 'SYO', 'CYA', 'MAA', 'HBA',]# 'BHD', 'CGO']
southern_ocean_stn_list_sf6 = ['HBA', 'SYO', 'PSA', 'DRP', 'CRZ', 'USH']
# Record ids: '<station>_<institution>_<platform>_<constituent>'
southern_ocean_records = [
    'HBA_NOAA_flask_CO2',
    'SYO_NOAA_flask_CO2',
    'SYO_TU_insitu_CO2',
    'MAA_CSIRO_flask_CO2',
    'CYA_CSIRO_flask_CO2',
    'PSA_NOAA_flask_CO2',
    'PSA_SIO_O2_flask_CO2',
    'DRP_NOAA_flask_CO2',
    'MQA_CSIRO_insitu_CO2',
    'CRZ_NOAA_flask_CO2',
]
# 'BHD_NIWA_insitu_CO2',
# 'CGO_CSIRO_insitu_CO2'
# sanity check: the 3-char station prefix of every record must be listed
for rec in southern_ocean_records:
    assert rec[:3] in southern_ocean_stn_list, 'SO records and SO stn list mismatch'
# South Pole records, used as the reference station
spo_records = [
    'SPO_NOAA_insitu_CO2',
    'SPO_NOAA_flask_CO2',
    'SPO_SIO_O2_flask_CO2',
    'SPO_SIO_CDK_flask_CO2',
    'SPO_CSIRO_flask_CO2'
]
# reference record per constituent (differences are computed against these)
reference_records = dict(
    CO2=['SPO_NOAA_insitu_CO2',],
    SF6=['SPO_NOAA_flask_SF6',],
)
# plot labels/units per constituent
attrs = dict(
    CO2=dict(
        long_name='CO$_2$',
        units='ppm'
    ),
    SF6=dict(
        long_name='SF$_6$',
        units='ppt'
    ),
)
def data_files(c, model=None):
    """Return the path of the monthly station-data file for constituent `c`.

    :param c: constituent name, 'CO2' or 'SF6' for observations; for models
        the name is looked up in that model's obs_data_paths mapping
    :param model: model key from data/model-description.yaml, or None/'obs'
        for the observational files
    :returns: path to the monthly text file, or None when `c` has no entry
        for the given model (a diagnostic is printed in that case)
    """
    model = 'obs' if model is None else model
    if model == 'obs':
        assert c in ['CO2', 'SF6']
        if c == 'SF6':
            file = 'data/surface-obs/SO_SF6_monthly.txt'
        else:
            file = 'data/surface-obs/SO_CO2_monthly.txt'
    else:
        with open('data/model-description.yaml', 'r') as fid:
            model_paths = yaml.safe_load(fid)[model]['obs_data_paths']
        if c in model_paths:
            key = c
        else:
            # no path registered for this constituent: report and return None
            print(c)
            print(model)
            print(model_paths)
            return
        # NOTE(review): the filename is hard-coded to SO_CO2_monthly.txt even
        # when c != 'CO2' -- confirm model output files are really named this way
        file = f'{model_paths[key]}/SO_CO2_monthly.txt'
    assert os.path.exists(file), f'missing {file}'
    return file
def get_stn_info(constituent='CO2'):
    """Load station metadata for `constituent` from its YAML locations file.

    :param constituent: 'CO2' or 'SF6'
    :returns: DataFrame indexed by '<record>_<constituent>' with the
        'include' column dropped, 'stncode' renamed to 'stn', and a
        'constituent' column added
    :raises ValueError: for any other constituent
    """
    if constituent=='CO2':
        file_info = 'data/surface-obs/SO_CO2_locations.yml'
    elif constituent=='SF6':
        file_info = 'data/surface-obs/SO_SF6_locations.yml'
    else:
        raise ValueError(f'unknown constituent: {constituent}')
    with open(file_info, 'r') as fid:
        df = pd.DataFrame(yaml.safe_load(fid)).transpose().drop('include', axis=1)
    df = df.rename(columns={'stncode': 'stn'})
    # suffix the constituent so CO2 and SF6 tables can be concatenated safely
    df.index = [f'{s}_{constituent}' for s in df.index]
    df['constituent'] = constituent
    assert df.index.is_unique, (
        'non-unique index'
    )
    return df
def read_stndata(file):
    """Read a monthly station-data text file into a date-indexed DataFrame.

    The constituent is inferred from the filename('_CO2_' or '_SF6_');
    station columns are suffixed with it.  Adds mid-month 'day', 'date',
    fractional-year and July-June 'polar_year' columns, and clips the rows
    to the module-level [start_date, end_date] window.

    :raises ValueError: when the filename identifies no known constituent
    """
    if '_CO2_' in file:
        constituent = 'CO2'
    elif '_SF6_' in file:
        constituent = 'SF6'
    else:
        raise ValueError('unknown constituent')
    # 9.96921e+42 is the fill value used in the data files
    df = pd.read_csv(file, header=0, sep='\s+', na_values=[9.96921e+42, 'NA'])
    # mid-month day-of-month, used as the nominal sample date
    day = lambda row_year_mon: util.eomday(row_year_mon[0], row_year_mon[1]) / 2
    df['day'] = df[['year', 'month']].apply(day, axis=1)
    df['date'] = pd.to_datetime(df[['year', 'month', 'day']])
    df['year_frac'] = util.year_frac(df.year.to_numpy(), df.month.to_numpy(), df.day.to_numpy())
    # polar year: Jul-Jun belongs to the following calendar year
    df['polar_year'] = df.year.where(df.month <= 6, df.year + 1)
    time_cols = ['date', 'year', 'month', 'day', 'year_frac', 'polar_year']
    stn_cols = list(set(df.columns) - set(time_cols))
    df = df[time_cols+stn_cols]
    df = df.loc[(start_date <= df.date) & (df.date <= end_date)]
    stn_cols = list(set(df.columns) - set(time_cols))
    df = df.rename({s: f'{s}_{constituent}' for s in stn_cols}, axis=1)
    return df.set_index('date')
def to_dataset(
    stninfo, stndata, constituent,
    station_list=None, plot_coverage=True, dropna=True, unique_stn=True,
    gap_fill=False,
):
    """Make an xarray DataArray of station time series from dataframes.

    Parameters
    ----------
    stninfo : pandas.DataFrame
        Station metadata (as from `get_stn_info`).
    stndata : pandas.DataFrame
        Monthly station data (as from `read_stndata`).
    constituent : str
        'CO2' or 'SF6'.
    station_list : list of str, optional
        Records to include; defaults to all records for `constituent`.
        (Default changed from `[]` to `None` to avoid a mutable default
        argument; empty list and None behave identically.)
    plot_coverage : bool
        If True, draw data-coverage plots.
    dropna : bool
        If True, drop time steps with any missing station value.
    unique_stn : bool
        If True, index the station dimension by station code ('stn');
        otherwise by full record name ('record').
    gap_fill : bool
        If True, linearly interpolate single-step gaps.

    Returns
    -------
    xarray.DataArray
    """
    if not station_list:
        station_list = list(stninfo.index[stninfo.constituent == constituent])
    station_list = list(filter(lambda s: s in stndata.columns, station_list))

    # sort by latitude
    # NOTE: `np.float` was removed in NumPy 1.20+; builtin `float` is the
    # documented replacement and is equivalent here
    lat = stninfo.loc[station_list].lat.to_numpy(dtype=float)
    I = np.argsort(lat)
    station_list = np.array(station_list)[I]

    data = stndata[station_list]
    info = stninfo.loc[station_list]
    if unique_stn:
        info = info.set_index('stn')
        record_or_stn = 'stn'
    else:
        record_or_stn = 'record'

    def visualize_coverage(data):
        """make a plot of data coverage"""
        plt.figure(figsize=(15, 6))
        plt.pcolormesh(
            data.index,
            np.arange(0, len(info)+1, 1),
            np.where(np.isnan(data.to_numpy()), np.nan, 1).T,
        )
        plt.yticks(np.arange(0, len(info), 1)+0.5)
        plt.gca().set_yticklabels(info.index)
        plt.grid(True)
        plt.grid(True, axis='x', which='minor')

    # apply gap filling procedure
    if gap_fill:
        data_filled = data.interpolate(method='linear', axis=0, limit=1, limit_direction='backward')
    else:
        data_filled = data

    if plot_coverage:
        visualize_coverage(data)
        plt.suptitle(f'{constituent} Station Coverage', fontsize=16, fontweight='bold')
        visualize_coverage(data_filled)
        plt.suptitle(f'{constituent} Station Coverage (gap filled)', fontsize=16, fontweight='bold')
        plt.show()

    # drop NaNs
    if dropna:
        data_filled = data_filled.dropna(axis=0)

    # assemble DataArray (`attrs` is a module-level table of per-variable
    # attributes -- assumed defined elsewhere in this module)
    return xr.DataArray(
        data_filled.to_numpy(),
        dims=('time', record_or_stn),
        coords={
            'time': xr.DataArray(data_filled.index.values, dims=('time')),
            'year_frac': xr.DataArray(stndata.year_frac.to_numpy().astype(float), dims=('time')),
            record_or_stn: xr.DataArray(info.index, dims=(record_or_stn)),
            'institution': xr.DataArray(info.institution, dims=(record_or_stn)),
            'lat': xr.DataArray(info.lat.to_numpy().astype(float), dims=(record_or_stn)),
            'lon': xr.DataArray(info.lon.to_numpy().astype(float), dims=(record_or_stn)),
            'stncode': xr.DataArray(info.stn, dims=(record_or_stn)),
        },
        name=constituent,
        attrs=attrs[constituent]
    )
def open_surface_co2_data(model, tracer):
    """Return a DataArray of surface-station CO2 data for a model/tracer.

    For composite tracers ('a+b' or TM5's 'CO2_SUM') the component tracers
    are loaded individually and summed.

    Parameters
    ----------
    model : str
        Model name, or 'obs' for observations.
    tracer : str
        Tracer name; may be a '+'-joined composite.

    Returns
    -------
    xarray.DataArray
    """
    def _load(tracer_name):
        """Load one tracer's station file as a DataArray (no plots, keep NaNs)."""
        file = data_files(tracer_name, model)
        return to_dataset(
            get_stn_info('CO2'),
            read_stndata(file),
            'CO2',
            plot_coverage=False, dropna=False, unique_stn=False, gap_fill=False,
        )

    if model == 'obs':
        return open_surface_data_obs(tracer)

    # TM5 labels total CO2 as the sum of its component tracers
    if 'TM5' in model and tracer == 'CO2':
        tracer = 'CO2_SUM'

    if '+' in tracer or tracer == 'CO2_SUM':
        tracers = (['CO2_OCN', 'CO2_LND', 'CO2_FFF', 'CO2_BKG']
                   if tracer == 'CO2_SUM' else tracer.split('+'))
        das_srf = _load(tracers[0])
        for subt in tracers[1:]:
            das_srf += _load(subt)
    else:
        das_srf = _load(tracer)

    # swap MQA_CSIRO_flask_CO2 for MQA_CSIRO_insitu_CO2 so model output
    # lines up with the observational record names
    assert 'MQA_CSIRO_flask_CO2' in das_srf.record
    assert 'MQA_CSIRO_insitu_CO2' not in das_srf.record
    ndx = np.where(das_srf.record == 'MQA_CSIRO_flask_CO2')[0]
    record = das_srf.record.copy()
    record.values[ndx] = 'MQA_CSIRO_insitu_CO2'
    das_srf['record'] = record

    return das_srf
def open_surface_data_obs(constituent='CO2'):
    """Return a DataArray of surface-station observations.

    Loads station metadata and monthly data for `constituent` ('CO2' or
    'SF6', case-insensitive) and, for CO2, patches a two-month gap in the
    NOAA in situ South Pole record by linear interpolation.
    """
    constituent = constituent.upper()
    assert constituent in ['CO2', 'SF6'], f'unknown constituent {constituent}'
    stninfo = get_stn_info(constituent)
    df = read_stndata(data_files(constituent))
    ds = to_dataset(
        stninfo, df, constituent,
        plot_coverage=False,
        dropna=False,
        unique_stn=False,
        gap_fill=False,
    )
    # fill gaps in NOAA in situ SPO record
    if constituent == 'CO2':
        idx_record = np.where(ds.record == 'SPO_NOAA_insitu_CO2')[0]
        # positions of the two missing months (Jan/Feb 2001)
        idx_time = np.append(
            np.where(ds.time == np.datetime64('2001-01-15'))[0],
            np.where(ds.time == np.datetime64('2001-02-14'))[0]
        )
        # bracketing time steps used as interpolation anchors
        idx_time_edges = np.append(
            idx_time[0]-1,
            idx_time[-1]+1,
        )
        gapfill_values = np.interp(
            ds.time.isel(time=idx_time),
            ds.time.isel(time=idx_time_edges),
            ds.isel(time=idx_time_edges, record=idx_record).squeeze()
        )
        # NOTE(review): positional assignment into the DataArray -- assumes
        # dims are ordered (time, record); verify against to_dataset
        ds[idx_time, idx_record] = gapfill_values[:, None]
    return ds
def filter_outliers(da, verbose=False, return_index=False):
    """Mask points beyond 3 standard deviations of each record's time mean.

    Parameters
    ----------
    da : xarray.DataArray
        Data with 'time' and 'record' dimensions.
    verbose : bool
        Print per-record counts of removed points.
    return_index : bool
        If True, return the boolean keep-mask instead of the masked data.
    """
    center = da.mean('time')
    spread = da.std('time')
    lower = center - 3 * spread
    upper = center + 3 * spread
    keep = (lower <= da) & (da <= upper)

    if verbose:
        rule = '-' * 80
        print(rule)
        print('filtering outliers: n points removed')
        for rec in da.record.values:
            n = da.sel(record=rec).notnull().sum().values
            n_removed = n - keep.sel(record=rec).sum().values
            print(f'\t{rec}: {n_removed}, ({100. * n_removed/n:0.2f}%)')
        print(rule)

    return keep if return_index else da.where(keep)
def seasonal_uncertainty(das_srf, season=None, verbose=False):
    """Estimate the uncertainty in the seasonal SO-minus-SPO gradient.

    Computes the spread among co-located SPO records for each season and
    propagates the worst-case (maximum) station error to the multi-station
    gradient estimate.

    Parameters
    ----------
    das_srf : xarray.DataArray
        Surface-station data with a 'record' dimension.
    season : unused
        Retained for backward compatibility; the estimate always takes the
        maximum error across all four seasons.
    verbose : bool
        Print a summary of the error terms.

    Returns
    -------
    float
        Gradient standard error (ppm).
    """
    # compute difference of SPO records from the SPO median
    # (`spo_records` / `southern_ocean_records` are module-level lists)
    das_spo_a = das_srf.sel(record=spo_records) - das_srf.sel(record=spo_records).median('record')

    seasons = ['DJF', 'MAM', 'JJA', 'SON']
    stn_errors = []
    # loop variable renamed so it no longer shadows the `season` parameter
    for ssn in seasons:
        das_spo_ssn = util.ann_mean(das_spo_a.to_dataset(), season=ssn, time_bnds_varname=None, n_req=2)
        # np.float was removed in NumPy 1.20+; builtin float is equivalent
        stn_errors.append(float(das_spo_ssn.CO2.mean('time').std('record', ddof=1).values))
    stn_error = max(stn_errors)

    n_stn = len(southern_ocean_records)
    n_rep = 2  # there are two stations with co-located records
    obs_gradient_std = np.sqrt(
        stn_error**2 +
        (n_stn - n_rep) * stn_error**2 / n_stn**2 +
        (2. * n_rep * stn_error**2) / ((2. * n_stn)**2)
    )
    if verbose:
        print('-'*60)
        print(f'n_stn = {n_stn}; n_rep = {n_rep}')
        print(f'stn_error = {stn_error:0.4f} ppm')
        print([f'{s}: {e:0.4f}' for s, e in zip(seasons, stn_errors)])
        print('-'*60)
        print(f'SO-SPO seasonal gradient error = {obs_gradient_std:0.4f} ppm')
        print('-'*60)
    return obs_gradient_std
def compute_DCO2y(da_srf, season):
    """compute the gradient metric from monthly surface data

    Parameters
    ----------
    da_srf : xarray.DataArray
        Monthly surface-station data with 'time' and 'record' dimensions.
    season : str or int
        One of 'DJF'/'MAM'/'JJA'/'SON' for a seasonal mean, or a month
        number (1-12) to select that month only.

    Returns
    -------
    xarray.Dataset
        Southern Ocean minus SPO gradient, averaged over station codes.

    Raises
    ------
    ValueError
        If `season` is not a recognized season name or month number.
    """
    warnings.filterwarnings(action='ignore', message='Mean of empty slice')

    if season in ['DJF', 'MAM', 'JJA', 'SON']:
        ds = util.ann_mean(da_srf.to_dataset(), season=season, time_bnds_varname=None)
        ds['time'] = ds.time + util.season_yearfrac[season]
    elif season in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]:
        months = (da_srf.time.values.astype('datetime64[M]').astype(int) % 12 + 1).astype(int)
        ndx = np.where(months == season)[0]
        ds = da_srf.isel(time=ndx).to_dataset()
        ds['time'] = util.year_frac(*util.datetime64_parts(ds.time))
    else:
        # previously an unrecognized season fell through to a NameError on `ds`
        raise ValueError(f'unknown season: {season}')

    for rec in southern_ocean_records:
        assert rec in ds.record, f'missing {rec}'
    assert 'SPO_NOAA_insitu_CO2' in ds.record, "missing 'SPO_NOAA_insitu_CO2'"

    return (
        (ds.sel(record=southern_ocean_records).groupby('stncode').mean('record') -
         ds.sel(record='SPO_NOAA_insitu_CO2')).mean('stncode')
    )
|
# coding=utf-8
# Copyright 2018-2020 EVA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import sys
from os.path import dirname, abspath, join
'''
To allow running eva_server from any location
'''
THIS_DIR = dirname(__file__)
EVA_CODE_DIR = abspath(join(THIS_DIR, '..'))
# Make the package root importable before the eva.* imports below run.
sys.path.append(EVA_CODE_DIR)
from eva.server.interpreter import start_cmd_client # noqa: E402
from eva.utils.logging_manager import LoggingManager, \
LoggingLevel # noqa: E402
def eva_client(host='0.0.0.0', port=5432):
    """
    Start the eva system

    Connects the interactive command-line client to an EVA server.

    :param host: server address to connect to
    :param port: server port to connect to
    """
    # Launch the client; any failure is logged as CRITICAL rather than raised.
    try:
        start_cmd_client(host=host, port=port)
    except Exception as e:
        LoggingManager().log(e, LoggingLevel.CRITICAL)
def parse_args(args):
    """Build the EVA client argument parser and parse *args*.

    :param args: list of argument strings (e.g. ``sys.argv[1:]``)
    :return: argparse.Namespace with ``host`` and ``port`` attributes
    """
    parser = argparse.ArgumentParser(description='')
    options = (
        ('-H', '--host', 'host', str, '0.0.0.0', 'Host address for EVA server'),
        ('-P', '--port', 'port', int, 5432, 'Port for EVA server'),
    )
    for short_flag, long_flag, dest, arg_type, default, help_text in options:
        parser.add_argument(short_flag, long_flag, dest=dest, type=arg_type,
                            help=help_text, default=default)
    return parser.parse_args(args)
def main():
    """CLI entry point: parse arguments and launch the EVA client."""
    parsed = parse_args(sys.argv[1:])
    eva_client(host=parsed.host, port=parsed.port)
# Script entry point: only run the client when executed directly.
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
from shoop.apps import AppConfig
class PugConfig(AppConfig):
    """Shoop application config for the shoop_pugme add-on."""

    name = "shoop_pugme"
    # "provides" hooks this package's admin module into the Shoop admin UI.
    provides = {
        "admin_module": [
            "shoop_pugme.admin_module:PugAdminModule"
        ]
    }
|
'''
Author: Yiwen Ding <dyiwen@umich.edu>
Date: May 2, 2021
'''
import csv
import os
from io import StringIO
from pdfminer3.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer3.converter import TextConverter
from pdfminer3.layout import LAParams
from pdfminer3.pdfpage import PDFPage
def retrieve_url(remote_url: str, local_path: str):
    """
    Saves a URL to a local path. Can handle cookies, e.g., those
    used downloading PDFs from MIT Press (TACL, CL).

    :param remote_url: The URL to download from (http(s)); a non-http
        value is treated as a local file path and copied.
    :param local_path: Where to save the file to.
    :return: True on success, False when the download failed.
    """
    # local import: only needed for the non-http fallback; previously
    # `shutil` was used without ever being imported (NameError)
    import shutil

    outdir = os.path.dirname(local_path)
    # guard against a bare filename (outdir == ''), which would make
    # os.makedirs('') raise
    if outdir and not os.path.exists(outdir):
        os.makedirs(outdir)
    if remote_url.startswith("http"):
        import urllib.request
        cookie_processor = urllib.request.HTTPCookieProcessor()
        opener = urllib.request.build_opener(cookie_processor)
        request = urllib.request.Request(
            remote_url, headers={
                'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A'}
        )
        try:
            with opener.open(request, timeout=1000) as url, open(local_path, mode="wb") as input_file_fh:
                try:
                    input_file_fh.write(url.read())
                except ConnectionResetError:
                    return False
                return True
        except urllib.error.HTTPError:
            return False
    else:
        shutil.copyfile(remote_url, local_path)
        return True
def getOverlappingLink(annotationList, element):
    """Return the URL of the first annotation box overlapping *element*.

    :param annotationList: iterable of ((x0, y0, x1, y1), url) tuples.
    :param element: object with x0, y0, x1, y1 bounding-box attributes.
    :return: the first overlapping URL, or None if nothing overlaps.
    """
    for box, url in annotationList:
        x0, y0, x1, y1 = box
        disjoint_x = x0 > element.x1 or element.x0 > x1
        disjoint_y = y0 > element.y1 or element.y0 > y1
        if not (disjoint_x or disjoint_y):
            return url
    return None
def get_pdf_email(pdf_path, author_name):
    """Extract a likely email address for *author_name* from a PDF.

    Renders the first page of the PDF to text with pdfminer3, collects all
    lines containing '@', and applies heuristics for common academic email
    formats ('firstname.lastname@', '{a,b,c}@service' groupings, etc.).
    Returns '' when nothing plausible is found.
    """
    pagenums = set()
    output = StringIO()
    manager = PDFResourceManager()
    converter = TextConverter(manager, output, laparams=LAParams())
    interpreter = PDFPageInterpreter(manager, converter)
    infile = open(pdf_path, 'rb')
    email_address = ""
    # Only the first page is processed (break after one successful page).
    for page in PDFPage.get_pages(infile, pagenums):
        try:
            interpreter.process_page(page)
            break
        except AttributeError:
            # NOTE(review): returns without closing infile/converter/output
            # -- leaks the file handle on this path; consider try/finally
            return email_address
    infile.close()
    converter.close()
    text = output.getvalue()
    output.close()
    # NOTE(review): this initial value is overwritten below before use
    keywords = ['@']
    possible_emails = []
    for each_line in text.split('\n'):
        if '@' in each_line:
            possible_emails.append(each_line)
    # match candidate lines against the author's name parts
    keywords = author_name.split(' ')
    for email_addr in possible_emails:
        if any(x in email_addr for x in keywords):
            email_address = email_addr
        elif "firstname" in email_addr.lower():
            # template address like firstname.lastname@...: substitute names
            email_address = email_addr.lower().replace("firstname", keywords[0])
    if "lastname" in email_address:
        email_address = email_address.replace("lastname", keywords[-1])
    # grouped address like "{alice,bob}@host": split prefix list and service
    if "}" in email_address:
        prefix = email_address.split('}')[0]
        service = email_address.split('}')[1]
        if "," in service:
            temp = service.split(",")[0]
            email_list = service.split(",")[1:-1]
            service = temp
        elif ";" in service:
            temp = service.split(";")[0]
            email_list = service.split(";")[1:-1]
            service = temp
        else:
            email_list = service
        if "{" in prefix:
            prefix = prefix.replace('{', ' ')
        if "," in prefix:
            for each_prefix in prefix.split(','):
                # if "{" in each_prefix:
                #     each_prefix = each_prefix.replace('{','')
                if any(x in each_prefix for x in keywords):
                    email_address = each_prefix + service
                    return email_address
        elif "|" in prefix:
            for each_prefix in prefix.split('|'):
                # if "{" in each_prefix:
                #     each_prefix = each_prefix.replace('{','')
                if any(x in each_prefix for x in keywords):
                    email_address = each_prefix + service
                    return email_address
        else:
            if any(x in prefix for x in keywords):
                email_address = prefix + service
                return email_address
        for each_one in email_list:
            if any(x in each_one for x in keywords):
                email_address = each_one
                return email_address
    # comma-separated list of full addresses: pick the one matching the name
    if ',' in email_address:
        for each_one in email_address.split(','):
            if any(x in each_one for x in keywords):
                email_address = each_one
    return email_address
def retrieve_email():
    """Scrape author emails from paper PDFs listed in a CSV.

    Reads up to 3600 rows of 'junior_authors_n_papers.csv' (id, name, and
    up to three paper URLs per author), downloads each paper's PDF, tries
    to extract an email address, and writes the results to
    'junior_authors_n_email.csv'.
    """
    info_list = []
    with open('junior_authors_n_papers.csv') as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            # skip the header row
            if line_count != 0:
                author_info = {}
                author_info['id'] = row[0]
                author_info['name'] = row[1]
                author_info['url'] = [row[2], row[3], row[4]]
                info_list.append(author_info)
            line_count += 1
            # cap the number of rows processed
            if line_count > 3600:
                break
    output_list = []
    for each_author in info_list:
        author_name = each_author['name'].lower()
        count = 0
        for each_link in each_author['url']:
            if each_link:
                output_dict = {}
                output_dict["email address"] = []
                output_dict["id"] = each_author["id"]
                output_dict["name"] = author_name
                print(each_link)
                try:
                    have_pdf = retrieve_url(each_link + '.pdf', os.getcwd() + '/' + author_name + str(count) + ".pdf")
                    if have_pdf:
                        # try:
                        email_addr = get_pdf_email(author_name + str(count) + '.pdf', author_name)
                        output_dict["email address"].append(email_addr)
                        # except TypeError or AttributeError:
                        #     pass
                except:
                    # NOTE(review): bare except deliberately swallows all PDF
                    # parse/download errors (see error URLs at end of file);
                    # consider narrowing and logging the exception
                    output_dict["email address"].append(" ")
                count += 1
                output_list.append(output_dict)
    with open('junior_authors_n_email.csv', mode='w') as csv_file:
        fieldnames = ['id', 'name', 'email address']
        writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        writer.writeheader()
        for each_one in output_list:
            if not each_one["email address"]:
                writer.writerow({'id': each_one["id"], 'name': each_one["name"], 'email address': ' '})
            else:
                # only keep values that look like real addresses
                if "@" in each_one["email address"][-1] and ":" not in each_one["email address"][-1]:
                    writer.writerow({'id': each_one["id"], 'name': each_one["name"],
                                     'email address': each_one["email address"][-1]})
                else:
                    writer.writerow({'id': each_one["id"], 'name': each_one["name"], 'email address': " "})
# NOTE(review): runs at import time -- consider guarding with
# `if __name__ == '__main__':` so importing this module has no side effects.
retrieve_email()
# retrieve_url('https://www.aclweb.org/anthology/2020.acl-main.487.pdf', os.getcwd()+'/' + "Stephen Denuyl" + "0" +".pdf")
# print("email is: ")
# print(get_pdf_email("Stephen Denuyl0.pdf", "Stephen Denuyl"))
# AttributeError: https://www.aclweb.org/anthology/2020.semeval-1.118
# ConnectionResetError: https://www.aclweb.org/anthology/2020.nlp4if-1.4
# pdfminer3.psparser.PSEOF: Unexpected EOF: https://www.aclweb.org/anthology/2020.computerm-1.14
# pdfminer3.pdfparser.PDFSyntaxError: No /Root object! - Is this really a PDF?: https://www.aclweb.org/anthology/W19-4023
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Configuration file (powered by YACS)."""
import argparse
import os
import sys
from pycls.core.io import cache_url
from yacs.config import CfgNode as CfgNode
from template_lib.v2.config import update_parser_defaults_from_yaml
# Global config object
_C = CfgNode()

# Example usage:
#   from core.config import cfg
cfg = _C


# ------------------------------------------------------------------------------------ #
# Model options
# ------------------------------------------------------------------------------------ #
_C.MODEL = CfgNode()

# Model type
_C.MODEL.TYPE = ""

# Number of weight layers
_C.MODEL.DEPTH = 0

# Number of classes
_C.MODEL.NUM_CLASSES = 10

# Loss function (see pycls/models/loss.py for options)
_C.MODEL.LOSS_FUN = "cross_entropy"


# ------------------------------------------------------------------------------------ #
# ResNet options
# ------------------------------------------------------------------------------------ #
_C.RESNET = CfgNode()

# Transformation function (see pycls/models/resnet.py for options)
_C.RESNET.TRANS_FUN = "basic_transform"

# Number of groups to use (1 -> ResNet; > 1 -> ResNeXt)
_C.RESNET.NUM_GROUPS = 1

# Width of each group (64 -> ResNet; 4 -> ResNeXt)
_C.RESNET.WIDTH_PER_GROUP = 64

# Apply stride to 1x1 conv (True -> MSRA; False -> fb.torch)
_C.RESNET.STRIDE_1X1 = True


# ------------------------------------------------------------------------------------ #
# AnyNet options
# ------------------------------------------------------------------------------------ #
_C.ANYNET = CfgNode()

# Stem type
_C.ANYNET.STEM_TYPE = "simple_stem_in"

# Stem width
_C.ANYNET.STEM_W = 32

# Block type
_C.ANYNET.BLOCK_TYPE = "res_bottleneck_block"

# Depth for each stage (number of blocks in the stage)
_C.ANYNET.DEPTHS = []

# Width for each stage (width of each block in the stage)
_C.ANYNET.WIDTHS = []

# Strides for each stage (applies to the first block of each stage)
_C.ANYNET.STRIDES = []

# Bottleneck multipliers for each stage (applies to bottleneck block)
_C.ANYNET.BOT_MULS = []

# Group widths for each stage (applies to bottleneck block)
_C.ANYNET.GROUP_WS = []

# Whether SE is enabled for res_bottleneck_block
_C.ANYNET.SE_ON = False

# SE ratio
_C.ANYNET.SE_R = 0.25


# ------------------------------------------------------------------------------------ #
# RegNet options
# ------------------------------------------------------------------------------------ #
_C.REGNET = CfgNode()

# Stem type
_C.REGNET.STEM_TYPE = "simple_stem_in"

# Stem width
_C.REGNET.STEM_W = 32

# Block type
_C.REGNET.BLOCK_TYPE = "res_bottleneck_block"

# Stride of each stage
_C.REGNET.STRIDE = 2

# Squeeze-and-Excitation (RegNetY)
_C.REGNET.SE_ON = False
_C.REGNET.SE_R = 0.25

# Depth
_C.REGNET.DEPTH = 10

# Initial width
_C.REGNET.W0 = 32

# Slope
_C.REGNET.WA = 5.0

# Quantization
_C.REGNET.WM = 2.5

# Group width
_C.REGNET.GROUP_W = 16

# Bottleneck multiplier (bm = 1 / b from the paper)
_C.REGNET.BOT_MUL = 1.0


# ------------------------------------------------------------------------------------ #
# EfficientNet options
# ------------------------------------------------------------------------------------ #
_C.EN = CfgNode()

# Stem width
_C.EN.STEM_W = 32

# Depth for each stage (number of blocks in the stage)
_C.EN.DEPTHS = []

# Width for each stage (width of each block in the stage)
_C.EN.WIDTHS = []

# Expansion ratios for MBConv blocks in each stage
_C.EN.EXP_RATIOS = []

# Squeeze-and-Excitation (SE) ratio
_C.EN.SE_R = 0.25

# Strides for each stage (applies to the first block of each stage)
_C.EN.STRIDES = []

# Kernel sizes for each stage
_C.EN.KERNELS = []

# Head width
_C.EN.HEAD_W = 1280

# Drop connect ratio
_C.EN.DC_RATIO = 0.0

# Dropout ratio
_C.EN.DROPOUT_RATIO = 0.0


# ------------------------------------------------------------------------------------ #
# Batch norm options
# ------------------------------------------------------------------------------------ #
_C.BN = CfgNode()

# BN epsilon
_C.BN.EPS = 1e-5

# BN momentum (BN momentum in PyTorch = 1 - BN momentum in Caffe2)
_C.BN.MOM = 0.1

# Precise BN stats
_C.BN.USE_PRECISE_STATS = True
_C.BN.NUM_SAMPLES_PRECISE = 8192

# Initialize the gamma of the final BN of each block to zero
_C.BN.ZERO_INIT_FINAL_GAMMA = False

# Use a different weight decay for BN layers
_C.BN.USE_CUSTOM_WEIGHT_DECAY = False
_C.BN.CUSTOM_WEIGHT_DECAY = 0.0


# ------------------------------------------------------------------------------------ #
# Optimizer options
# ------------------------------------------------------------------------------------ #
_C.OPTIM = CfgNode()

# Base learning rate
_C.OPTIM.BASE_LR = 0.1

# Learning rate policy select from {'cos', 'exp', 'steps'}
_C.OPTIM.LR_POLICY = "cos"

# Exponential decay factor
_C.OPTIM.GAMMA = 0.1

# Steps for 'steps' policy (in epochs)
_C.OPTIM.STEPS = []

# Learning rate multiplier for 'steps' policy
_C.OPTIM.LR_MULT = 0.1

# Maximal number of epochs
_C.OPTIM.MAX_EPOCH = 200

# Momentum
_C.OPTIM.MOMENTUM = 0.9

# Momentum dampening
_C.OPTIM.DAMPENING = 0.0

# Nesterov momentum
_C.OPTIM.NESTEROV = True

# L2 regularization
_C.OPTIM.WEIGHT_DECAY = 5e-4

# Start the warm up from OPTIM.BASE_LR * OPTIM.WARMUP_FACTOR
_C.OPTIM.WARMUP_FACTOR = 0.1

# Gradually warm up the OPTIM.BASE_LR over this number of epochs
_C.OPTIM.WARMUP_EPOCHS = 0


# ------------------------------------------------------------------------------------ #
# Training options
# ------------------------------------------------------------------------------------ #
_C.TRAIN = CfgNode()

# Dataset and split
_C.TRAIN.DATASET = ""
_C.TRAIN.SPLIT = "train"

# Total mini-batch size
_C.TRAIN.BATCH_SIZE = 128

# Image size
_C.TRAIN.IM_SIZE = 224

# Evaluate model on test data every eval period epochs
_C.TRAIN.EVAL_PERIOD = 1

# Save model checkpoint every checkpoint period epochs
_C.TRAIN.CHECKPOINT_PERIOD = 1

# Resume training from the latest checkpoint in the output directory
_C.TRAIN.AUTO_RESUME = True

# Weights to start training from
_C.TRAIN.WEIGHTS = ""


# ------------------------------------------------------------------------------------ #
# Testing options
# ------------------------------------------------------------------------------------ #
_C.TEST = CfgNode()

# Dataset and split
_C.TEST.DATASET = ""
_C.TEST.SPLIT = "val"

# Total mini-batch size
_C.TEST.BATCH_SIZE = 200

# Image size
_C.TEST.IM_SIZE = 256

# Weights to use for testing
_C.TEST.WEIGHTS = ""


# ------------------------------------------------------------------------------------ #
# Common train/test data loader options
# ------------------------------------------------------------------------------------ #
_C.DATA_LOADER = CfgNode()

# Number of data loader workers per process
_C.DATA_LOADER.NUM_WORKERS = 8

# Load data to pinned host memory
_C.DATA_LOADER.PIN_MEMORY = True


# ------------------------------------------------------------------------------------ #
# Memory options
# ------------------------------------------------------------------------------------ #
_C.MEM = CfgNode()

# Perform ReLU inplace
_C.MEM.RELU_INPLACE = True


# ------------------------------------------------------------------------------------ #
# CUDNN options
# ------------------------------------------------------------------------------------ #
_C.CUDNN = CfgNode()

# Perform benchmarking to select the fastest CUDNN algorithms to use
# Note that this may increase the memory usage and will likely not result
# in overall speedups when variable size inputs are used (e.g. COCO training)
_C.CUDNN.BENCHMARK = True


# ------------------------------------------------------------------------------------ #
# Precise timing options
# ------------------------------------------------------------------------------------ #
_C.PREC_TIME = CfgNode()

# Number of iterations to warm up the caches
_C.PREC_TIME.WARMUP_ITER = 3

# Number of iterations to compute avg time
_C.PREC_TIME.NUM_ITER = 30


# ------------------------------------------------------------------------------------ #
# Misc options
# ------------------------------------------------------------------------------------ #

# Number of GPUs to use (applies to both training and testing)
_C.NUM_GPUS = 1

# Output directory
_C.OUT_DIR = "/tmp"

# Config destination (in OUT_DIR)
_C.CFG_DEST = "config.yaml"

# Note that non-determinism may still be present due to non-deterministic
# operator implementations in GPU operator libraries
_C.RNG_SEED = 1

# Log destination ('stdout' or 'file')
_C.LOG_DEST = "stdout"

# Log period in iters
_C.LOG_PERIOD = 10

# Distributed backend
_C.DIST_BACKEND = "nccl"

# Hostname and port range for multi-process groups (actual port selected randomly)
_C.HOST = "localhost"
_C.PORT_RANGE = [10000, 65000]

# Models weights referred to by URL are downloaded to this local cache
_C.DOWNLOAD_CACHE = "/tmp/pycls-download-cache"


# ------------------------------------------------------------------------------------ #
# Default config
# ------------------------------------------------------------------------------------ #
# Frozen snapshot of the defaults above, used by reset_cfg() to restore state.
_CFG_DEFAULT = _C.clone()
_CFG_DEFAULT.freeze()


# ------------------------------------------------------------------------------------ #
# Deprecated keys
# ------------------------------------------------------------------------------------ #
# Old keys that may still appear in saved configs; YACS ignores them on merge.
_C.register_deprecated_key("PREC_TIME.BATCH_SIZE")
_C.register_deprecated_key("PREC_TIME.ENABLED")
_C.register_deprecated_key("PORT")
def assert_and_infer_cfg(cache_urls=True):
    """Checks config values invariants.

    Optionally caches any URL-based weight files (see cache_cfg_urls).
    """
    assert not _C.OPTIM.STEPS or _C.OPTIM.STEPS[0] == 0, \
        "The first lr step must start at 0"
    valid_splits = ["train", "val", "test"]
    for split in (_C.TRAIN.SPLIT, _C.TEST.SPLIT):
        assert split in valid_splits, "Data split '{}' not supported".format(split)
    for batch_size in (_C.TRAIN.BATCH_SIZE, _C.TEST.BATCH_SIZE):
        assert batch_size % _C.NUM_GPUS == 0, \
            "Mini-batch size should be a multiple of NUM_GPUS."
    assert _C.LOG_DEST in ["stdout", "file"], \
        "Log destination '{}' not supported".format(_C.LOG_DEST)
    if cache_urls:
        cache_cfg_urls()
def cache_cfg_urls():
    """Download URLs in config, cache them, and rewrite cfg to use cached file."""
    for section in (_C.TRAIN, _C.TEST):
        section.WEIGHTS = cache_url(section.WEIGHTS, _C.DOWNLOAD_CACHE)
def dump_cfg():
    """Dumps the config to OUT_DIR/CFG_DEST."""
    destination = os.path.join(_C.OUT_DIR, _C.CFG_DEST)
    with open(destination, "w") as stream:
        _C.dump(stream=stream)
def load_cfg(out_dir, cfg_dest="config.yaml"):
    """Loads config from the specified output directory into the global cfg."""
    _C.merge_from_file(os.path.join(out_dir, cfg_dest))
def reset_cfg():
    """Reset config to initial state.

    Restores the frozen defaults snapshot (_CFG_DEFAULT) into the live cfg.
    """
    cfg.merge_from_other_cfg(_CFG_DEFAULT)
def load_cfg_fom_args(description="Config file options."):
    """Load config from command line arguments and set any specified options.

    Prints help and exits when invoked with no arguments; otherwise merges
    the --cfg file and any trailing KEY VALUE overrides into the global cfg.
    """
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument("--cfg", dest="cfg_file", required=True, type=str,
                        help="Config file location")
    parser.add_argument("opts", default=None, nargs=argparse.REMAINDER,
                        help="See pycls/core/config.py for all options")
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    update_parser_defaults_from_yaml(parser)
    parsed = parser.parse_args()
    _C.merge_from_file(parsed.cfg_file)
    _C.merge_from_list(parsed.opts)
|
"""Support for displaying persistent notifications."""
from collections import OrderedDict
import logging
from typing import Any, Mapping, MutableMapping, Optional
import voluptuous as vol
from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import TemplateError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.loader import bind_hass
from homeassistant.util import slugify
import homeassistant.util.dt as dt_util
# mypy: allow-untyped-calls, allow-untyped-defs
# Attribute keys used in service payloads and stored notifications.
ATTR_CREATED_AT = "created_at"
ATTR_MESSAGE = "message"
ATTR_NOTIFICATION_ID = "notification_id"
ATTR_TITLE = "title"
ATTR_STATUS = "status"

DOMAIN = "persistent_notification"

# Entity ids look like "persistent_notification.<object_id>".
ENTITY_ID_FORMAT = DOMAIN + ".{}"

# Event fired whenever the set of notifications changes.
EVENT_PERSISTENT_NOTIFICATIONS_UPDATED = "persistent_notifications_updated"

# Service names registered in async_setup.
SERVICE_CREATE = "create"
SERVICE_DISMISS = "dismiss"
SERVICE_MARK_READ = "mark_read"

SCHEMA_SERVICE_CREATE = vol.Schema(
    {
        vol.Required(ATTR_MESSAGE): cv.template,
        vol.Optional(ATTR_TITLE): cv.template,
        vol.Optional(ATTR_NOTIFICATION_ID): cv.string,
    }
)

SCHEMA_SERVICE_DISMISS = vol.Schema({vol.Required(ATTR_NOTIFICATION_ID): cv.string})

SCHEMA_SERVICE_MARK_READ = vol.Schema({vol.Required(ATTR_NOTIFICATION_ID): cv.string})

# Fallback object id when a create call provides no notification_id.
DEFAULT_OBJECT_ID = "notification"

_LOGGER = logging.getLogger(__name__)

# Entity state while a notification is being shown.
STATE = "notifying"

STATUS_UNREAD = "unread"
STATUS_READ = "read"
@bind_hass
def create(hass, message, title=None, notification_id=None):
    """Generate a notification.

    Thread-safe wrapper that schedules async_create on the event loop.
    """
    hass.add_job(async_create, hass, message, title, notification_id)
@bind_hass
def dismiss(hass, notification_id):
    """Remove a notification.

    Thread-safe wrapper that schedules async_dismiss on the event loop.
    """
    hass.add_job(async_dismiss, hass, notification_id)
@callback
@bind_hass
def async_create(
    hass: HomeAssistant,
    message: str,
    title: Optional[str] = None,
    notification_id: Optional[str] = None,
) -> None:
    """Generate a notification.

    Builds the service payload from the non-None arguments and fires the
    create service as a task on the event loop.
    """
    data = {}
    for key, value in (
        (ATTR_TITLE, title),
        (ATTR_MESSAGE, message),
        (ATTR_NOTIFICATION_ID, notification_id),
    ):
        if value is not None:
            data[key] = value

    hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_CREATE, data))
@callback
@bind_hass
def async_dismiss(hass: HomeAssistant, notification_id: str) -> None:
    """Remove a notification by firing the dismiss service."""
    hass.async_create_task(
        hass.services.async_call(
            DOMAIN, SERVICE_DISMISS, {ATTR_NOTIFICATION_ID: notification_id}
        )
    )
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
    """Set up the persistent notification component."""
    # Insertion-ordered store of notifications, keyed by entity_id.
    persistent_notifications: MutableMapping[str, MutableMapping] = OrderedDict()
    hass.data[DOMAIN] = {"notifications": persistent_notifications}

    @callback
    def create_service(call):
        """Handle a create notification service call."""
        title = call.data.get(ATTR_TITLE)
        message = call.data.get(ATTR_MESSAGE)
        notification_id = call.data.get(ATTR_NOTIFICATION_ID)

        if notification_id is not None:
            entity_id = ENTITY_ID_FORMAT.format(slugify(notification_id))
        else:
            # No id supplied: generate a unique entity id and derive the
            # notification id from its object_id part.
            entity_id = async_generate_entity_id(
                ENTITY_ID_FORMAT, DEFAULT_OBJECT_ID, hass=hass
            )
            notification_id = entity_id.split(".")[1]

        attr = {}
        if title is not None:
            try:
                title.hass = hass
                title = title.async_render()
            except TemplateError as ex:
                _LOGGER.error("Error rendering title %s: %s", title, ex)
                # Fall back to the raw template source on render failure.
                title = title.template
            attr[ATTR_TITLE] = title

        try:
            message.hass = hass
            message = message.async_render()
        except TemplateError as ex:
            _LOGGER.error("Error rendering message %s: %s", message, ex)
            message = message.template

        attr[ATTR_MESSAGE] = message

        hass.states.async_set(entity_id, STATE, attr)

        # Store notification and fire event
        # This will eventually replace state machine storage
        persistent_notifications[entity_id] = {
            ATTR_MESSAGE: message,
            ATTR_NOTIFICATION_ID: notification_id,
            ATTR_STATUS: STATUS_UNREAD,
            ATTR_TITLE: title,
            ATTR_CREATED_AT: dt_util.utcnow(),
        }

        hass.bus.async_fire(EVENT_PERSISTENT_NOTIFICATIONS_UPDATED)

    @callback
    def dismiss_service(call):
        """Handle the dismiss notification service call."""
        notification_id = call.data.get(ATTR_NOTIFICATION_ID)
        entity_id = ENTITY_ID_FORMAT.format(slugify(notification_id))

        # Unknown id: nothing to do.
        if entity_id not in persistent_notifications:
            return

        hass.states.async_remove(entity_id)

        del persistent_notifications[entity_id]
        hass.bus.async_fire(EVENT_PERSISTENT_NOTIFICATIONS_UPDATED)

    @callback
    def mark_read_service(call):
        """Handle the mark_read notification service call."""
        notification_id = call.data.get(ATTR_NOTIFICATION_ID)
        entity_id = ENTITY_ID_FORMAT.format(slugify(notification_id))

        if entity_id not in persistent_notifications:
            _LOGGER.error(
                "Marking persistent_notification read failed: "
                "Notification ID %s not found.",
                notification_id,
            )
            return

        persistent_notifications[entity_id][ATTR_STATUS] = STATUS_READ
        hass.bus.async_fire(EVENT_PERSISTENT_NOTIFICATIONS_UPDATED)

    # Register the three services and the websocket command.
    hass.services.async_register(
        DOMAIN, SERVICE_CREATE, create_service, SCHEMA_SERVICE_CREATE
    )

    hass.services.async_register(
        DOMAIN, SERVICE_DISMISS, dismiss_service, SCHEMA_SERVICE_DISMISS
    )

    hass.services.async_register(
        DOMAIN, SERVICE_MARK_READ, mark_read_service, SCHEMA_SERVICE_MARK_READ
    )

    hass.components.websocket_api.async_register_command(websocket_get_notifications)

    return True
@callback
@websocket_api.websocket_command({vol.Required("type"): "persistent_notification/get"})
def websocket_get_notifications(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: Mapping[str, Any],
) -> None:
    """Return a list of persistent_notifications."""
    fields = (
        ATTR_NOTIFICATION_ID,
        ATTR_MESSAGE,
        ATTR_STATUS,
        ATTR_TITLE,
        ATTR_CREATED_AT,
    )
    notifications = hass.data[DOMAIN]["notifications"].values()
    payload = [{field: item[field] for field in fields} for item in notifications]
    connection.send_message(websocket_api.result_message(msg["id"], payload))
|
from .configuration import config
from collections import namedtuple
import falcon
import logging
import marshmallow
import slackclient
class SlackMessageRouter:
    """Falcon resource that relays matching outgoing-webhook posts to Slack."""

    def __init__(self):
        slack_cfg = config['slack']
        self._slack = slackclient.SlackClient(slack_cfg['bot_user']['token'])
        hook = slack_cfg['outgoing_webhook']
        matcher_specs = hook['matchers']
        # Schema validates the token and that the sender is a known user/bot.
        self._schema = WebhookDataSchema(hook['token'], set(matcher_specs.keys()))
        self._matchers = {}
        for sender, spec in matcher_specs.items():
            self._matchers[sender] = Matcher(spec)

    def on_post(self, req, resp):
        """Validate an incoming webhook POST and forward interesting messages."""
        header_dump = '\n'.join(f'\t{k}: {v}' for k, v in req.headers.items())
        param_dump = '\n'.join(f'\t{k}: {v}' for k, v in req.params.items())
        logging.info('\nheaders: \n%s\nparams: \n%s', header_dump, param_dump)
        # marshmallow 2.x: load returns a (data, errors) pair.
        data, err = self._schema.load(req.params)
        if err:
            logging.info('%s %s', data, err)
            logging.info('valid user names %s',
                         ' '.join(self._matchers.keys()))
            resp.status = falcon.HTTP_400
            return
        matcher = self._matchers[data.user_name]
        if matcher(data):
            self._slack.api_call(
                'chat.postMessage',
                channel=matcher.channel,
                text='<!channel>: This looks interesting...',
                attachments=[{'text': data.text}],
                as_user=True)
        resp.status = falcon.HTTP_200
WebhookData = namedtuple('WebhookData', ('token', 'text', 'user_name'))
class WebhookDataSchema(marshmallow.Schema):
    """Validate a Slack outgoing-webhook POST and convert it to WebhookData.

    NOTE(review): written against the marshmallow 2.x contract where
    ``load`` returns a ``(data, errors)`` tuple and ``@validates_schema``
    takes ``(self, data)`` — confirm before upgrading marshmallow.
    """
    token = marshmallow.fields.String(required=True)
    text = marshmallow.fields.String(required=True)
    user_name = marshmallow.fields.String(required=True)
    # bot messages carry bot_name instead of user_name; normalized in _post_load
    bot_name = marshmallow.fields.String()
    def __init__(self, webhook_token, user_names):
        """Remember the expected webhook token and the allowed sender names."""
        super().__init__()
        self._token = webhook_token
        self._user_names = user_names
    @marshmallow.validates_schema
    def _matches_user_name_or_bot_name(self, data):
        # Accept the post if either the human or the bot name is whitelisted.
        if data.get('user_name') not in self._user_names and \
                data.get('bot_name') not in self._user_names:
            raise marshmallow.ValidationError('invalid user/bot name')
    @marshmallow.validates('token')
    def _matches_token(self, value):
        # Reject posts that don't carry the configured webhook token.
        if value != self._token:
            raise marshmallow.ValidationError('invalid token')
    @marshmallow.post_load
    def _post_load(self, data):
        # Fold bot_name into user_name so downstream code sees one field.
        if 'bot_name' in data:
            data['user_name'] = data['bot_name']
            del data['bot_name']
        return WebhookData(**data)
class Matcher:
    """Decides whether a webhook message should be forwarded to a channel.

    A message matches when its lowercased text contains at least one wanted
    substring and none of the blocked substrings.
    """

    def __init__(self, spec):
        self._text_contains = spec['text_contains']
        self._output_channel = spec['output_channel']
        self._unmatches = spec['unmatch']

    def __call__(self, data):
        """Return True when data.text matches this matcher's rules."""
        text = data.text.lower()
        has_wanted = any(needle in text for needle in self._text_contains)
        has_blocked = any(needle in text for needle in self._unmatches)
        return has_wanted and not has_blocked

    @property
    def channel(self):
        """Slack channel to post matching messages to."""
        return self._output_channel
|
''' data processing for neuron project '''
# built-in
import sys
import os
import shutil
import six
# third party
import nibabel as nib
import numpy as np
import scipy.ndimage.interpolation
from tqdm import tqdm_notebook as tqdm # for verbosity for forloops
import matplotlib.pyplot as plt
# note sure if tqdm_notebook reverts back to
from tqdm import tqdm
from subprocess import call
# import local ndutils
import pystrum.pynd.ndutils as nd
import re
def proc_mgh_vols(inpath,
                  outpath,
                  ext='.mgz',
                  label_idx=None,
                  **kwargs):
    ''' process mgh data from mgz format and save to numpy format

    1. load file
    2. normalize intensity
    3. resize
    4. save as python block

    Parameters:
        inpath (string): directory containing the input volumes
        outpath (string): directory to write the .npz outputs to
        ext (string): extension of the files to process (default '.mgz')
        label_idx (int or None): if given, binarize the volume to (vol == label_idx)
        kwargs: forwarded to vol_proc (crop, resize_shape, rescale, clip, ...)

    TODO: check header info and such.?
    '''

    # get files in input directory
    files = [f for f in os.listdir(inpath) if f.endswith(ext)]

    # go through each file, remembering the ones that failed processing
    skipped_files = []
    for filename in tqdm(files, ncols=80):

        # load nifti volume
        volnii = nib.load(os.path.join(inpath, filename))

        # get the data out
        # NOTE(review): get_data() is deprecated (removed in nibabel 5);
        # get_fdata() is the modern equivalent — confirm before upgrading nibabel.
        vol_data = volnii.get_data().astype(float)
        # some volumes carry a 4th (frame) dimension; keep only the last frame
        if ('dim' in volnii.header) and volnii.header['dim'][4] > 1:
            vol_data = vol_data[:, :, :, -1]

        # process volume; skip (and report) volumes that fail rather than abort
        try:
            vol_data = vol_proc(vol_data, **kwargs)
        except Exception as e:
            skipped_files.append(filename)
            print("Skipping %s\nError: %s" % (filename, str(e)), file=sys.stderr)
            continue

        # optionally binarize against a single label
        if label_idx is not None:
            vol_data = (vol_data == label_idx).astype(int)

        # save numpy file
        outname = os.path.splitext(os.path.join(outpath, filename))[0] + '.npz'
        np.savez_compressed(outname, vol_data=vol_data)

    for file in skipped_files:
        print("Skipped: %s" % file, file=sys.stderr)
def scans_to_slices(inpath, outpath, slice_nrs,
                    ext='.mgz',
                    label_idx=None,
                    dim_idx=2,
                    out_ext='.png',
                    slice_pad=0,
                    vol_inner_pad_for_slice_nrs=0,
                    **kwargs):  # remaining kwargs are forwarded to vol_proc
    """Extract 2D slices (png) or thin slabs (nii.gz) from each volume in a folder.

    Parameters:
        inpath (string): directory containing the input volumes
        outpath (string): directory to write slices to
        slice_nrs: iterable of slice indices, or None to take all valid slices
        ext (string): extension of the input files (default '.mgz')
        label_idx (int or None): if given, binarize the volume to (vol == label_idx)
        dim_idx (int): axis (0/1/2) along which slices are taken
        out_ext (string): '.png' for images, anything else saves nii.gz slabs
        slice_pad (int): half-width of the slab around each slice (volumes only)
        vol_inner_pad_for_slice_nrs (int): extra margin excluded at both ends
            when slice_nrs is None
        kwargs: forwarded to vol_proc (crop, resize_shape, rescale, clip, ...)
    """
    # get files in input directory
    files = [f for f in os.listdir(inpath) if f.endswith(ext)]
    # go through each file; volumes that fail vol_proc are collected and skipped
    list_skipped_files = ()
    for fileidx in tqdm(range(len(files)), ncols=80):
        # load nifti volume
        volnii = nib.load(os.path.join(inpath, files[fileidx]))
        # get the data out
        # NOTE(review): get_data() is deprecated in newer nibabel — confirm before upgrading.
        vol_data = volnii.get_data().astype(float)
        # some volumes carry a 4th (frame) dimension; keep only the last frame
        if ('dim' in volnii.header) and volnii.header['dim'][4] > 1:
            vol_data = vol_data[:, :, :, -1]
        if slice_pad > 0:
            assert (out_ext != '.png'), "slice pad can only be used with volumes"
        # process volume
        try:
            vol_data = vol_proc(vol_data, **kwargs)
        except Exception as e:
            list_skipped_files += (files[fileidx], )
            print("Skipping %s\nError: %s" % (files[fileidx], str(e)), file=sys.stderr)
            continue
        # pngs are written as 0..255 intensities; label maps stay 0/1
        mult_fact = 255
        if label_idx is not None:
            vol_data = (vol_data == label_idx).astype(int)
            mult_fact = 1
        # extract slice
        if slice_nrs is None:
            # all slices, excluding the pad margins at both ends of the axis
            slice_nrs_sel = range(vol_inner_pad_for_slice_nrs + slice_pad,
                                  vol_data.shape[dim_idx] - slice_pad - vol_inner_pad_for_slice_nrs)
        else:
            slice_nrs_sel = slice_nrs
        for slice_nr in slice_nrs_sel:
            # slab of indices centered on slice_nr (single index when slice_pad == 0)
            slice_nr_out = range(slice_nr - slice_pad, slice_nr + slice_pad + 1)
            if dim_idx == 2: # TODO: fix in one line
                vol_img = np.squeeze(vol_data[:, :, slice_nr_out])
            elif dim_idx == 1:
                vol_img = np.squeeze(vol_data[:, slice_nr_out, :])
            else:
                vol_img = np.squeeze(vol_data[slice_nr_out, :, :])
            # save file
            if out_ext == '.png':
                # save png file
                img = (vol_img * mult_fact).astype('uint8')
                outname = os.path.splitext(os.path.join(outpath, files[fileidx]))[
                    0] + '_slice%d.png' % slice_nr
                try:
                    from PIL import Image
                    Image.fromarray(img).convert('RGB').save(outname)
                except ImportError:
                    raise ImportError(
                        'Could not save "%s" since PIL has not been installed' % outname)
            else:
                if slice_pad == 0: # dimension has collapsed by the squeeze above
                    assert vol_img.ndim == 2
                    vol_img = np.expand_dims(vol_img, dim_idx)
                # assuming nibabel saving image (identity affine)
                nii = nib.Nifti1Image(vol_img, np.diag([1, 1, 1, 1]))
                outname = os.path.splitext(os.path.join(outpath, files[fileidx]))[
                    0] + '_slice%d.nii.gz' % slice_nr
                nib.save(nii, outname)
def vol_proc(vol_data,
             crop=None,
             # None (to not resize), or vector. If vector, third entry can be None
             resize_shape=None,
             interp_order=None,
             rescale=None,
             rescale_prctle=None,
             resize_slices=None,
             resize_slices_dim=None,
             offset=None,
             clip=None,
             extract_nd=None, # extracts a particular section
             force_binary=None, # forces anything > 0 to be 1
             permute=None):  # NOTE(review): accepted but never used in this function
    ''' process a volume with a series of intensity rescale, resize and crop operations

    The operations are applied in a fixed order: offset -> rescale ->
    percentile rescale -> resize -> crop -> clip -> extract -> binarize.
    The order matters (see the clip comment below), so do not reorder.

    Parameters:
        vol_data (ndarray): the input volume
        crop: crop spec forwarded to nd.volcrop
        resize_shape: target shape; last entry may be None (derived from the
            first axis' resize ratio)
        interp_order (int): spline order for scipy zoom
        rescale (float): multiplicative intensity factor
        rescale_prctle (float): percentile whose value is normalized to 1
        resize_slices: like resize_shape but with exactly one None entry
            marking the slice axis to keep unchanged
        resize_slices_dim (int): the axis to keep; inferred from the None
            entry when not given
        offset: additive intensity offset, applied first
        clip ((lo, hi)): final intensity clipping bounds
        extract_nd: index vectors passed to np.ix_ to extract a section
        force_binary (bool): threshold the volume to {0.0, 1.0}
    '''
    if offset is not None:
        vol_data = vol_data + offset
    # intensity normalize data .* rescale
    if rescale is not None:
        vol_data = np.multiply(vol_data, rescale)
    if rescale_prctle is not None:
        # normalize so the given percentile maps to intensity 1
        rescale = np.percentile(vol_data.flat, rescale_prctle)
        vol_data = np.multiply(vol_data.astype(float), 1 / rescale)
    if resize_slices is not None:
        resize_slices = [*resize_slices]
        assert resize_shape is None, "if resize_slices is given, resize_shape has to be None"
        resize_shape = resize_slices
        if resize_slices_dim is None:
            # the single None entry marks the axis whose size is preserved
            resize_slices_dim = np.where([f is None for f in resize_slices])[0]
            assert len(resize_slices_dim) == 1, "Could not find dimension or slice resize"
            resize_slices_dim = resize_slices_dim[0]
        resize_shape[resize_slices_dim] = vol_data.shape[resize_slices_dim]
    # resize (downsample) matrices
    if resize_shape is not None and resize_shape != vol_data.shape:
        resize_shape = [*resize_shape]
        # allow for the last entry to be None
        if resize_shape[-1] is None:
            resize_ratio = np.divide(resize_shape[0], vol_data.shape[0])
            resize_shape[-1] = np.round(resize_ratio * vol_data.shape[-1]).astype('int')
        resize_ratio = np.divide(resize_shape, vol_data.shape)
        vol_data = scipy.ndimage.interpolation.zoom(vol_data, resize_ratio, order=interp_order)
    # crop data if necessary
    if crop is not None:
        vol_data = nd.volcrop(vol_data, crop=crop)
    # needs to be last to guarantee clip limits.
    # For e.g., resize might screw this up due to bicubic interpolation if it was done after.
    if clip is not None:
        vol_data = np.clip(vol_data, clip[0], clip[1])
    if extract_nd is not None:
        vol_data = vol_data[np.ix_(*extract_nd)]
    if force_binary:
        vol_data = (vol_data > 0).astype(float)
    # return with checks. this check should be right at the end before return
    if clip is not None:
        assert np.max(vol_data) <= clip[1], "clip failed"
        assert np.min(vol_data) >= clip[0], "clip failed"
    return vol_data
def prior_to_weights(prior_filename, nargout=1, min_freq=0, force_binary=False, verbose=False):
    ''' transform a 4D prior (3D + nb_labels) into a class weight vector

    Parameters:
        prior_filename: path to a .npz file containing a 'prior' entry, or
            the prior array itself (3D or 4D, last axis = labels)
        nargout (int): 1 returns just the weights, otherwise (weights, prior)
        min_freq (float): floor applied to class frequencies before inversion
        force_binary (bool): collapse all non-background labels into one class
        verbose (bool): plot the class frequencies and resulting weights

    Returns:
        weights (ndarray): per-class weights, normalized to sum to 1; labels
        with zero support get weight 0 (with a warning on stderr)
    '''

    # load prior (the file uses [*x] unpacking elsewhere, so it is py3-only;
    # plain str replaces the former six.string_types check)
    if isinstance(prior_filename, str):
        prior = np.load(prior_filename)['prior']
    else:
        prior = prior_filename

    # assumes prior is 4D (or 3D); flatten all spatial axes, keep the label axis
    assert np.ndim(prior) == 4 or np.ndim(prior) == 3, "prior is the wrong number of dimensions"
    prior_flat = np.reshape(prior, (np.prod(prior.shape[0:(np.ndim(prior) - 1)]), prior.shape[-1]))

    if force_binary:
        # merge labels 1..n into a single foreground class
        nb_labels = prior_flat.shape[-1]
        prior_flat[:, 1] = np.sum(prior_flat[:, 1:nb_labels], 1)
        prior_flat = np.delete(prior_flat, range(2, nb_labels), 1)

    # sum total class votes
    class_count = np.sum(prior_flat, 0)
    class_prior = class_count / np.sum(class_count)

    # adding minimum frequency
    class_prior[class_prior < min_freq] = min_freq
    class_prior = class_prior / np.sum(class_prior)

    # a zero-support label would get infinite weight; give it weight 0 instead
    if np.any(class_prior == 0):
        print("Warning, found a label with 0 support. Setting its weight to 0!", file=sys.stderr)
        class_prior[class_prior == 0] = np.inf

    # compute weights from class frequencies
    weights = 1 / class_prior
    weights = weights / np.sum(weights)
    # weights[0] = 0 # explicitly don't care about bg

    # a bit of verbosity
    if verbose:
        f, (ax1, ax2, ax3) = plt.subplots(1, 3)
        # fix: plot the per-class frequencies (1D) rather than the raw,
        # possibly 4D, prior — bar() cannot take a 4D array
        ax1.bar(range(class_prior.size), np.log(class_prior))
        ax1.set_title('log class freq')
        ax2.bar(range(weights.size), weights)
        ax2.set_title('weights')
        ax3.bar(range(weights.size), np.log((weights)) - np.min(np.log((weights))))
        ax3.set_title('log(weights)-minlog')
        f.set_size_inches(12, 3)
        plt.show()
    np.set_printoptions(precision=3)

    # return
    if nargout == 1:
        return weights
    else:
        return (weights, prior)
def filestruct_change(in_path, out_path, re_map,
                      mode='subj_to_type',
                      use_symlinks=False, name=""):
    """
    change from independent subjects in a folder to breakdown structure

    example: filestruct_change('/../in_path', '/../out_path',
        {'asegs.nii.gz':'asegs', 'norm.nii.gz':'vols'})

    input structure:
    /.../in_path/subj_1 --> with files that match regular expressions defined in re_map.keys()
    /.../in_path/subj_2 --> with files that match regular expressions defined in re_map.keys()
    ...

    output structure:
    /.../out_path/asegs/subj_1.nii.gz, subj_2.nii.gz
    /.../out_path/vols/subj_1.nii.gz, subj_2.nii.gz

    Parameters:
        in_path (string): input path
        out_path (string): output path
        re_map (dictionary): keys are reg-exs that match files in the input folders.
            values are the folders to put those files in the new structure.
            values can also be tuples, in which case values[0] is the dst folder,
            and values[1] is the extension of the output file
        mode (optional): NOTE(review): currently unused — confirm intent
        use_symlinks (bool): whether to just use symlinks rather than copy files
            default:True
        name (string): label shown on the tqdm progress bar
    """
    if not os.path.isdir(out_path):
        os.mkdir(out_path)
    # go through folders
    for subj in tqdm(os.listdir(in_path), desc=name):
        # go through files in a folder
        files = os.listdir(os.path.join(in_path, subj))
        for file in files:
            # see which key matches. Make sure only one does.
            matches = [re.match(k, file) for k in re_map.keys()]
            nb_matches = sum([f is not None for f in matches])
            assert nb_matches == 1, "Found %d matches for file %s/%s" % (nb_matches, file, subj)
            # get the matches key
            match_idx = [i for i, f in enumerate(matches) if f is not None][0]
            matched_dst = re_map[list(re_map.keys())[match_idx]]
            _, ext = os.path.splitext(file)
            # tuple values override the destination extension
            if isinstance(matched_dst, tuple):
                ext = matched_dst[1]
                matched_dst = matched_dst[0]
            # prepare source and destination file
            src_file = os.path.join(in_path, subj, file)
            dst_path = os.path.join(out_path, matched_dst)
            if not os.path.isdir(dst_path):
                os.mkdir(dst_path)
            # output file is named after the subject, grouped by file type
            dst_file = os.path.join(dst_path, subj + ext)
            if use_symlinks:
                # on windows there are permission problems.
                # Can try : call(['mklink', 'LINK', 'TARGET'], shell=True)
                # or note https://stackoverflow.com/questions/6260149/os-symlink-support-in-windows
                os.symlink(src_file, dst_file)
            else:
                shutil.copyfile(src_file, dst_file)
def ml_split(in_path, out_path,
             cat_titles=('train', 'validate', 'test'),
             cat_prop=(0.5, 0.3, 0.2),
             use_symlinks=False,
             seed=None,
             tqdm=tqdm):
    """
    split a dataset of per-subject entries into category subfolders

    Parameters:
        in_path (string): folder with one entry (folder or file) per subject
        out_path (string): destination; one subfolder per category is created
        cat_titles (sequence): category names (defaults now tuples, not
            mutable-list defaults; read-only use so callers are unaffected)
        cat_prop (sequence): proportion of subjects per category;
            re-normalized if it does not sum to 1
        use_symlinks (bool): symlink instead of copying
        seed (int or None): seed for the random subject permutation
        tqdm: progress-bar callable wrapping each category's loop
    """
    if seed is not None:
        np.random.seed(seed)

    if not os.path.isdir(out_path):
        os.makedirs(out_path)

    # get subjects and randomize their order
    subjs = sorted(os.listdir(in_path))
    nb_subj = len(subjs)
    subj_order = np.random.permutation(nb_subj)

    # prepare split: cumulative proportions give each category's end index
    cat_tot = np.cumsum(cat_prop)
    if cat_tot[-1] != 1:
        # bug fix: formatting the whole cumsum array with %f raised a
        # TypeError; only the final total is meaningful here
        print("split_prop sums to %f, re-normalizing" % cat_tot[-1])
        cat_tot = np.array(cat_tot) / cat_tot[-1]
    nb_cat_subj = np.round(cat_tot * nb_subj).astype(int)
    cat_subj_start = [0, *nb_cat_subj[:-1]]

    # go through each category
    for cat_idx, cat in enumerate(cat_titles):
        if not os.path.isdir(os.path.join(out_path, cat)):
            os.mkdir(os.path.join(out_path, cat))

        # contiguous chunk of the permuted subject order for this category
        cat_subj_idx = subj_order[cat_subj_start[cat_idx]:nb_cat_subj[cat_idx]]
        for subj_idx in tqdm(cat_subj_idx, desc=cat):
            src_folder = os.path.join(in_path, subjs[subj_idx])
            dst_folder = os.path.join(out_path, cat, subjs[subj_idx])
            if use_symlinks:
                # on windows there are permission problems.
                # Can try : call(['mklink', 'LINK', 'TARGET'], shell=True)
                # or note https://stackoverflow.com/questions/6260149/os-symlink-support-in-windows
                os.symlink(src_folder, dst_folder)
            else:
                if os.path.isdir(src_folder):
                    shutil.copytree(src_folder, dst_folder)
                else:
                    shutil.copyfile(src_folder, dst_folder)
|
import numpy
# from cryptography.fernet import Fernet
def handler(event, context):
    """Lambda entry point: exercise numpy and report pi to two decimals."""
    # Try using some of the modules to make sure they work & don't crash the process
    # print(Fernet.generate_key())
    pi_text = f"{numpy.pi:.2f}"
    return {"pi": pi_text}
def first_function_handler(event, context):
    """Lambda handler that returns a fixed greeting."""
    greeting = "Hello World"
    return greeting
def second_function_handler(event, context):
    """Lambda handler that returns a fixed greeting."""
    greeting = "Hello Mars"
    return greeting
|
#! /usr/bin/env python3
# coding=utf8
import pycurl
import json
import requests
import re
class PKO:
    """Minimal client for the iPKO (PKO BP) online-banking JSON API.

    NOTE(review): this talks to the undocumented www.ipko.pl internal API;
    the payload shapes below were observed, not specified — verify against
    live behavior before changing any field.
    """
    # session id returned by the login endpoint; echoed in every later request
    sid = ""
    # id of the current multi-step login flow
    flow_id = ""
    # number of the first account on the profile (set by _getAccountNumber)
    account = ""
    def _httpIPKO(self, url, data):
        """POST ``data`` as JSON to ``url`` with session headers; return body text."""
        headers = {'x-ias-ias_sid': self.sid,
                   'X-Requested-With': "XMLHttpRequest"}
        res = requests.post(url, json=data, headers=headers)
        return res.text
    def _getSID(self, login):
        """Login step 1: submit the login name; store sid and flow_id."""
        url = "https://www.ipko.pl/secure/ikd3/api/login"
        data = {"_method": "POST",
                "version": 2, "seq": 1,
                "location": "",
                "request": {"state": "login",
                            "data": {"login": login}}}
        res = json.loads(self._httpIPKO(url, data))
        self.flow_id = res['response']['flow_id']
        self.sid = res['session']['sid']
        return self.sid, self.flow_id
    def _password(self, password):
        """Login step 2: submit the password for the current flow."""
        data = {"_method": "PUT",
                "sid": self.sid, "version": 2,
                "seq": 2, "location": "",
                "request": {"state": "password",
                            "flow_id": self.flow_id,
                            "first_prelogin": "true",
                            "data": {"password": password}}}
        url = "https://www.ipko.pl/secure/ikd3/api/login"
        self._httpIPKO(url, data)
    def _dispatch(self):
        """Login step 3: finish the flow so the session becomes active."""
        data = {"_method": "PUT",
                "sid": self.sid,
                "version": 2,
                "seq": 3,
                "location": "",
                "request": {"state": "dispatch",
                            "flow_id": self.flow_id,
                            "first_prelogin": "true",
                            "data": {}}}
        url = "https://www.ipko.pl/secure/ikd3/api/login"
        self._httpIPKO(url, data)
    def login(self, login, password):
        """Run the full three-step login and cache the first account number."""
        self._getSID(login)
        self._password(password)
        self._dispatch()
        self._getAccountNumber()
    def _getAccountNumber(self):
        """Fetch the first available account's number; cache it on self."""
        url = "https://www.ipko.pl/secure/ikd3/api/home/account"
        data = {"_method": "GET",
                "sid": self.sid,
                "seq": 10,
                "location": "#home",
                "request": {"object_id": "null"}}
        res = json.loads(self._httpIPKO(url, data))
        res = res['response']['filter']['account']['available'][0]
        self.account = res['data']['number']['value']
        return str(self.account)
    def getHistory(self):
        """Return up to 50 completed operations for the cached account."""
        url = ("https://www.ipko.pl/secure/ikd3/api/"
               "accounts/operations/completed")
        data = {"_method": "POST",
                "sid": self.sid,
                "seq": 22,
                "location": "#accounts",
                "request": {"object_id": self.account,
                            "filter": {"page_size": 50}}}
        res = json.loads(self._httpIPKO(url, data))
        return res['response']['items']
|
# Copyright 2018-2019 The glTF-Blender-IO authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import bpy
from mathutils import Vector, Matrix
import numpy as np
from ...io.imp.gltf2_io_binary import BinaryData
from ..com.gltf2_blender_extras import set_extras
from .gltf2_blender_material import BlenderMaterial
class BlenderMesh():
    """Namespace for glTF-to-Blender mesh creation (never instantiated)."""

    def __new__(cls, *args, **kwargs):
        # This class only groups static helpers; refuse instantiation.
        raise RuntimeError("%s should not be instantiated" % cls)

    @staticmethod
    def create(gltf, mesh_idx, skin_idx):
        """Create and return the Blender mesh for gltf.data.meshes[mesh_idx]."""
        return create_mesh(gltf, mesh_idx, skin_idx)
# Maximum number of TEXCOORD_n/COLOR_n sets to import
# (attribute layers beyond these caps are ignored during the primitive scan)
UV_MAX = 8
COLOR_MAX = 8
def create_mesh(gltf, mesh_idx, skin_idx):
    """Build and return a new bpy mesh for gltf.data.meshes[mesh_idx]."""
    pymesh = gltf.data.meshes[mesh_idx]
    mesh_name = pymesh.name or 'Mesh_%d' % mesh_idx
    mesh = bpy.data.meshes.new(mesh_name)

    # Skin weights and shapekeys can only be written through an object, so
    # temporarily attach the mesh to a throwaway object while filling it.
    helper_ob = None
    try:
        helper_ob = bpy.data.objects.new('##gltf-import:tmp-object##', mesh)
        do_primitives(gltf, mesh_idx, skin_idx, mesh, helper_ob)
        set_extras(mesh, gltf.data.meshes[mesh_idx].extras, exclude=['targetNames'])
    finally:
        if helper_ob:
            bpy.data.objects.remove(helper_ob)

    return mesh
def do_primitives(gltf, mesh_idx, skin_idx, mesh, ob):
    """Put all primitive data into the mesh.

    Decodes every primitive of gltf.data.meshes[mesh_idx] into flat numpy
    arrays (positions, normals, loops, UVs, colors, joints/weights, shapekey
    positions), converts them from glTF to Blender conventions, and writes
    them into ``mesh`` via foreach_set. ``ob`` is a temporary object used
    only for vertex groups (skinning) and shapekeys.
    """
    pymesh = gltf.data.meshes[mesh_idx]
    # Scan the primitives to find out what we need to create
    has_normals = False
    num_uvs = 0
    num_cols = 0
    num_joint_sets = 0
    for prim in pymesh.primitives:
        if 'POSITION' not in prim.attributes:
            continue
        if gltf.import_settings['import_shading'] == "NORMALS":
            if 'NORMAL' in prim.attributes:
                has_normals = True
        if skin_idx is not None:
            i = 0
            while ('JOINTS_%d' % i) in prim.attributes and \
                    ('WEIGHTS_%d' % i) in prim.attributes:
                i += 1
            num_joint_sets = max(i, num_joint_sets)
        i = 0
        while i < UV_MAX and ('TEXCOORD_%d' % i) in prim.attributes: i += 1
        num_uvs = max(i, num_uvs)
        i = 0
        while i < COLOR_MAX and ('COLOR_%d' % i) in prim.attributes: i += 1
        num_cols = max(i, num_cols)
    num_shapekeys = 0
    if len(pymesh.primitives) > 0: # Empty primitive tab is not allowed, but some invalid files...
        for morph_i, _ in enumerate(pymesh.primitives[0].targets or []):
            if pymesh.shapekey_names[morph_i] is not None:
                num_shapekeys += 1
    # -------------
    # We'll process all the primitives gathering arrays to feed into the
    # various foreach_set function that create the mesh data.
    num_faces = 0 # total number of faces
    vert_locs = np.empty(dtype=np.float32, shape=(0,3)) # coordinate for each vert
    vert_normals = np.empty(dtype=np.float32, shape=(0,3)) # normal for each vert
    edge_vidxs = np.array([], dtype=np.uint32) # vertex_index for each loose edge
    loop_vidxs = np.array([], dtype=np.uint32) # vertex_index for each loop
    loop_uvs = [
        np.empty(dtype=np.float32, shape=(0,2)) # UV for each loop for each layer
        for _ in range(num_uvs)
    ]
    loop_cols = [
        np.empty(dtype=np.float32, shape=(0,4)) # color for each loop for each layer
        for _ in range(num_cols)
    ]
    vert_joints = [
        np.empty(dtype=np.uint32, shape=(0,4)) # 4 joints for each vert for each set
        for _ in range(num_joint_sets)
    ]
    vert_weights = [
        np.empty(dtype=np.float32, shape=(0,4)) # 4 weights for each vert for each set
        for _ in range(num_joint_sets)
    ]
    sk_vert_locs = [
        np.empty(dtype=np.float32, shape=(0,3)) # coordinate for each vert for each shapekey
        for _ in range(num_shapekeys)
    ]
    for prim in pymesh.primitives:
        prim.num_faces = 0
        if 'POSITION' not in prim.attributes:
            continue
        vert_index_base = len(vert_locs)
        if prim.indices is not None:
            indices = BinaryData.decode_accessor(gltf, prim.indices)
            indices = indices.reshape(len(indices))
        else:
            # non-indexed primitive: synthesize 0..n-1 indices
            num_verts = gltf.data.accessors[prim.attributes['POSITION']].count
            indices = np.arange(0, num_verts, dtype=np.uint32)
        # glTF default primitive mode is 4 (TRIANGLES)
        mode = 4 if prim.mode is None else prim.mode
        points, edges, tris = points_edges_tris(mode, indices)
        if points is not None:
            indices = points
        elif edges is not None:
            indices = edges
        else:
            indices = tris
        # We'll add one vert to the arrays for each index used in indices
        unique_indices, inv_indices = np.unique(indices, return_inverse=True)
        vs = BinaryData.decode_accessor(gltf, prim.attributes['POSITION'], cache=True)
        vert_locs = np.concatenate((vert_locs, vs[unique_indices]))
        if has_normals:
            if 'NORMAL' in prim.attributes:
                ns = BinaryData.decode_accessor(gltf, prim.attributes['NORMAL'], cache=True)
                ns = ns[unique_indices]
            else:
                # primitives without NORMAL get zero normals as placeholders
                ns = np.zeros((len(unique_indices), 3), dtype=np.float32)
            vert_normals = np.concatenate((vert_normals, ns))
        for i in range(num_joint_sets):
            if ('JOINTS_%d' % i) in prim.attributes and ('WEIGHTS_%d' % i) in prim.attributes:
                js = BinaryData.decode_accessor(gltf, prim.attributes['JOINTS_%d' % i], cache=True)
                ws = BinaryData.decode_accessor(gltf, prim.attributes['WEIGHTS_%d' % i], cache=True)
                js = js[unique_indices]
                ws = ws[unique_indices]
            else:
                js = np.zeros((len(unique_indices), 4), dtype=np.uint32)
                ws = np.zeros((len(unique_indices), 4), dtype=np.float32)
            vert_joints[i] = np.concatenate((vert_joints[i], js))
            vert_weights[i] = np.concatenate((vert_weights[i], ws))
        for morph_i, target in enumerate(prim.targets or []):
            if pymesh.shapekey_names[morph_i] is None:
                continue
            morph_vs = BinaryData.decode_accessor(gltf, target['POSITION'], cache=True)
            morph_vs = morph_vs[unique_indices]
            sk_vert_locs[morph_i] = np.concatenate((sk_vert_locs[morph_i], morph_vs))
        # inv_indices are the indices into the verts just for this prim;
        # calculate indices into the overall verts array
        prim_vidxs = inv_indices.astype(np.uint32, copy=False)
        prim_vidxs += vert_index_base # offset for verts from previous prims
        if edges is not None:
            edge_vidxs = np.concatenate((edge_vidxs, prim_vidxs))
        if tris is not None:
            prim.num_faces = len(indices) // 3
            num_faces += prim.num_faces
            loop_vidxs = np.concatenate((loop_vidxs, prim_vidxs))
            for uv_i in range(num_uvs):
                if ('TEXCOORD_%d' % uv_i) in prim.attributes:
                    uvs = BinaryData.decode_accessor(gltf, prim.attributes['TEXCOORD_%d' % uv_i], cache=True)
                    uvs = uvs[indices]
                else:
                    uvs = np.zeros((len(indices), 2), dtype=np.float32)
                loop_uvs[uv_i] = np.concatenate((loop_uvs[uv_i], uvs))
            for col_i in range(num_cols):
                if ('COLOR_%d' % col_i) in prim.attributes:
                    cols = BinaryData.decode_accessor(gltf, prim.attributes['COLOR_%d' % col_i], cache=True)
                    cols = cols[indices]
                    if cols.shape[1] == 3:
                        cols = colors_rgb_to_rgba(cols)
                else:
                    cols = np.ones((len(indices), 4), dtype=np.float32)
                loop_cols[col_i] = np.concatenate((loop_cols[col_i], cols))
    # Accessors are cached in case they are shared between primitives; clear
    # the cache now that all prims are done.
    gltf.decode_accessor_cache = {}
    if gltf.import_settings['merge_vertices']:
        vert_locs, vert_normals, vert_joints, vert_weights, \
            sk_vert_locs, loop_vidxs, edge_vidxs = \
            merge_duplicate_verts(
                vert_locs, vert_normals, vert_joints, vert_weights, \
                sk_vert_locs, loop_vidxs, edge_vidxs\
            )
    # ---------------
    # Convert all the arrays glTF -> Blender
    # Change from relative to absolute positions for morph locs
    for sk_locs in sk_vert_locs:
        sk_locs += vert_locs
    gltf.locs_batch_gltf_to_blender(vert_locs)
    gltf.normals_batch_gltf_to_blender(vert_normals)
    for sk_locs in sk_vert_locs:
        gltf.locs_batch_gltf_to_blender(sk_locs)
    if num_joint_sets:
        skin_into_bind_pose(
            gltf, skin_idx, vert_joints, vert_weights,
            locs=[vert_locs] + sk_vert_locs,
            vert_normals=vert_normals,
        )
    for uvs in loop_uvs:
        uvs_gltf_to_blender(uvs)
    for cols in loop_cols:
        # convert only the RGB channels; alpha stays linear
        colors_linear_to_srgb(cols[:, :-1])
    # ---------------
    # Start creating things
    mesh.vertices.add(len(vert_locs))
    mesh.vertices.foreach_set('co', squish(vert_locs))
    mesh.loops.add(len(loop_vidxs))
    mesh.loops.foreach_set('vertex_index', loop_vidxs)
    mesh.edges.add(len(edge_vidxs) // 2)
    mesh.edges.foreach_set('vertices', edge_vidxs)
    mesh.polygons.add(num_faces)
    # All polys are tris
    loop_starts = np.arange(0, 3 * num_faces, step=3)
    loop_totals = np.full(num_faces, 3)
    mesh.polygons.foreach_set('loop_start', loop_starts)
    mesh.polygons.foreach_set('loop_total', loop_totals)
    for uv_i in range(num_uvs):
        name = 'UVMap' if uv_i == 0 else 'UVMap.%03d' % uv_i
        layer = mesh.uv_layers.new(name=name)
        if layer is None:
            print("WARNING: UV map is ignored because the maximum number of UV layers has been reached.")
            break
        layer.data.foreach_set('uv', squish(loop_uvs[uv_i]))
    for col_i in range(num_cols):
        name = 'Col' if col_i == 0 else 'Col.%03d' % col_i
        layer = mesh.vertex_colors.new(name=name)
        if layer is None:
            print("WARNING: Vertex colors are ignored because the maximum number of vertex color layers has been "
                  "reached.")
            break
        layer.data.foreach_set('color', squish(loop_cols[col_i]))
    # Skinning
    # TODO: this is slow :/
    if num_joint_sets:
        pyskin = gltf.data.skins[skin_idx]
        for i, _ in enumerate(pyskin.joints):
            # ob is a temp object, so don't worry about the name.
            ob.vertex_groups.new(name='X%d' % i)
        vgs = list(ob.vertex_groups)
        for i in range(num_joint_sets):
            js = vert_joints[i].tolist() # tolist() is faster
            ws = vert_weights[i].tolist()
            for vi in range(len(vert_locs)):
                w0, w1, w2, w3 = ws[vi]
                j0, j1, j2, j3 = js[vi]
                if w0 != 0: vgs[j0].add((vi,), w0, 'REPLACE')
                if w1 != 0: vgs[j1].add((vi,), w1, 'REPLACE')
                if w2 != 0: vgs[j2].add((vi,), w2, 'REPLACE')
                if w3 != 0: vgs[j3].add((vi,), w3, 'REPLACE')
    # Shapekeys
    if num_shapekeys:
        ob.shape_key_add(name='Basis')
        mesh.shape_keys.name = mesh.name
        sk_i = 0
        for sk_name in pymesh.shapekey_names:
            if sk_name is None:
                continue
            ob.shape_key_add(name=sk_name)
            key_block = mesh.shape_keys.key_blocks[sk_name]
            key_block.data.foreach_set('co', squish(sk_vert_locs[sk_i]))
            sk_i += 1
    # ----
    # Assign materials to faces
    has_materials = any(prim.material is not None for prim in pymesh.primitives)
    if has_materials:
        material_indices = np.empty(num_faces, dtype=np.uint32)
        empty_material_slot_index = None
        f = 0
        for prim in pymesh.primitives:
            if prim.material is not None:
                # Get the material
                pymaterial = gltf.data.materials[prim.material]
                vertex_color = 'COLOR_0' if 'COLOR_0' in prim.attributes else None
                if vertex_color not in pymaterial.blender_material:
                    BlenderMaterial.create(gltf, prim.material, vertex_color)
                material_name = pymaterial.blender_material[vertex_color]
                # Put material in slot (if not there)
                if material_name not in mesh.materials:
                    mesh.materials.append(bpy.data.materials[material_name])
                material_index = mesh.materials.find(material_name)
            else:
                # all material-less prims share one empty slot
                if empty_material_slot_index is None:
                    mesh.materials.append(None)
                    empty_material_slot_index = len(mesh.materials) - 1
                material_index = empty_material_slot_index
            material_indices[f:f + prim.num_faces].fill(material_index)
            f += prim.num_faces
        mesh.polygons.foreach_set('material_index', material_indices)
    # ----
    # Normals
    # Set polys smooth/flat
    set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs)
    mesh.validate()
    has_loose_edges = len(edge_vidxs) != 0 # need to calc_loose_edges for them to show up
    mesh.update(calc_edges_loose=has_loose_edges)
    if has_normals:
        mesh.create_normals_split()
        mesh.normals_split_custom_set_from_vertices(vert_normals)
        mesh.use_auto_smooth = True
def points_edges_tris(mode, indices):
    """Convert a glTF primitive mode plus index buffer into index arrays.

    Exactly one of the returned (points, edges, tris) is non-None:
    points as-is, edges as flat index pairs, tris as flat index triples.
    Raises for primitive modes outside 0..6.
    """
    points = None
    edges = None
    tris = None

    if mode == 0:
        # POINTS: indices are already one vertex per point
        points = indices

    elif mode == 1:
        # LINES: indices already come in (start, end) pairs
        edges = indices

    elif mode == 2:
        # LINE LOOP: 0123 -> 01 12 23 30 (closes back to the first vertex)
        edges = np.empty(2 * len(indices), dtype=np.uint32)
        edges[[0, -1]] = indices[[0, 0]]
        edges[1:-1] = np.repeat(indices[1:], 2)

    elif mode == 3:
        # LINE STRIP: 0123 -> 01 12 23 (open polyline)
        edges = np.empty(2 * len(indices) - 2, dtype=np.uint32)
        edges[[0, -1]] = indices[[0, -1]]
        edges[1:-1] = np.repeat(indices[1:-1], 2)

    elif mode == 4:
        # TRIANGLES: indices already come in triples
        tris = indices

    elif mode == 5:
        # TRIANGLE STRIP: flip the winding of every odd triangle
        # TODO: numpyify
        def wind(tri_idx, corners):
            if tri_idx % 2 == 0:
                return corners
            return (corners[0], corners[2], corners[1])
        tris = np.array([
            wind(i, (indices[i], indices[i + 1], indices[i + 2]))
            for i in range(0, len(indices) - 2)
        ])
        tris = squish(tris)

    elif mode == 6:
        # TRIANGLE FAN: every triangle shares indices[0]
        # TODO: numpyify
        tris = np.array([
            (indices[0], indices[i], indices[i + 1])
            for i in range(1, len(indices) - 1)
        ])
        tris = squish(tris)

    else:
        raise Exception('primitive mode unimplemented: %d' % mode)

    return points, edges, tris
def squish(array):
    """Flatten an nD array into 1D (foreach_set requires flat data)."""
    return array.reshape(-1)
def colors_rgb_to_rgba(rgb):
    """Pad an (n, 3) RGB array into (n, 4) float32 RGBA with opaque alpha."""
    out = np.ones((len(rgb), 4), dtype=np.float32)
    out[:, :3] = rgb
    return out
def colors_linear_to_srgb(color):
    """Convert linear RGB to sRGB in place (pass only the RGB channels)."""
    assert color.shape[1] == 3  # only change RGB, not A

    # standard sRGB transfer function: linear segment below the threshold,
    # gamma segment above it; negatives are clamped to 0 on the linear side
    big = color >= 0.0031308
    lo = np.where(color < 0.0, 0.0, color * 12.92)
    hi = 1.055 * np.power(color, 1.0 / 2.4, where=big) - 0.055
    color[:] = np.where(big, hi, lo)
def uvs_gltf_to_blender(uvs):
    """Flip the V axis in place: glTF (u, v) becomes Blender (u, 1 - v)."""
    uvs[:, 1] = 1.0 - uvs[:, 1]
def skin_into_bind_pose(gltf, skin_idx, vert_joints, vert_weights, locs, vert_normals):
    """Apply linear-blend skinning (bind pose) in place to positions and normals.

    locs is a list of (n, 3) arrays (base verts plus shapekey verts), all
    transformed with the same per-vertex skinning matrices; vert_normals is
    rotated (not translated) and re-normalized.
    """
    # Skin each position/normal using the bind pose.
    # Skinning equation: vert' = sum_(j,w) w * joint_mat[j] * vert
    # where the sum is over all (joint,weight) pairs.
    # Calculate joint matrices
    joint_mats = []
    pyskin = gltf.data.skins[skin_idx]
    if pyskin.inverse_bind_matrices is not None:
        inv_binds = BinaryData.get_data_from_accessor(gltf, pyskin.inverse_bind_matrices)
        inv_binds = [gltf.matrix_gltf_to_blender(m) for m in inv_binds]
    else:
        # glTF default when inverseBindMatrices is absent: identity
        inv_binds = [Matrix.Identity(4) for i in range(len(pyskin.joints))]
    bind_mats = [gltf.vnodes[joint].bind_arma_mat for joint in pyskin.joints]
    joint_mats = [bind_mat @ inv_bind for bind_mat, inv_bind in zip(bind_mats, inv_binds)]
    # TODO: check if joint_mats are all (approximately) 1, and skip skinning
    joint_mats = np.array(joint_mats, dtype=np.float32)
    # Compute the skinning matrices for every vert
    num_verts = len(locs[0])
    skinning_mats = np.zeros((num_verts, 4, 4), dtype=np.float32)
    weight_sums = np.zeros(num_verts, dtype=np.float32)
    # accumulate the weighted joint matrices over every joint set (4 joints each)
    for js, ws in zip(vert_joints, vert_weights):
        for i in range(4):
            skinning_mats += ws[:, i].reshape(len(ws), 1, 1) * joint_mats[js[:, i]]
            weight_sums += ws[:, i]
    # Normalize weights to one; necessary for old files / quantized weights
    skinning_mats /= weight_sums.reshape(num_verts, 1, 1)
    # split into rotation/scale (3x3) and translation parts
    skinning_mats_3x3 = skinning_mats[:, :3, :3]
    skinning_trans = skinning_mats[:, :3, 3]
    for vs in locs:
        vs[:] = mul_mats_vecs(skinning_mats_3x3, vs)
        vs[:] += skinning_trans
    if len(vert_normals) != 0:
        vert_normals[:] = mul_mats_vecs(skinning_mats_3x3, vert_normals)
        # Don't translate normals!
        normalize_vecs(vert_normals)
def mul_mats_vecs(mats, vecs):
    """Given [m1,m2,...] and [v1,v2,...], returns [m1@v1,m2@v2,...]. 3D only."""
    count = len(vecs)
    columns = vecs.reshape(count, 3, 1)
    return np.matmul(mats, columns).reshape(count, 3)
def normalize_vecs(vectors):
    """Normalize each row vector in place; zero-length rows are left untouched."""
    lengths = np.linalg.norm(vectors, axis=1, keepdims=True)
    np.divide(vectors, lengths, out=vectors, where=lengths != 0)
def set_poly_smoothing(gltf, pymesh, mesh, vert_normals, loop_vidxs):
    """Set each polygon's 'use_smooth' flag per the import shading setting.

    FLAT: nothing to do (polys default to flat).
    SMOOTH: everything smooth, except primitives without NORMALs.
    NORMALS: guess flatness by comparing corner normals to the poly normal.

    Args:
        gltf: importer state; import_settings['import_shading'] selects the mode.
        pymesh: glTF mesh; primitives supply num_faces and attribute presence.
        mesh: Blender mesh whose polygons are updated.
        vert_normals: (num_verts, 3) per-vert normals.
        loop_vidxs: flat array of vert indices, 3 per triangle.
    """
    num_polys = len(mesh.polygons)

    if gltf.import_settings['import_shading'] == "FLAT":
        # Polys are flat by default; don't have to do anything
        return

    if gltf.import_settings['import_shading'] == "SMOOTH":
        poly_smooths = np.full(num_polys, True)
        f = 0
        for prim in pymesh.primitives:
            if 'NORMAL' not in prim.attributes:
                # Primitives with no NORMALs should use flat shading
                poly_smooths[f:f + prim.num_faces].fill(False)
            f += prim.num_faces
        mesh.polygons.foreach_set('use_smooth', poly_smooths)
        return

    assert gltf.import_settings['import_shading'] == "NORMALS"

    # Try to guess which polys should be flat based on the fact that all the
    # loop normals for a flat poly are = the poly's normal.
    # BUG FIX: np.bool was deprecated in NumPy 1.20 and removed in 1.24; use
    # the builtin bool as the dtype instead.
    poly_smooths = np.empty(num_polys, dtype=bool)

    poly_normals = np.empty(num_polys * 3, dtype=np.float32)
    mesh.polygons.foreach_get('normal', poly_normals)
    poly_normals = poly_normals.reshape(num_polys, 3)

    f = 0
    for prim in pymesh.primitives:
        if 'NORMAL' not in prim.attributes:
            # Primitives with no NORMALs should use flat shading
            poly_smooths[f:f + prim.num_faces].fill(False)
            f += prim.num_faces
            continue

        # Check the normals at the three corners against the poly normal.
        # Two (unit) normals are equal iff their dot product is 1; a poly is
        # smooth if any corner normal differs from the poly normal.
        poly_ns = poly_normals[f:f + prim.num_faces]
        smooth = np.zeros(prim.num_faces, dtype=bool)
        for corner in range(3):
            # Dot product against the corner-th vertex normal in each tri.
            vert_ns = vert_normals[loop_vidxs[3*f + corner:3*(f + prim.num_faces):3]]
            dot_prods = np.sum(vert_ns * poly_ns, axis=1)  # dot product
            np.logical_or(smooth, dot_prods <= 0.9999999, out=smooth)

        poly_smooths[f:f + prim.num_faces] = smooth
        f += prim.num_faces

    mesh.polygons.foreach_set('use_smooth', poly_smooths)
def merge_duplicate_verts(vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs):
    """Welds together verts that share identical per-vert data.

    This function attempts to invert the splitting done when exporting to
    glTF. Welds together verts with the same per-vert data (but possibly
    different per-loop data).

    Ideally normals would be treated as per-loop data, but that has problems,
    so we currently treat the normal as per-vert.

    Strategy is simple: put all the per-vert data into an array of structs
    ("dots"), dedupe with np.unique, then take all the data back out.

    Returns the same seven arrays, with verts merged and loop/edge vert
    indices remapped onto the merged verts.
    """
    # Very often two verts that "morally" should be merged will have normals
    # with very small differences. Round off the normals to smooth this over.
    if len(vert_normals) != 0:
        vert_normals *= 50000
        vert_normals[:] = np.trunc(vert_normals)
        vert_normals *= (1/50000)

    # Build the structured dtype: position, then (optionally) normal, then one
    # joints/weights group per influence set, then one offset per shape key.
    dot_fields = [('x', np.float32), ('y', np.float32), ('z', np.float32)]
    if len(vert_normals) != 0:
        dot_fields += [('nx', np.float32), ('ny', np.float32), ('nz', np.float32)]
    for i, _ in enumerate(vert_joints):
        dot_fields += [
            ('joint%dx' % i, np.uint32), ('joint%dy' % i, np.uint32),
            ('joint%dz' % i, np.uint32), ('joint%dw' % i, np.uint32),
            ('weight%dx' % i, np.float32), ('weight%dy' % i, np.float32),
            ('weight%dz' % i, np.float32), ('weight%dw' % i, np.float32),
        ]
    for i, _ in enumerate(sk_vert_locs):
        dot_fields += [
            ('sk%dx' % i, np.float32), ('sk%dy' % i, np.float32), ('sk%dz' % i, np.float32),
        ]

    # Fill one "dot" per vert with all of its per-vert data.
    dots = np.empty(len(vert_locs), dtype=np.dtype(dot_fields))
    dots['x'] = vert_locs[:, 0]
    dots['y'] = vert_locs[:, 1]
    dots['z'] = vert_locs[:, 2]
    if len(vert_normals) != 0:
        dots['nx'] = vert_normals[:, 0]
        dots['ny'] = vert_normals[:, 1]
        dots['nz'] = vert_normals[:, 2]
    for i, (joints, weights) in enumerate(zip(vert_joints, vert_weights)):
        dots['joint%dx' % i] = joints[:, 0]
        dots['joint%dy' % i] = joints[:, 1]
        dots['joint%dz' % i] = joints[:, 2]
        dots['joint%dw' % i] = joints[:, 3]
        dots['weight%dx' % i] = weights[:, 0]
        dots['weight%dy' % i] = weights[:, 1]
        dots['weight%dz' % i] = weights[:, 2]
        dots['weight%dw' % i] = weights[:, 3]
    for i, locs in enumerate(sk_vert_locs):
        dots['sk%dx' % i] = locs[:, 0]
        dots['sk%dy' % i] = locs[:, 1]
        dots['sk%dz' % i] = locs[:, 2]

    # Dedupe; inv_indices maps each old vert index to its merged vert index.
    unique_dots, inv_indices = np.unique(dots, return_inverse=True)

    # Remap the loop and edge vert indices onto the merged verts.
    loop_vidxs = inv_indices[loop_vidxs]
    edge_vidxs = inv_indices[edge_vidxs]

    # Unpack all the per-vert data back out of the unique dots.
    vert_locs = np.empty((len(unique_dots), 3), dtype=np.float32)
    vert_locs[:, 0] = unique_dots['x']
    vert_locs[:, 1] = unique_dots['y']
    vert_locs[:, 2] = unique_dots['z']
    if len(vert_normals) != 0:
        vert_normals = np.empty((len(unique_dots), 3), dtype=np.float32)
        vert_normals[:, 0] = unique_dots['nx']
        vert_normals[:, 1] = unique_dots['ny']
        vert_normals[:, 2] = unique_dots['nz']
    for i in range(len(vert_joints)):
        vert_joints[i] = np.empty((len(unique_dots), 4), dtype=np.uint32)
        vert_joints[i][:, 0] = unique_dots['joint%dx' % i]
        vert_joints[i][:, 1] = unique_dots['joint%dy' % i]
        vert_joints[i][:, 2] = unique_dots['joint%dz' % i]
        vert_joints[i][:, 3] = unique_dots['joint%dw' % i]
        vert_weights[i] = np.empty((len(unique_dots), 4), dtype=np.float32)
        vert_weights[i][:, 0] = unique_dots['weight%dx' % i]
        vert_weights[i][:, 1] = unique_dots['weight%dy' % i]
        vert_weights[i][:, 2] = unique_dots['weight%dz' % i]
        vert_weights[i][:, 3] = unique_dots['weight%dw' % i]
    for i in range(len(sk_vert_locs)):
        sk_vert_locs[i] = np.empty((len(unique_dots), 3), dtype=np.float32)
        sk_vert_locs[i][:, 0] = unique_dots['sk%dx' % i]
        sk_vert_locs[i][:, 1] = unique_dots['sk%dy' % i]
        sk_vert_locs[i][:, 2] = unique_dots['sk%dz' % i]

    return vert_locs, vert_normals, vert_joints, vert_weights, sk_vert_locs, loop_vidxs, edge_vidxs
|
def find_in_sorted(arr, x):
    """Binary search: return an index of x in sorted arr, or -1 if absent."""
    lo, hi = 0, len(arr)
    while lo < hi:
        mid = lo + (hi - lo) // 2
        if x < arr[mid]:
            hi = mid
        elif x > arr[mid]:
            lo = mid + 1
        else:
            return mid
    return -1
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Extract the plain text of every .srt subtitle file in a directory and
append it, one subtitle per line, to a single text-corpus file.

Created on Thu Dec 14 10:46:53 2017

@author: dalonlobo
"""
from __future__ import print_function

import os
import glob

import pysrt

# Path to the srt files
srt_files_path = "/home/dalonlobo/deepspeech_models/srt_data"
# Path for the text file
text_file_path = "/home/dalonlobo/deepspeech_models/srt_decoding"
text_file_name = "text_corpus.txt"
# Pattern for matching the srt files
pattern = "*.srt"

# Switch to the srt files directory
os.chdir(srt_files_path)
total_files = len(glob.glob(pattern))

# "a+" appends, so re-running adds to any existing corpus file.
with open(os.path.join(text_file_path, text_file_name), "a+") as text_corpus:
    for srt_file in glob.glob(pattern):
        print("Extracting: ", srt_file)
        subtitles = pysrt.open(srt_file)
        for subs in subtitles:
            # NOTE(review): Python 2 only -- .encode('utf-8') yields a str
            # here; under Python 3 this is bytes + str and would raise.
            text_corpus.write(subs.text.encode('utf-8').strip() + "\n")
        print("Done extracting", srt_file)

print("Total files extracted: ", total_files)
|
# a quick and dirty python script to build.
import os
import sys
import requests
def minifiy_js(code):
    '''
    Minify *code* via the javascript-minifier.com web API and return the
    minified javascript source.
    '''
    response = requests.post(
        "http://javascript-minifier.com/raw", data={'input': code})
    response.raise_for_status()
    return response.text
# combine all code into a single file, in dependency order
SOURCE_FILES = [
    "license.js",
    "sscd.js",
    "utils/math.js",
    "utils/vector.js",
    "utils/extend.js",
    "utils/aabb.js",
    "shapes/shape.js",
    "shapes/circle.js",
    "shapes/rectangle.js",
    "shapes/line.js",
    "shapes/lines_strip.js",
    "shapes/composite_shape.js",
    "shapes/capsule.js",
    "shapes/shapes_collider.js",
    "sscd_close.js",
    "packages/npm.js",
]

full_code = ""
for file_name in SOURCE_FILES:
    # 'with' guarantees the source file is closed even if a read fails.
    with open(os.path.join("src", file_name), 'r') as src:
        full_code += "// FILE: " + file_name + "\r\n\r\n"
        full_code += src.read() + "\r\n\r\n"

# write full version
with open('dist/dev/sscd.dev.js', 'w') as dest:
    dest.write(full_code)

# minify and write minified version
# BUG FIX: the original wrote the *unminified* code to sscd.dev.min.js and
# never called minifiy_js(); actually run the minifier here.
with open('dist/dev/sscd.dev.min.js', 'w') as dest:
    dest.write(minifiy_js(full_code))
|
from rest_framework import serializers
from .models import Comment, Talk, Vote
class CommentSerializer(serializers.ModelSerializer):
    """Serializes Comment objects (author, talk, content, timestamps)."""

    class Meta:
        model = Comment
        fields = ('id', 'user', 'talk', 'content', 'created', 'modified')
class VoteSerializer(serializers.ModelSerializer):
    """Serializes Vote objects (voter, talk, up/down flag, timestamps)."""

    class Meta:
        model = Vote
        fields = ('id', 'user', 'talk', 'vote', 'created', 'modified')
class TalkSerializer(serializers.ModelSerializer):
    """Serializes Talk objects, attaching vote counts and recent comments."""

    # Computed, read-only fields; resolved by the get_* methods below.
    comments = serializers.SerializerMethodField()
    downvotes = serializers.SerializerMethodField()
    upvotes = serializers.SerializerMethodField()
    user_vote = serializers.SerializerMethodField()

    def get_comments(self, obj):
        """Return the 10 most recent comments on this talk, serialized."""
        most_recent_10_comments = Comment.objects.filter(
            talk=obj).order_by('-id')[:10]
        return CommentSerializer(most_recent_10_comments, many=True).data

    def get_downvotes(self, obj):
        """Return the number of downvotes (vote=False) for this talk."""
        return Vote.objects.filter(
            talk=obj,
            vote=False).count()

    def get_upvotes(self, obj):
        """Return the number of upvotes (vote=True) for this talk."""
        return Vote.objects.filter(
            talk=obj,
            vote=True).count()

    def get_user_vote(self, obj):
        """
        Whether the current user has upvoted this talk (True),
        downvoted this talk (False), or neither (None)
        """
        request = self.context['request']
        user_object = request.user
        # NOTE(review): is_authenticated is callable only on Django < 2.0 (it
        # became a property in 1.10); confirm the project's Django version.
        if user_object.is_authenticated():
            try:
                user_vote = Vote.objects.get(
                    user=user_object,
                    talk=obj)
                return user_vote.vote
            except Vote.DoesNotExist:
                # No vote recorded for this user/talk pair.
                return None
        return None

    class Meta:
        model = Talk
        fields = ('id', 'title', 'speaker_name', 'date', 'comments', 'user_vote',
                  'description', 'downvotes', 'upvotes', 'created', 'modified')
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""tfr_gen: Generate mlir tfr decomposition function from python code."""
# pylint: disable=invalid-name
# pylint: disable=missing-function-docstring
# pylint: disable=g-direct-tensorflow-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import enum
import os
import re
import types
from typing import List, Tuple
import gast as ast
from tensorflow.compiler.mlir.tfr import tfr_wrapper as tfr
from tensorflow.core.framework import types_pb2
from tensorflow.python.autograph.converters import control_flow
from tensorflow.python.autograph.converters import return_statements
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.pyct import anno
from tensorflow.python.autograph.pyct import cfg
from tensorflow.python.autograph.pyct import qual_names
from tensorflow.python.autograph.pyct import transformer
from tensorflow.python.autograph.pyct import transpiler
from tensorflow.python.autograph.pyct.static_analysis import activity
from tensorflow.python.autograph.pyct.static_analysis import reaching_definitions
from tensorflow.python.autograph.pyct.static_analysis import reaching_fndefs
from tensorflow.python.autograph.pyct.static_analysis import type_inference
from tensorflow.python.framework import load_library
from tensorflow.python.framework import op_def_registry
from tensorflow.python.util import tf_inspect
class TFRTypes(enum.Enum):
  """All the supported types.

    1-3: tfr types
    4-99: mlir built-in types
    100-199: TF related translator internal types
    200- : Python related translator internal types
  """
  TENSOR = 1
  TENSOR_LIST = 2
  ATTR = 3
  NONE = 4
  SHAPE = 5  # shape -> !shape.shape
  I1 = 21
  I32 = 22
  I64 = 23
  F32 = 24
  INDEX = 25
  AG_UNDEFINED_VAL = 100
  AG_BUILTIN_FUNC = 101
  TF_RAW_OP = 102
  TF_REGION = 103
  TF_TENSOR_SHAPE_FUNC = 104  # shape.as_list
  TF_TENSOR_SHAPE_LIST = 105  # shape.as_list()
  PY_BUILTIN_FUNC = 200

  # As these are not real types, __getattribute__ helps them appear more like
  # actual types (i.e. class definitions).
  def __getattribute__(self, name):
    # TENSOR.shape resolves to SHAPE, mimicking tensor.shape.
    if name == 'shape' and object.__getattribute__(self, 'value') == 1:
      return TFRTypes.SHAPE
    # SHAPE.as_list resolves to the shape.as_list pseudo-method type.
    if name == 'as_list' and object.__getattribute__(self, 'value') == 5:
      return TFRTypes.TF_TENSOR_SHAPE_FUNC
    return object.__getattribute__(self, name)

  def __str__(self):
    # Render the member as the corresponding MLIR type string.
    if self.value < 4:  # pylint: disable=comparison-with-callable
      return '!tfr.' + self.name.lower()
    elif self.value < 10:  # pylint: disable=comparison-with-callable
      return '!shape.' + self.name.lower()
    else:
      return self.name.lower()
# TFR types that may appear as (non-tensor) attribute arguments.
_attribute_types = [
    TFRTypes.I1, TFRTypes.I32, TFRTypes.I64, TFRTypes.F32, TFRTypes.INDEX,
    TFRTypes.ATTR
]
def _get_type_from_proto(arg_def=None, attr_def=None):
  """Maps an OpDef arg_def/attr_def proto to the corresponding TFRTypes."""
  if not arg_def:
    # Attribute: dispatch on the declared attr type string.
    attr_type_map = {
        'bool': TFRTypes.I1,
        'int32': TFRTypes.I32,
        'int': TFRTypes.I64,
        'int64': TFRTypes.I64,
        'float': TFRTypes.F32,
    }
    return attr_type_map.get(attr_def.type, TFRTypes.ATTR)

  # Tensor argument: a number_attr/type_list_attr marks a tensor list.
  if arg_def.number_attr or arg_def.type_list_attr:
    return TFRTypes.TENSOR_LIST
  return TFRTypes.TENSOR
def _get_type_info_from_proto(arg_def=None, attr_def=None):
  """Returns the MLIR type string (with tfr annotations) for an arg/attr."""
  attr_type = _get_type_from_proto(arg_def, attr_def)
  if not arg_def:
    # Attributes carry their name so they can be matched back to the op.
    return '{}{{tfr.name="{}"}}'.format(attr_type, attr_def.name)
  else:
    attr_names = []
    if arg_def.number_attr:
      attr_names.append(arg_def.number_attr)
    if arg_def.type_attr:
      attr_names.append(arg_def.type_attr)
    if arg_def.type_list_attr:
      attr_names.append(arg_def.type_list_attr)

    # TODO(fengliuai): currently we don't support backward type inference, so we
    # have to store these non-derivable type in the signatures, and then they
    # can be used to cast the values when raising to tf ops.
    if arg_def.type == types_pb2.DT_FLOAT:
      attr_names.append('f32_')
    elif arg_def.type == types_pb2.DT_INT32:
      attr_names.append('i32_')
    elif arg_def.type == types_pb2.DT_INT64:
      attr_names.append('i64_')
    elif arg_def.type == types_pb2.DT_BOOL:
      attr_names.append('i1_')

    if not attr_names:
      return str(attr_type)
    else:
      # e.g. '!tfr.tensor<T>' where T names the governing type attribute.
      return '{}<{}>'.format(attr_type, ','.join(attr_names))
def _get_val_from_proto(attr_type, attr_val):
  """Renders an AttrValue proto as an MLIR literal of the given TFR type.

  Raises:
    NotImplementedError: for AttrValue kinds this translator cannot render.
  """
  if attr_type == TFRTypes.I1:
    return 'true' if attr_val.b else 'false'
  elif attr_type == TFRTypes.I32 or attr_type == TFRTypes.I64:
    return attr_val.i
  elif attr_type == TFRTypes.F32:
    return attr_val.f
  elif attr_type == TFRTypes.ATTR:
    # string
    if attr_val.HasField('s'):
      return '"{}"'.format(attr_val.s.decode())
    # type
    if attr_val.HasField('type'):
      if attr_val.type == types_pb2.DT_FLOAT:
        return 'f32'
      elif attr_val.type == types_pb2.DT_INT32:
        return 'i32'
      elif attr_val.type == types_pb2.DT_INT64:
        return 'i64'
      elif attr_val.type == types_pb2.DT_BOOL:
        return 'i1'
    # list
    if attr_val.HasField('list'):
      if attr_val.list.f:
        elt_ty = TFRTypes.F32
        values = attr_val.list.f
      elif attr_val.list.i:
        elt_ty = TFRTypes.I64
        values = attr_val.list.i
      else:
        # Empty list: element type is unknown.
        elt_ty = TFRTypes.NONE
        values = []
      array_attr_elts = ['{}:{}'.format(val, elt_ty) for val in values]
      return '[{}]'.format(','.join(array_attr_elts))
  raise NotImplementedError(
      'Proto AttrValue not recoganized. type: {}, value: {}'.format(
          attr_type, attr_val))
def _collect_derived_attrs_from_proto(op_def):
  """Collects the attr names that are derivable from the op's input args."""
  derived_attrs = set()
  # TODO(fengliuai): currently we don't support backward type inference, so we
  # have to store these non-derivable type in the signatures, and then they
  # can be used to cast the values when raising to tf ops.
  dtype_markers = {
      types_pb2.DT_FLOAT: 'f32_',
      types_pb2.DT_INT32: 'i32_',
      types_pb2.DT_INT64: 'i64_',
      types_pb2.DT_BOOL: 'i1_',
  }
  for arg in op_def.input_arg:
    for attr_name in (arg.type_attr, arg.number_attr, arg.type_list_attr):
      if attr_name:
        derived_attrs.add(attr_name)
    if arg.type in dtype_markers:
      derived_attrs.add(dtype_markers[arg.type])
  return derived_attrs
def _require_tensor_list(arg_def):
  """Truthy when the arg is a tensor list (has type_list_attr/number_attr)."""
  return arg_def.type_list_attr or arg_def.number_attr
def _camel_to_snake(name):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
class OpDefCache(object):
  """A Dict to cache the OpDef for the Python function name."""

  def __init__(self):
    # Maps function name -> (op_def, derived_attrs).
    self._op_defs = {}

  def lookup(self, f_name, func_def=None, optional=False):
    """Looks up (op_def, derived_attrs) for a composition function.

    Args:
      f_name: the python function name (used as the op name when func_def is
        falsy).
      func_def: either a live function object carrying _tfr_op_name, or an
        ast.FunctionDef whose Composite decorator names the op.
      optional: when True, return (None, None) for a non-composition function
        instead of raising.

    Returns:
      A (op_def, derived_attrs) tuple, cached per f_name.

    Raises:
      KeyError: non-composition function (and optional is False), or more
        than one Composite decorator.
      ValueError: the op name is not in the op registry.
    """
    if f_name in self._op_defs.keys():
      return self._op_defs[f_name]

    if isinstance(func_def, types.FunctionType):
      if not hasattr(func_def, '_tfr_op_name'):
        # skip a non-composition function
        if optional:
          return (None, None)
        else:
          raise KeyError('OpDef does not exist: ' + f_name)
      op_name = getattr(func_def, '_tfr_op_name')
    elif not func_def:
      op_name = f_name
    else:
      # TODO(fengliuai): create one utility method to match different apis.
      compose_dec = []
      for dec in func_def.decorator_list:
        if isinstance(dec, ast.Call):
          if isinstance(dec.func,
                        ast.Attribute) and dec.func.attr == 'Composite':
            compose_dec.append(dec)
          if isinstance(dec.func, ast.Name) and dec.func.id == 'Composite':
            compose_dec.append(dec)

      if not compose_dec:
        # skip a non-composition function
        if optional:
          return (None, None)
        else:
          raise KeyError('OpDef does not exist: ' + f_name)
      elif len(compose_dec) > 1:
        raise KeyError('More than one TF ops decomposes for.')
      else:
        # The first decorator argument is the op name.
        op_name = compose_dec[0].args[0].value

    op_def = op_def_registry.get(op_name)
    if not op_def:
      raise ValueError('Not a registered op: ' + op_name)
    derived_attrs = _collect_derived_attrs_from_proto(op_def)
    self._op_defs[f_name] = (op_def, derived_attrs)
    return (op_def, derived_attrs)

  def mlir_external_funcs(self):
    """Renders a tfr.func external declaration for every cached op."""
    tfr_funcs = []
    for op_def, derived_attrs in self._op_defs.values():
      tfr_func = '\ntfr.func @tf__{}_('.format(_camel_to_snake(op_def.name))

      # tensor inputs
      inputs = [
          _get_type_info_from_proto(arg_def) for arg_def in op_def.input_arg
      ]

      # attribute inputs. The attribute with default values are moved backwards.
      non_derived_attrs = [
          attr for attr in op_def.attr if attr.name not in derived_attrs
      ]
      attrs_no_default = [
          attr for attr in non_derived_attrs
          if not attr.HasField('default_value')
      ]
      attrs_with_default = [
          attr for attr in non_derived_attrs if attr.HasField('default_value')
      ]
      attr_names = set()
      for attr_def in attrs_no_default + attrs_with_default:
        inputs.append(_get_type_info_from_proto(None, attr_def))
        attr_names.add(attr_def.name)

      # tensor outputs
      outputs = [
          _get_type_info_from_proto(arg_def) for arg_def in op_def.output_arg
      ]

      inputs = ','.join(inputs)
      outputs = ','.join(outputs)
      attrs = ','.join(sorted(derived_attrs.union(attr_names)))
      tfr_funcs.append('{}{}) -> ({}) attributes {{{}}}'.format(
          tfr_func, inputs, outputs, attrs))
    return tfr_funcs
# Maps Python primitive types to their TFR equivalents.
_PY_TYPE_TO_TFR = {
    bool: TFRTypes.I1,
    int: TFRTypes.I64,
    float: TFRTypes.F32,
}

# Fixed return types of autograph operators ('Undefined' marks a symbol that
# has not been assigned yet).
_AG_FIXED_RETURN_TYPE = {
    'for_stmt': type(None),
    'if_stmt': type(None),
    'Undefined': TFRTypes.AG_UNDEFINED_VAL,
}

# Shorthand for autograph qualified names.
QN = qual_names.QN

# TODO(mdan): Fix this with an importable module.
AG_MODULE = api._TRANSPILER._extra_locals['ag__']  # pylint:disable=protected-access
class TFRTypeResolver(type_inference.Resolver):
  """Resolve types for the external names, calls and arguments."""

  def __init__(self, op_defs):
    super(TFRTypeResolver, self).__init__()
    self._op_defs = op_defs

    # This pattern matching mechanism works with the functional form generated
    # by autograph:
    #
    #   for i in data:
    #     print(i)
    #
    # generates:
    #
    #   def loop_body(itr):
    #     i = itr
    #     print(i)
    #   ag__.for_stmt(target)
    #
    # The mechanism lets us infer the type of the itr argument based on that of
    # target.
    self._for_loop_target_types = {}  # Maps body function name to iterated.
    self._for_loop_body_fns = {}  # Used only to avoid collisions.

  def res_name(self, ns, types_ns, name):
    """Resolves an external name against the namespace / builtins / ag__."""
    name_str = str(name)
    if name_str in ns:
      ns_val = ns[name_str]
      return {type(ns_val)}, ns_val
    if name_str in __builtins__:
      return {TFRTypes.PY_BUILTIN_FUNC}, __builtins__[name_str]
    # This name is not in the namespace because the autograph transformation
    # is not backloaded into Python.
    if name_str == 'ag__':
      return {type(AG_MODULE)}, AG_MODULE
    return None, None

  def res_value(self, ns, value):
    """Resolves the TFR type set of a constant value."""
    if value is None:
      return {TFRTypes.NONE}
    if value in (TFRTypes.SHAPE, TFRTypes.TF_TENSOR_SHAPE_FUNC):
      # See TFRTypes.__getattrbute__.
      # TODO(mdan): Replacing the enum with classes would avoid this overlap.
      return {value}
    # TODO(mdan): Index more efficiently. Could do a name check instead.
    if any(v is value for v in AG_MODULE.__dict__.values()):
      return {TFRTypes.AG_BUILTIN_FUNC}
    if getattr(value, '__name__', None) == 'tensorflow.raw_ops':
      return {types.ModuleType}
    if hasattr(value, '__module__'):
      # All the imported operations, which are not autograph built-ins, are
      # considered to be TF raw ops.
      # TODO(fengliuai): refine the condition so we only matche tensorflow
      # ops here.
      return {TFRTypes.TF_RAW_OP}
    # TODO(mdan): Is ATTR equivalent to string?
    return {_PY_TYPE_TO_TFR.get(type(value), TFRTypes.ATTR)}

  def res_call(self, ns, types_ns, node, f_type, args, keywords):
    """Resolves the return type (and any side effects) of a call node."""
    name = anno.Basic.QN.of(node.func)
    if f_type == (TFRTypes.AG_BUILTIN_FUNC,):

      if name == QN(QN('ag__'), attr='if_stmt'):
        nouts = node.args[6].value
        # TODO(mdan): Look at the actual types out of if_body.
        side_effects = {
            qual_names.QN(n.value): {TFRTypes.TENSOR}
            for n in node.args[5].elts[:nouts]
        }
        return {type(None)}, side_effects

      if name == QN(QN('ag__'), attr='for_stmt'):
        assert isinstance(node.args[2], ast.Name)
        body_fn_name = str(anno.Basic.QN.of(node.args[2]))
        assert body_fn_name not in self._for_loop_body_fns, (
            'Previously used here: {}. Are you reusing the Resolver across '
            'transformations?').format(self._for_loop_body_fns[body_fn_name])
        self._for_loop_body_fns[body_fn_name] = anno.Basic.ORIGIN.of(node)

        # Remember what the loop iterates so the body fn's argument type can
        # be recovered later in res_arg.
        iterated_type = args[0]
        assert iterated_type & {
            TFRTypes.TENSOR_LIST, TFRTypes.TENSOR, List[int]
        }, (
            iterated_type)
        self._for_loop_target_types[body_fn_name] = iterated_type

        return {type(None)}, None

      # TODO(mdan): Actually resolve the type here instead.
      ret_type = _AG_FIXED_RETURN_TYPE.get(name.qn[1], None)
      if ret_type is not None:
        return {ret_type}, None
      raise NotImplementedError('return type of {}'.format(name))

    elif f_type == (TFRTypes.TF_RAW_OP,):
      # Raw-op call: the result types come from the registered OpDef.
      op_name = name.qn[1]
      op_def, _ = self._op_defs.lookup(op_name)
      if len(op_def.output_arg) == 1:
        return {_get_type_from_proto(op_def.output_arg[0])}, None
      return ({tuple(_get_type_from_proto(arg) for arg in op_def.output_arg)},
              None)

    elif f_type == (TFRTypes.PY_BUILTIN_FUNC,):
      assert name.is_simple()
      if name == QN('range'):
        return {List[int]}, None

      if name == QN('len'):
        return {TFRTypes.INDEX}, None

    elif f_type == (TFRTypes.TF_TENSOR_SHAPE_FUNC,):
      return {TFRTypes.TF_TENSOR_SHAPE_LIST}, None

    raise NotImplementedError('Function:', name, f_type)

  def res_arg(self, ns, types_ns, f_name, name, type_anno, f_is_local):
    """Resolves the type of a function argument (loop-body or composition)."""
    if f_is_local:
      f_name_str = str(f_name)
      if f_name_str in self._for_loop_target_types:
        # See autograph/converters/control_flow.py - the function has a single
        # argument, the iterate before any expansion.
        assert self._for_loop_target_types[f_name_str] & {List[int]}
        # Assume all loops are TF loops. Then the iterates are autoboxed into
        # Tensors.
        return {TFRTypes.INDEX}
      else:
        return None

    func = ns[f_name]

    op_def, derived_attrs = self._op_defs.lookup(f_name, func)
    if op_def is None:
      return None
    pos = tf_inspect.getfullargspec(func).args.index(str(name))

    if pos < len(op_def.input_arg):
      # Positional tensor input.
      arg_def = op_def.input_arg[pos]
      return {_get_type_from_proto(arg_def)}
    elif pos < len(op_def.input_arg) + len(op_def.attr) - len(derived_attrs):
      # Attribute input: count only the non-derived attrs.
      non_derived_attr_pos = pos - len(op_def.input_arg)
      for attr_def in op_def.attr:
        # derived attribute, skip this one and continue to the next one.
        if attr_def.name in derived_attrs:
          continue
        if non_derived_attr_pos == 0:
          return {_get_type_from_proto(None, attr_def)}
        non_derived_attr_pos -= 1

    raise ValueError('Argument is not defined in OpDef: ' + str(name))

  def res_subscript(self, ns, types_ns, node_or_slice, value, slice_):
    """Resolves the element type of a subscript expression."""
    assert len(value) == 1
    value, = tuple(value)
    if value == TFRTypes.TF_TENSOR_SHAPE_LIST:
      # TODO(mdan): This is not entirely correct for multi-element slices.
      return {int}
    elif value in (TFRTypes.TENSOR_LIST, TFRTypes.TENSOR):
      # TODO(mdan): This is not entirely correct for multi-element slices.
      return {TFRTypes.TENSOR}
    raise NotImplementedError('slice of {}'.format(value))

  def res_compare(self, ns, types_ns, node, left, right):
    """Comparisons always produce an i1 (boolean)."""
    # TODO(fengliuai): make sure left and right are compatible
    return {TFRTypes.I1}

  def res_binop(self, ns, types_ns, node, left, right):
    """Binary ops take the type of the left operand."""
    # TODO(fengliuai): make sure left and right are compatible
    return left
class SymbolTable(object):
  """Symbol Table for python code.

  Maintains a stack of scopes; lookups search from the innermost scope
  outward.
  """

  def __init__(self):
    self.symbols = []
    self.enter_scope()
    # Depth of nested scf (structured control flow) regions.
    self.scf_scope = 0
    # reserved key words
    self.insert_symbol('len', 'len', TFRTypes.PY_BUILTIN_FUNC)

  def enter_scope(self, scf_scope=False):
    """Enter a new scope - at function level."""
    self.symbols.append({'types': {}, 'symbols': {}})
    self.curr_table = self.symbols[-1]
    if scf_scope:
      self.scf_scope += 1

  def insert_symbol(self, name, value, type_):
    """Binds name -> (value, type_) in the innermost scope; returns value."""
    self.curr_table['symbols'][name] = (value, type_)
    # TODO(mdan): Use the inferred type rather than tracking it here.
    # The following field is deprecated.
    self.curr_table['types'][name] = type_
    return value

  def exit_scope(self):
    """Pops the innermost scope."""
    self.symbols.pop()
    self.curr_table = self.symbols[-1]
    if self.scf_scope > 0:
      self.scf_scope -= 1

  def in_scf_scope(self):
    """True when currently inside at least one scf region."""
    return self.scf_scope > 0

  def lookup(self, name):
    """Returns (value, type_) for name, innermost scope first; else None."""
    for table in reversed(self.symbols):
      if name in table['symbols']:
        return table['symbols'][name]
    return None
class TFRGen(transformer.CodeGenerator):
"""Visit the AST and generate MLIR TFR functions."""
  def __init__(self, ctx, op_defs):
    """Initializes the generator with a transformer context and OpDefCache."""
    super(TFRGen, self).__init__(ctx)
    self.ctx = ctx
    self.symbol_table = SymbolTable()
    self._op_defs = op_defs
def _create_mlir_loc(self, loc):
"""Creates mlir location from autograph ORIGIN value.
Args:
loc: OriginInfo
Returns:
A serialized mlir location string.
"""
if loc is not None and loc.loc.filename:
file_name = os.path.basename(loc.loc.filename)
return 'loc("{}":{}:{})'.format(file_name, loc.loc.lineno,
loc.loc.col_offset)
else:
return 'loc(unknown)'
  def _emit_with_loc(self, op_str, node=None):
    """Emit the mlir operation with the location associated with the node.

    Args:
      op_str: The mlir operation string to be emitted.
      node: The node of the AST tree, the mlir operation translated from.
    """
    loc = ''
    if node:
      # Attach the autograph-recorded source location, when available.
      loc = self._create_mlir_loc(
          anno.getanno(node, anno.Basic.ORIGIN, default=None))
    self.emit(op_str + ' ' + loc)
  def _get_inferred_type(self, node, default=None):
    """Returns the single inferred type annotated on node, or default.

    Warns (via print) when the annotation is missing or disagrees with the
    expected default; raises ValueError on ambiguous (multi-type) inference.
    """
    types_ = anno.getanno(node, anno.Static.TYPES, None)
    if not types_:
      print('WARN: no Static.TYPES annotation. Fix the type inference pass: ')
      self.debug_print(node)
      return default

    if types_ and len(types_) > 1:
      raise ValueError('ambiguous inferred type for "{}": {}'.format(
          node, types_))

    type_, = types_

    # TODO(fengliuai): Tuple is added here to make return tuple work.
    if type_ is list or type_ is Tuple:
      # TODO(fengliuai): Seems like we need to move the followed list handling
      # to the type inference and we shouldn't just put 'list' there. Otherwise
      # we couldn't find out the right type for the Name node.
      if not isinstance(node, ast.List):
        return default
      # A literal list is a tensor list if any element is a tensor; otherwise
      # it's an attribute list.
      all_types = [
          anno.getanno(elt, anno.Static.TYPES, None) for elt in node.elts
      ]
      if (TFRTypes.TENSOR,) in all_types:
        # For the elt which is not tfr.tensor, tfr.constant_tensor needs to be
        # use to cast it to a tfr.tensor.
        return TFRTypes.TENSOR_LIST
      else:
        return TFRTypes.ATTR

    if default is not None and type_ != default:
      print('WARN: type annotation {}({}) does not match {}({})'.format(
          type_, type(type_), default, type(default)))
      self.debug_print(node)

    return type_
  def _pack_tensor_list(self, value):
    """Packs a tensor_list SSA value into one tensor via a tf__pack call."""
    # This is packing a list of tensors, then the axis is 0.
    axis = self._ssa_name('zero')
    self._emit_with_loc('\n{} = constant 0 : i64'.format(axis))
    casted = self._ssa_name('pack')
    self.emit('\n{} = tfr.call @tf__pack({}, {})'.format(casted, value, axis))
    self._emit_with_loc(' : (!tfr.tensor_list, i64) -> !tfr.tensor')
    # load the op def of tf.Pack
    self._op_defs.lookup('Pack')
    return casted, TFRTypes.TENSOR
def _index_to_I64(self, value, ty):
if ty == TFRTypes.INDEX:
casted = self._ssa_name('casted')
self._emit_with_loc('\n{} = index_cast {} : index to i64'.format(
casted, value))
return casted, TFRTypes.I64
else:
return value, ty
  def _value_to_tensor(self, value, ty, node):
    """Casts a scalar SSA value to a tfr.tensor via tfr.constant_tensor."""
    # index values must first be widened to i64.
    value, ty = self._index_to_I64(value, ty)
    cst_tensor = self._ssa_name('cst')
    self.emit('\n{} = "tfr.constant_tensor"({})'.format(cst_tensor, value))
    self._emit_with_loc(' : ({}) -> !tfr.tensor'.format(ty), node)
    return cst_tensor, TFRTypes.TENSOR
  def _ssa_name(self, prefix):
    """Returns a fresh SSA value name ('%...') derived from prefix."""
    if isinstance(prefix, qual_names.QN):
      assert prefix.is_simple(), 'ANF transform should have cleaned this up'
      prefix = prefix.ssf()
    return '%' + self.ctx.namer.new_symbol(prefix, set())
  def _op_def(self, op_name):
    """Returns the registered OpDef for op_name (None if unregistered)."""
    return op_def_registry.get(op_name)
def visit_block(self, block):
return [self.visit(item) for item in block]
def visit_Pass(self, node):
if self.symbol_table.in_scf_scope():
self._emit_with_loc('\nscf.yield', node)
else:
self._emit_with_loc('\ntfr.return', node)
  def visit_Attribute(self, node):
    """Translates attribute access (x.attr); returns a (value, type) pair."""
    node_type = self._get_inferred_type(node, None)
    if isinstance(node.value, ast.Name):
      if node.value.id == 'ag__':
        # some variables are assigned with 'ag__.xxx' method, we should handle
        # them following the autograph convensions.
        return (node.attr, TFRTypes.AG_BUILTIN_FUNC)

      if node_type == TFRTypes.TF_RAW_OP:
        # This branch is used when it is inside tensorflow
        return (node.attr, TFRTypes.TF_RAW_OP)

      value, _ = self.visit(node.value)
      tensor_type = self._get_inferred_type(node.value, None)
      # TODO(fengliuai): use node_type once it
      if node_type == TFRTypes.SHAPE:
        print('TODO: use "node_type"')
      if node.attr == 'shape' and tensor_type == TFRTypes.TENSOR:
        # tensor.shape lowers to a tfr.get_shape op.
        ssa_value = self._ssa_name('shape')
        self._emit_with_loc(
            '\n{} = tfr.get_shape {} -> !shape.shape'.format(ssa_value, value),
            node)
        return (ssa_value, TFRTypes.SHAPE)

    if isinstance(node.value, ast.Attribute):
      if isinstance(node.value.value, ast.Name):
        if node.value.value.id == 'tf' and node.value.attr == 'raw_ops':
          # This branch is used when it is outside tensorflow
          return (node.attr, TFRTypes.TF_RAW_OP)

      value, ty = self.visit(node.value)
      # TODO(fengliuai): use node_type once it
      if node_type == TFRTypes.TF_TENSOR_SHAPE_FUNC:
        print('TODO: use "node_type"')
      if ty == TFRTypes.SHAPE and node.attr == 'as_list':
        return (value, TFRTypes.TF_TENSOR_SHAPE_FUNC)

    raise NotImplementedError('Attribute kind not recoganized.')
  def visit_Assign(self, node):
    """Translates an assignment and binds target names in the symbol table."""
    values = self.visit(node.value)
    if isinstance(node.targets[0], ast.Tuple):
      targets = [elt.id for elt in node.targets[0].elts]
    elif isinstance(node.targets[0], ast.Name):
      targets = [node.targets[0].id]
    else:
      raise NotImplementedError('Assignment target type not recoganized.')

    if isinstance(values, list):
      if len(targets) == len(values):
        # One (value, type) pair per target: bind each pair.
        for key, value in zip(targets, values):
          ssa_value, ty_ = value
          ty = self._get_inferred_type(node.value, ty_)
          self.symbol_table.insert_symbol(key, ssa_value, ty)
      elif len(values) == 1:
        n, ty = values[0]
        assert ty == TFRTypes.TENSOR_LIST
        # assign a tensor_list to multiple variables
        for idx, key in enumerate(targets):
          idx_name = self._ssa_name('idx')
          self._emit_with_loc(
              '\n{} = constant {} : index'.format(idx_name, idx), node)
          elt_name = self._ssa_name('elt')
          self.emit('\n{} = tfr.get_element {}[{}]'.format(
              elt_name, n, idx_name))
          self._emit_with_loc(' : (!tfr.tensor_list, index) -> !tfr.tensor',
                              node)
          self.symbol_table.insert_symbol(key, elt_name, TFRTypes.TENSOR)
      elif len(targets) == 1:
        # Multiple values into one target: bind the name and type lists.
        ssa_names = [n for n, _ in values]
        tys = [t for _, t in values]
        self.symbol_table.insert_symbol(targets[0], ssa_names, tys)
    else:
      self.symbol_table.insert_symbol(targets[0], values[0], values[1])
def _emit_binary_op(self, op, lhs, lhs_ty, rhs, rhs_ty):
assert lhs_ty, rhs_ty
if isinstance(op, ast.Sub):
code = 'sub'
elif isinstance(op, ast.Add):
code = 'add'
else:
raise NotImplementedError('BinOp operator not recognized' + op)
if lhs_ty == TFRTypes.I64:
suffix = 'i'
elif lhs_ty == TFRTypes.F32:
suffix = 'f'
else:
raise NotImplementedError('BinOp operand type not recognized' + op)
ret = self._ssa_name(code)
self._emit_with_loc(
'\n{} = {}{} {}, {} : {}'.format(ret, code, suffix, lhs, rhs, lhs_ty),
op)
return ret, lhs_ty
def visit_AugAssign(self, node):
lhs, lhs_ty = self.visit(node.target)
rhs, rhs_ty = self.visit(node.value)
ret, ret_ty = self._emit_binary_op(node.op, lhs, lhs_ty, rhs, rhs_ty)
self.symbol_table.insert_symbol(node.target.id, ret, ret_ty)
def visit_BinOp(self, node):
lhs, lhs_ty = self.visit(node.left)
rhs, rhs_ty = self.visit(node.right)
return self._emit_binary_op(node.op, lhs, lhs_ty, rhs, rhs_ty)
def visit_BoolOp(self, node):
values = [self.visit(value) for value in node.values]
# TODO(fengliuai): Handle more ast node types.
if isinstance(node.op, ast.Or):
raise NotImplementedError('Or operator not recognized')
elif isinstance(node.op, ast.And):
raise NotImplementedError('And operator not recognized')
  def visit_Call(self, node):
    """Dispatches a call expression on the callee's inferred type.

    Handles autograph builtins (if_stmt, for_stmt, Undefined,
    UndefinedReturnValue), TF raw ops, tensor-shape helper calls, and the
    python builtin `len`.
    """
    func_name, func_type = self.visit(node.func)
    _ = self._get_inferred_type(node.func, func_type)
    if func_type == TFRTypes.AG_BUILTIN_FUNC:
      if func_name == 'if_stmt':
        cond, _ = self.visit(node.args[0])
        body, _ = self.visit(node.args[1])
        orelse, _ = self.visit(node.args[2])
        get_state, _ = self.visit(node.args[3])
        nouts = int(node.args[6].value)
        out_symbols = []
        # The out symbols are just a Tuple of names
        for out in node.args[5].elts[:nouts]:
          val, ty = self.symbol_table.lookup(out.value)
          # NOTE(review): this raises when the symbol is NOT the undefined
          # sentinel, despite the message wording — confirm intent.
          if ty != TFRTypes.AG_UNDEFINED_VAL:
            raise ValueError('if stmt out symbol is not defined.')
          out_symbols.append(out.value)
        return self._visit_if_stmt(cond, body, orelse, get_state, out_symbols,
                                   node)
      elif func_name == 'for_stmt':
        range_ = self._visit_iter(node.args[0])
        body, _ = self.visit(node.args[2])
        get_state, _ = self.visit(node.args[3])
        loop_carried = [out.value for out in node.args[5].elts]
        # TODO(fengliuai): opt is not used here.
        return self._visit_for_stmt(range_, body, get_state, loop_carried, node)
      elif func_name == 'Undefined':
        val = self._ssa_name(node.args[0].value)
        return (val, TFRTypes.AG_UNDEFINED_VAL)
      elif func_name == 'UndefinedReturnValue':
        val = self._ssa_name('return_val')
        return (val, TFRTypes.AG_UNDEFINED_VAL)
    if func_type == TFRTypes.TF_RAW_OP:
      return self._visit_tf_op(func_name, node.args, node.keywords, node)
    if func_type == TFRTypes.TF_TENSOR_SHAPE_FUNC:
      # `.shape.as_list` itself: defer emission until the result is used.
      return (func_name, TFRTypes.TF_TENSOR_SHAPE_LIST)
    if func_type == TFRTypes.PY_BUILTIN_FUNC:
      if func_name == 'len':
        arg, ty = self.visit(node.args[0])
        ty = self._get_inferred_type(node.args[0], ty)
        assert ty == TFRTypes.TF_TENSOR_SHAPE_LIST, ty
        # len(shape) lowers to shape.rank followed by a size->index cast.
        len_value = self._ssa_name('len')
        self._emit_with_loc(
            '\n{} = shape.rank {} : !shape.shape -> !shape.size'.format(
                len_value, arg), node)
        size_value = self._ssa_name('len_size')
        self._emit_with_loc(
            '\n{} = shape.size_to_index {} : !shape.size'.format(
                size_value, len_value), node)
        return (size_value, TFRTypes.INDEX)
    raise NotImplementedError('call operator not recognized: {} {}'.format(
        func_name, func_type))
  def visit_Compare(self, node):
    """Lowers a comparison to `tfr.equal` (attrs) or cmpi/cmpf (numerics).

    NOTE(review): for chained comparisons (a < b < c) only the result of
    the last operator pair is returned; intermediate results are dropped —
    confirm chained compares are intended to be unsupported.
    """
    lhs, lhs_ty = self.visit(node.left)
    for op, right in zip(node.ops, node.comparators):
      rhs, _ = self.visit(right)
      # Map the python operator to an MLIR comparison predicate.
      if isinstance(op, ast.Eq):
        pred = 'eq'
      elif isinstance(op, ast.Lt):
        pred = 'ult'
      elif isinstance(op, ast.LtE):
        pred = 'ule'
      elif isinstance(op, ast.Gt):
        pred = 'ugt'
      elif isinstance(op, ast.GtE):
        pred = 'uge'
      elif isinstance(op, ast.NotEq):
        pred = 'ne'
      else:
        raise NotImplementedError('Compare operator not recognized')
      ret = self._ssa_name(pred)
      if lhs_ty == TFRTypes.ATTR:
        # Attribute comparison only supports equality via tfr.equal.
        self._emit_with_loc(
            '\n{} = tfr.equal {}, {} -> i1'.format(ret, lhs, rhs), node)
      else:
        if lhs_ty == TFRTypes.I64:
          code = 'cmpi'
        elif lhs_ty == TFRTypes.F32:
          code = 'cmpf'
        else:
          raise NotImplementedError('Compare operand type not recognized')
        self._emit_with_loc(
            '\n{} = {} "{}", {}, {} : {}'.format(ret, code, pred, lhs, rhs,
                                                 lhs_ty), node)
    return ret, TFRTypes.I1
  def visit_Constant(self, node):
    """Materializes a python literal as an MLIR constant.

    NOTE(review): for a `None` literal no constant is emitted, yet the
    freshly generated SSA name is still returned — confirm callers never
    reference it in emitted code.
    """
    cst_name = self._ssa_name('cst')
    if node.value is None:
      cst_ty = TFRTypes.NONE
    elif isinstance(node.value, bool):
      # Booleans print lowercase ('true'/'false') in MLIR.
      cst_ty = self._get_inferred_type(node)
      cst_val = str(node.value).lower()
      self._emit_with_loc('\n{} = constant {}'.format(cst_name, cst_val), node)
    else:
      cst_ty = self._get_inferred_type(node)
      cst_val = node.value
      if cst_ty == TFRTypes.ATTR:
        # String-like constants become quoted tfr.constant attributes.
        self._emit_with_loc(
            '\n{} = tfr.constant "{}" -> {}'.format(cst_name, cst_val, cst_ty),
            node)
      else:
        self._emit_with_loc(
            '\n{} = constant {} : {}'.format(cst_name, cst_val, cst_ty), node)
    return cst_name, cst_ty
  def visit_FunctionDef(self, node):
    """Emits a `tfr.func` for a python function registered as a composite op.

    Nested functions (no matching OpDef) are only recorded in the symbol
    table for later lookup; no MLIR is emitted for them here.
    """
    op_def, derived_attrs = self._op_defs.lookup(node.name, node, True)
    if op_def is None:
      # Nested function. Insert it to symbol table for looking up later.
      self.symbol_table.insert_symbol(node.name, node, None)
      return
    op_name = op_def.name
    if self.symbol_table.lookup(op_name):
      raise LookupError('Composition has not been registered for op: ' +
                        op_name)
    else:
      self.symbol_table.insert_symbol(node.name, None, None)
    self.symbol_table.enter_scope()
    self.emit('\ntfr.func @tf__{0}('.format(_camel_to_snake(op_name)))
    arg_list = []
    idx = 0
    max_idx = len(op_def.input_arg) + len(op_def.attr)
    for arg in node.args.args:
      arg_name = self._ssa_name(anno.getanno(arg, anno.Basic.QN))
      arg_type = anno.getanno(arg, anno.Static.TYPES)[0]
      arg_attr = ''
      # Positions past the op's input args correspond to op attributes.
      if idx >= len(op_def.input_arg):
        attr_def = op_def.attr[idx - len(op_def.input_arg)]
        # skip the derived attributes
        while attr_def.name in derived_attrs and (idx + 1) < max_idx:
          idx += 1
          attr_def = op_def.attr[idx - len(op_def.input_arg)]
        if idx >= max_idx:
          raise ValueError('Argument is not defined in OpDef: ' + arg_name)
        # Attach tfr.name (and tfr.default when present) as an arg attribute.
        arg_attr += '{{tfr.name="{}"'.format(attr_def.name)
        if attr_def.HasField('default_value'):
          default_val = _get_val_from_proto(arg_type, attr_def.default_value)
          arg_attr += ',tfr.default={}'.format(default_val)
        arg_attr += '}'
      idx += 1
      arg_str = '{}: {}{}'.format(arg_name, arg_type, arg_attr)
      arg_list.append(arg_str)
      self.symbol_table.insert_symbol(arg.id, arg_name, arg_type)
    ret_type_list = []
    for ret_def in op_def.output_arg:
      # Variadic / type-list outputs lower to a tensor list.
      if ret_def.number_attr or ret_def.type_list_attr:
        ret_type_list.append(str(TFRTypes.TENSOR_LIST))
      else:
        ret_type_list.append(str(TFRTypes.TENSOR))
    self.emit('{}) -> ({}) {{'.format(', '.join(arg_list),
                                      ', '.join(ret_type_list)))
    self.visit_block(node.body)
    self._emit_with_loc('\n}', node)
    self.symbol_table.exit_scope()
def visit_arguments(self, node):
# TODO(fengliuai): return ordered the types and names.
# We need to order the arguments to match the assumption in the TFR dialect.
raise NotImplementedError('arguments not supported.')
def visit_Lambda(self, node):
raise NotImplementedError('Lambda not supported.')
def _get_mlir_ssa_values(self, name_prefix, out_types):
"""Create MLIR convention SSA values."""
out_ssa_values = []
if not out_types:
return '', out_ssa_values
out_name = self._ssa_name(name_prefix)
if len(out_types) == 1:
out_name_suffix = ''
out_ssa_values.append(out_name)
else:
# For multiple returns, MLIR uses '%s:i' when they are defined and
# '%s#i' when they are used.
out_name_suffix = ':{}'.format(len(out_types))
for idx, _ in enumerate(out_types):
out_ssa_values.append('{}#{}'.format(out_name, idx))
return '{}{}'.format(out_name, out_name_suffix), out_ssa_values
  def _visit_if_stmt(self, cond, body_def, orelse_def, get_state, out_symbols,
                     node):
    """Emits an `scf.if` for an autograph-converted conditional.

    The out symbols are rebound to the scf.if result SSA values before
    visiting either branch, so later references resolve to the results.
    Returns the (ssa_value, type_string) pairs of the results.
    """
    self.emit('\n')
    ret_str, ret_ssa_values = self._get_mlir_ssa_values(
        'if_stmt', [TFRTypes.TENSOR] * len(out_symbols))
    if ret_ssa_values:
      self.emit(ret_str + ' = ')
    # add ssa values to the symbol table
    out_types = []
    for symbol, ssa_value in zip(out_symbols, ret_ssa_values):
      self.symbol_table.insert_symbol(symbol, ssa_value, TFRTypes.TENSOR)
      out_types.append(str(TFRTypes.TENSOR))
    self.emit('scf.if {} -> ({}) {{'.format(cond, ', '.join(out_types)))
    # Create a new scope in case the local variables are leaked.
    self.symbol_table.enter_scope(scf_scope=True)
    self.visit_block(body_def.body)
    self.visit_block(get_state.body)
    self.symbol_table.exit_scope()
    self.emit('\n} else {')
    # Create a new scope in case the local variables are leaked.
    self.symbol_table.enter_scope(scf_scope=True)
    self.visit_block(orelse_def.body)
    self.visit_block(get_state.body)
    self.symbol_table.exit_scope()
    self._emit_with_loc('\n}', node)
    return list(zip(ret_ssa_values, out_types))
def _visit_iter(self, node):
if isinstance(node, ast.Call):
f_name = anno.getanno(node.func, anno.Basic.QN)
if f_name == QN('range'):
args = [self.visit(arg) for arg in node.args]
begin = None
step = None
end = None
if len(args) == 1:
end, end_ty = args[0]
elif len(args) == 2:
begin, begin_ty = args[0]
end, end_ty = args[1]
elif len(args) == 3:
begin, begin_ty = args[0]
end, end_ty = args[1]
step, step_ty = args[2]
if begin is None:
begin = self._ssa_name('begin')
self._emit_with_loc('\n{} = constant 0 : index'.format(begin), node)
elif begin_ty != TFRTypes.INDEX:
begin_ = self._ssa_name('begin')
self._emit_with_loc(
'\n{} = index_cast {} : {} to index'.format(
begin_, begin, begin_ty), node)
begin = begin_
if end_ty != TFRTypes.INDEX:
end_ = self._ssa_name('end')
self._emit_with_loc(
'\n{} = index_cast {} : {} to index'.format(end_, end, end_ty),
node)
end = end_
if step is None:
step = self._ssa_name('step')
self._emit_with_loc('\n{} = constant 1 : index'.format(step), node)
elif step_ty != TFRTypes.INDEX:
step_ = self._ssa_name('step')
self._emit_with_loc(
'\n{} = index_cast {} : {} to index'.format(step_, step, step_ty),
node)
step = step_
return begin, end, step
raise NotImplementedError('Iterator entity not supported.' + node)
  def _visit_for_stmt(self, range_, body_def, get_state, loop_carried, node):
    """Emits an `scf.for` for an autograph-converted range loop.

    `range_` is the (begin, end, step) triple from _visit_iter.  Returns the
    (ssa_value, type_string) pairs of the loop results.
    """
    self.emit('\n')
    ret_str, ret_ssa_values = self._get_mlir_ssa_values(
        'for_stmt', [TFRTypes.TENSOR] * len(loop_carried))
    if ret_ssa_values:
      self.emit(ret_str + ' = ')
    # Before enter the loop, we use the original ssa values as the initial
    # values to the loop iteration arguments. We also create new ssa values as
    # the returns of the scf for statements. The symbol table needs to be
    # updated to these new ssa values before it enters the scope of the loop.
    out_types = []
    init_values = []
    for symbol, ssa_value in zip(loop_carried, ret_ssa_values):
      init, ty = self.symbol_table.lookup(symbol)
      self.symbol_table.insert_symbol(symbol, ssa_value, ty)
      out_types.append(str(ty))
      init_values.append((init, ty))
    # Create a new scope in case the local variables are leaked.
    self.symbol_table.enter_scope(scf_scope=True)
    # Create the iteration variable with index type
    assert len(body_def.args.args) == 1
    it_name = body_def.args.args[0].id
    it = self._ssa_name(it_name)
    self.symbol_table.insert_symbol(it_name, it, TFRTypes.INDEX)
    self.emit('scf.for {} = {} to {} step {} '.format(it, range_[0], range_[1],
                                                      range_[2]))
    if loop_carried:
      iter_args = []
      for symbol, init in zip(loop_carried, init_values):
        # create new ssa values for the loop carried variables
        it_arg = self._ssa_name('it_arg')
        self.symbol_table.insert_symbol(symbol, it_arg, init[1])
        iter_args.append('{} = {}'.format(it_arg, init[0]))
      self.emit('iter_args({}) '.format(', '.join(iter_args)))
      self.emit('-> ({}) {{'.format(', '.join(out_types)))
    else:
      self.emit(' {')
    self.visit_block(body_def.body)
    self.visit_block(get_state.body)
    self.symbol_table.exit_scope()
    self._emit_with_loc('\n}', node)
    return list(zip(ret_ssa_values, out_types))
  def _emit_default_constant_from_proto(self, attr_def):
    """emit mlir constant statement from default value of the ArgDef proto."""
    name = self._ssa_name('cst')
    cst_ty = _get_type_from_proto(None, attr_def)
    cst_val = _get_val_from_proto(cst_ty, attr_def.default_value)
    # NOTE(review): unlike other emit sites, these calls pass no ast node for
    # the source location — confirm _emit_with_loc tolerates a missing node.
    if cst_ty == TFRTypes.ATTR:
      self._emit_with_loc('\n{} = tfr.constant {} -> {}'.format(
          name, cst_val, cst_ty))
    elif cst_ty == TFRTypes.I1:
      # Booleans carry no explicit type suffix in MLIR constant syntax.
      self._emit_with_loc('\n{} = constant {}'.format(name, cst_val))
    else:
      self._emit_with_loc('\n{} = constant {} : {}'.format(
          name, cst_val, cst_ty))
    return name, cst_ty
def visit_keyword(self, node):
return node.arg, self.visit(node.value)
  def _visit_tf_op(self, op_name, args, keywords, node):
    """Emits a `tfr.call` to the composite function of a TF raw op.

    Positional args come first; remaining OpDef inputs/attrs are filled from
    keyword args or their proto default values.  Returns the
    (ssa_value, type) pairs of the call results.
    """
    op_def, derived_attrs = self._op_defs.lookup(op_name)
    ret_tys = [_get_type_from_proto(arg) for arg in op_def.output_arg]
    ret_str, ret_ssa_values = self._get_mlir_ssa_values(op_name, ret_tys)
    arg_strs = []
    ty_strs = []
    for arg in args:
      value, ty = self.visit(arg)
      arg_strs.append(value)
      ty_strs.append(str(ty))
    input_args = [arg for arg in op_def.input_arg]
    attrs_no_default = [
        attr for attr in op_def.attr
        if not attr.HasField('default_value') and attr.name not in derived_attrs
    ]
    attrs_with_default = [
        attr for attr in op_def.attr
        if attr.HasField('default_value') and attr.name not in derived_attrs
    ]
    kw_args = {}
    for arg in keywords:
      value, (ssa_name, ty) = self.visit(arg)
      ty = self._get_inferred_type(arg.value, ty)
      # TODO(fengliuai): implement the "rename_to" for the customization in
      # tensorflow/core/api_def/base_api/*
      if value == 'axis':
        value = 'split_dim'
      kw_args[value] = (ssa_name, ty)
    # tensor arguments and attribute arguments
    ordered_args = input_args + attrs_no_default + attrs_with_default
    for attr_def in ordered_args[len(args):]:
      if attr_def.name in kw_args:
        value, ty = kw_args[attr_def.name]
        if attr_def in input_args:
          if ty in _attribute_types:
            # the argument shouldn't be used as tf op calls directly.
            value, ty = self._value_to_tensor(value, ty, node)
          if ty is TFRTypes.TENSOR_LIST and not _require_tensor_list(attr_def):
            value, ty = self._pack_tensor_list(value)
      else:
        # Not supplied by the caller: materialize the proto default.
        value, ty = self._emit_default_constant_from_proto(attr_def)
      arg_strs.append(value)
      ty_strs.append(str(ty))
    if ret_ssa_values:
      self.emit('\n{} = '.format(ret_str))
    self.emit('tfr.call @tf__{}('.format(_camel_to_snake(op_name)))
    arg_str = ', '.join(arg_strs)
    arg_ty_str = ', '.join(ty_strs)
    ret_ty_str = ', '.join([str(ty) for ty in ret_tys])
    self._emit_with_loc(
        '{}) : ({}) -> ({})'.format(arg_str, arg_ty_str, ret_ty_str), node)
    return list(zip(ret_ssa_values, ret_tys))
def visit_If(self, node):
raise NotImplementedError('If not supported.')
def visit_Name(self, node):
val, lookup_type = self.symbol_table.lookup(node.id)
type_ = self._get_inferred_type(node, lookup_type)
return val, type_
  def visit_Return(self, node):
    """Emits `scf.yield` (inside an scf region) or `tfr.return`."""
    values = self.visit(node.value)
    if self.symbol_table.in_scf_scope():
      self.emit('\nscf.yield ')
    else:
      self.emit('\ntfr.return ')
    if not values:
      return
    if isinstance(values, list):
      # Multiple results: unzip into parallel value/type lists.
      vals, tys = zip(*values)
    else:
      vals = values[0]
      tys = values[1]
    if isinstance(tys, list) or isinstance(tys, tuple):
      tys = [str(t) for t in tys]
      self._emit_with_loc('{} : {}'.format(', '.join(vals), ', '.join(tys)),
                          node)
    elif tys != TFRTypes.NONE:
      # TODO(fengliuai): scf region yield uses this branch. Fix it.
      self._emit_with_loc('{} : {}'.format(vals, tys), node)
  def visit_Subscript(self, node):
    """Lowers indexing into a tensor list or a tensor-shape list.

    NOTE(review): falls through and implicitly returns None when the value
    type is neither TENSOR_LIST nor TF_TENSOR_SHAPE_LIST — confirm callers
    never hit that path.
    """
    val, ty = self.visit(node.value)
    type_ = self._get_inferred_type(node.value, ty)
    # TODO(fengliuai): Here we hardcode the node.slice here to get the index
    # type. Use the visit method once the type inference is done.
    # slice_val, slice_ty = self.visit(node.slice)
    if isinstance(node.slice, ast.Index):
      if isinstance(node.slice.value, ast.Constant):
        # TODO(fengliuai): promote to an assignment
        idx_val = self._ssa_name('cst')
        self._emit_with_loc(
            '\n{} = constant {} : index'.format(idx_val,
                                                node.slice.value.value), node)
      else:
        idx_val, _ = self.visit(node.slice.value)
    else:
      raise NotImplementedError('non-index slice not supported.')
    elt = self._ssa_name('elt')
    if type_ == TFRTypes.TENSOR_LIST:
      self.emit('\n{} = tfr.get_element {}[{}] '.format(elt, val, idx_val))
      self._emit_with_loc(': (!tfr.tensor_list, index) -> !tfr.tensor', node)
      return (elt, TFRTypes.TENSOR)
    elif type_ == TFRTypes.TF_TENSOR_SHAPE_LIST:
      # shape[i] lowers to shape.get_extent plus a size->index cast.
      size_ = self._ssa_name('size')
      self.emit('\n{} = shape.get_extent {}, {}'.format(size_, val, idx_val))
      self._emit_with_loc(': !shape.shape, index -> !shape.size', node)
      self._emit_with_loc(
          '\n{} = shape.size_to_index {} : !shape.size'.format(elt, size_),
          node)
      return (elt, TFRTypes.INDEX)
  def visit_List(self, node):
    """Lowers a list literal to a `tfr.build_list` op.

    Elements are cast to tensors when the inferred list type is a tensor
    list; otherwise index values are widened to i64 for attribute use.
    """
    out_type = self._get_inferred_type(node)
    vals = []
    tys = []
    for elt in node.elts:
      val, ty = self.visit(elt)
      if ty in _attribute_types and out_type == TFRTypes.TENSOR_LIST:
        # This list is a tensor list, then cast all the input values to tensors.
        val, ty = self._value_to_tensor(val, ty, node)
      else:
        # We shouldn't use index type to build the list because list will be use
        # as attribute.
        val, ty = self._index_to_I64(val, ty)
      vals.append(val)
      tys.append(str(ty))
    list_val = self._ssa_name('list')
    self.emit('\n{} = "tfr.build_list"({})'.format(list_val, ', '.join(vals)))
    self._emit_with_loc(' : ({}) -> {}'.format(', '.join(tys), out_type), node)
    return (list_val, out_type)
def visit_Tuple(self, node):
return [self.visit(elt) for elt in node.elts]
def visit_UnaryOp(self, node):
value, ty = self.visit(node.operand)
if isinstance(node.op, ast.USub):
zero_value = self._ssa_name('zero')
self._emit_with_loc('\n{} = constant 0 : {}'.format(zero_value, ty), node)
ssa_value = self._ssa_name('cst')
if ty == TFRTypes.I32 or ty == TFRTypes.I64:
self._emit_with_loc(
'\n{} = subi {}, {} : {}'.format(ssa_value, zero_value, value, ty),
node)
elif ty == TFRTypes.F32:
self._emit_with_loc(
'\n{} = subf {}, {} : {}'.format(ssa_value, zero_value, value, ty),
node)
else:
raise NotImplementedError('USub type not recognized: ' + str(ty))
return ssa_value, ty
raise NotImplementedError('USub operator not recognized')
def visit_For(self, node):
raise NotImplementedError('For operator not recognized')
def visit_While(self, node):
raise NotImplementedError('While operator not recognized')
def visit_Try(self, node):
# Only handles the body of the try statement.
self.visit_block(node.body)
def _apply_py_to_tf_passes(node, ctx):
  """Apply transformations from PyToTF to match tf.function tracing."""
  # TODO(fengliuai): we don't know which passes are required, thus we evalute
  # each one when the corresponding node is handled.
  # copied from PyToTF.transform_ast
  transformed = return_statements.transform(node, ctx, False)
  transformed = control_flow.transform(transformed, ctx)
  return transformed
class TfrGen(transpiler.GenericTranspiler):
  """Transforms Python objects into TFR MLIR source code."""

  def __init__(self, op_defs):
    # Cache of OpDefs shared across all functions transpiled by this instance.
    self._op_defs = op_defs

  def transform_ast(self, node, ctx):
    """Runs the autograph/analysis pipeline, then emits TFR MLIR text."""
    node = _apply_py_to_tf_passes(node, ctx)
    # TODO(mdan): Enable this.
    # node = anf.transform(node, ctx)
    # Static analyses required by the type-inference pass below.
    graphs = cfg.build(node)
    node = qual_names.resolve(node)
    node = activity.resolve(node, ctx)
    node = reaching_definitions.resolve(node, ctx, graphs)
    node = reaching_fndefs.resolve(node, ctx, graphs)
    node = type_inference.resolve(node, ctx, graphs,
                                  TFRTypeResolver(self._op_defs))
    mlir_generator = TFRGen(ctx, self._op_defs)
    mlir_generator.visit(node)
    return mlir_generator.code_buffer
def tfr_gen(func, op_defs):
  """Transpiles one python function into verified TFR MLIR source text."""
  code, _ = TfrGen(op_defs).transform(func, None)
  # Fail loudly if the generated module does not verify.
  assert tfr.verify(code), 'mlir code not verified: {}'.format(code)
  return code
def tfr_gen_from_module(source, method_prefix=None, op_libraries=None):
  """Parse a python code and emit the TFR functions from a target class."""
  op_defs = OpDefCache()
  for module in (op_libraries or []):
    lib_dir = os.path.dirname(module.__file__)
    prefix_len = len('gen_')
    lib_name = os.path.basename(module.__file__)[prefix_len:].replace(
        '.py', '.so')
    # Load the op library so the op is added to the op registry. This is
    # required when the op cc_library couldn't be statically linked in open
    # source.
    # This is a no op if the op shared library couldn't be found in the same
    # directory of the op Python API.
    load_library.load_op_library(os.path.join(lib_dir, lib_name))
  mlir_funcs = []
  for name, func in tf_inspect.getmembers(source, tf_inspect.isfunction):
    if method_prefix and not name.startswith(method_prefix):
      continue
    mlir_funcs.append(tfr_gen(func, op_defs))
  return '\n'.join(mlir_funcs + op_defs.mlir_external_funcs())
|
# -*- coding: utf-8 -*-
import unittest
from openprocurement.tender.twostage.tests import tender
def suite():
    """Aggregate the two-stage tender test cases into a single suite."""
    test_suite = unittest.TestSuite()
    test_suite.addTest(tender.suite())
    return test_suite
# Allow running this module directly; `defaultTest` points at suite() above.
if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.11.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta1_mutating_webhook_configuration_list import V1beta1MutatingWebhookConfigurationList
class TestV1beta1MutatingWebhookConfigurationList(unittest.TestCase):
    """Unit test stubs for V1beta1MutatingWebhookConfigurationList."""

    def setUp(self):
        # Generated stubs need no shared fixtures.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testV1beta1MutatingWebhookConfigurationList(self):
        """Placeholder test for V1beta1MutatingWebhookConfigurationList.

        FIXME: construct the model with mandatory attributes and example
        values, e.g.
        kubernetes.client.models.v1beta1_mutating_webhook_configuration_list.V1beta1MutatingWebhookConfigurationList()
        """
        pass
# Run the generated stubs directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
|
#Compiled By Ahmad Riswanto
#Facebook : https://www.facebook.com/ahmad.riswanto.180
import marshal
exec(marshal.loads('c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00@\x00\x00\x00s\x94\x00\x00\x00d\x00\x00d\x01\x00l\x00\x00Z\x00\x00d\x00\x00d\x01\x00l\x01\x00Z\x01\x00d\x00\x00d\x01\x00l\x02\x00Z\x02\x00d\x02\x00GHd\x03\x00Z\x03\x00d\x04\x00Z\x04\x00xH\x00e\x04\x00d\x04\x00k\x02\x00r\x7f\x00e\x05\x00d\x05\x00\x83\x01\x00Z\x06\x00e\x06\x00e\x03\x00k\x02\x00rj\x00d\x06\x00GHd\x07\x00Z\x04\x00q8\x00d\x08\x00GHe\x00\x00j\x07\x00d\t\x00\x83\x01\x00\x01q8\x00Wd\n\x00\x84\x00\x00Z\x08\x00e\x08\x00\x83\x00\x00\x01d\x01\x00S(\x0b\x00\x00\x00i\xff\xff\xff\xffNsH\x00\x00\x00\t [Salahin Aja Sandinya Ntar Di Suruh Masuk Link , Ntar Sandinya Muncul\ts\x16\x00\x00\x00Hilman Maulana Gans XDt\x04\x00\x00\x00trues$\x00\x00\x00\x1b[1;92m[?] \x1b[1;97mPASSWORD \x1b[1;97m: s2\x00\x00\x00\n \x1b[1;92mCORRECT \xe2\x9c\x93\n t\x05\x00\x00\x00falses\x0c\x00\x00\x00\x1b[1;91mWRONGs$\x00\x00\x00xdg-open https://pastelink.net/21jp2c\x00\x00\x00\x00\x01\x00\x00\x00\x02\x00\x00\x00C\x00\x00\x00s\xe8\x01\x00\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d\x02\x00\x83\x01\x00\x01d\x03\x00GHt\x02\x00j\x03\x00d\x04\x00\x83\x01\x00\x01d\x05\x00GHd\x06\x00GHd\x07\x00GHd\x05\x00GHd\x08\x00GHd\t\x00GHd\n\x00GHd\x0b\x00GHd\x0c\x00GHd\r\x00GHd\x0e\x00GHt\x04\x00d\x0f\x00\x83\x01\x00}\x00\x00|\x00\x00d)\x00k\x06\x00r\xa9\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d\x11\x00\x83\x01\x00\x01d\x12\x00GHd\x13\x00GHt\x05\x00\x83\x00\x00\x01n\x00\x00|\x00\x00d*\x00k\x06\x00r\xe3\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d\x15\x00\x83\x01\x00\x01d\x16\x00GHd\x17\x00GHt\x05\x00\x83\x00\x00\x01n\x00\x00|\x00\x00d+\x00k\x06\x00r\x1d\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d\x19\x00\x83\x01\x00\x01d\x1a\x00GHd\x1b\x00GHt\x05\x00\x83\x00\x00\x01n\x00\x00|\x00\x00d,\x00k\x06\x00ra\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d\x1d\x00\x83\x01\x00\x01d\x1e\x00GHd\x1f\x00GHd 
\x00GHd!\x00GHt\x05\x00\x83\x00\x00\x01n\x00\x00|\x00\x00d-\x00k\x06\x00r\x9b\x01t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x02\x00j\x03\x00d#\x00\x83\x01\x00\x01d$\x00GHd%\x00GHt\x05\x00\x83\x00\x00\x01n\x00\x00|\x00\x00d.\x00k\x06\x00r\xbe\x01t\x00\x00j\x01\x00d\'\x00\x83\x01\x00\x01t\x05\x00\x83\x00\x00\x01n&\x00t\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01d(\x00GHt\x00\x00j\x01\x00d\x01\x00\x83\x01\x00\x01t\x06\x00\x83\x00\x00\x01d\x00\x00S(/\x00\x00\x00Ni\x01\x00\x00\x00t\x05\x00\x00\x00clears\x05\x00\x00\x00\x1b[96ms\x0c\x00\x00\x00figlet Toolss&\x00\x00\x00\x1b[92m=================================s#\x00\x00\x00\x1b[97m Author : \x1b[96mHilman Maulana s%\x00\x00\x00\x1b[97m Team : \x1b[96mBacot-Broterhood s0\x00\x00\x00\x1b[92m++++++++++++ \x1b[97mM E N U \x1b[92m++++++++++++s"\x00\x00\x00\x1b[92m[\x1b[97m1\x1b[92m] \x1b[97mTools Mbf s$\x00\x00\x00\x1b[92m[\x1b[97m2]\x1b[92m \x1b[97mTools Crack s-\x00\x00\x00\x1b[92m[\x1b[97m3\x1b[92m] \x1b[97mTools Crack Fb India s#\x00\x00\x00\x1b[92m[\x1b[97m4\x1b[92m] \x1b[97mTools ManXDs(\x00\x00\x00\x1b[92m[\x1b[97m5\x1b[92m] \x1b[97mDark Fb Diamond s\x1d\x00\x00\x00\x1b[92m[\x1b[91m0\x1b[92m] \x1b[91mExit s\x0e\x00\x00\x00\x1b[97m==>\x1b[93m t\x01\x00\x00\x001s&\x00\x00\x00git clone https://github.com/dz-id/mbfs!\x00\x00\x00\x1b[97m$ lanjut ketikan \x1b[93mcd mbfs\x1f\x00\x00\x00\x1b[97m$ lalu \x1b[93m python run.pyt\x01\x00\x00\x002s,\x00\x00\x00git clone https://github.com/Yayan-XD/Crakers$\x00\x00\x00\x1b[97m$ lanjut ketikan \x1b[93mcd Crakers"\x00\x00\x00\x1b[97m$ lalu\x1b[93m python2 Craker.pyt\x01\x00\x00\x003s3\x00\x00\x00git clone https://github.com/ZahidMahmood786/Indians#\x00\x00\x00\x1b[97m$lanjut ketikan \x1b[93mcd Indians\x1f\x00\x00\x00\x1b[97m$ lalu \x1b[93mpython2 Ind.pyt\x01\x00\x00\x004s0\x00\x00\x00git clone https://github.com/hilman-bebe/bebeksss%\x00\x00\x00\x1b[97m$ lanjut ketikan \x1b[93mcd bebeksss#\x00\x00\x00\x1b[97m$ lalu \x1b[93mpython2 bebekss.pys\x1c\x00\x00\x00\x1b[97m + Username 
\x1b[92mhilmans\x1a\x00\x00\x00\x1b[97m + Password \x1b[96mganst\x01\x00\x00\x005s/\x00\x00\x00git clone https://github.com/Kagurazaga/New_Ikzs%\x00\x00\x00\x1b[97m$ lanjut ketikan\x1b[93m cd New_Ikzs \x00\x00\x00\x1b[97m$ lalu \x1b[93m python2 ikz.pyt\x01\x00\x00\x000i\x02\x00\x00\x00s(\x00\x00\x00\x1b[97m Pilih Yang Bener \x1b[92mGoblok . . .(\x01\x00\x00\x00R\x03\x00\x00\x00(\x01\x00\x00\x00R\x04\x00\x00\x00(\x01\x00\x00\x00R\x05\x00\x00\x00(\x01\x00\x00\x00R\x06\x00\x00\x00(\x01\x00\x00\x00R\x07\x00\x00\x00(\x01\x00\x00\x00R\x08\x00\x00\x00(\x07\x00\x00\x00t\x04\x00\x00\x00timet\x05\x00\x00\x00sleept\x02\x00\x00\x00ost\x06\x00\x00\x00systemt\t\x00\x00\x00raw_inputt\x04\x00\x00\x00exitt\x04\x00\x00\x00main(\x01\x00\x00\x00t\x04\x00\x00\x00gans(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>R\x0f\x00\x00\x00\x16\x00\x00\x00sn\x00\x00\x00\x00\x01\r\x01\r\x01\x05\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x05\x01\x0c\x01\x0c\x01\r\x01\r\x01\x05\x01\x05\x01\n\x01\x0c\x01\r\x01\r\x01\x05\x01\x05\x01\n\x01\x0c\x01\r\x01\r\x01\x05\x01\x05\x01\n\x01\x0c\x01\r\x01\r\x01\x05\x01\x05\x01\x05\x01\x05\x01\n\x01\x0c\x01\r\x01\r\x01\x05\x01\x05\x01\n\x01\x0c\x01\r\x01\n\x02\r\x01\x05\x01\r\x01(\t\x00\x00\x00R\x0b\x00\x00\x00t\x03\x00\x00\x00sysR\t\x00\x00\x00t\x0f\x00\x00\x00CorrectPasscodet\x04\x00\x00\x00loopR\r\x00\x00\x00t\x08\x00\x00\x00passcodeR\x0c\x00\x00\x00R\x0f\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00(\x00\x00\x00\x00s\x10\x00\x00\x00<Ahmad_Riswanto>t\x08\x00\x00\x00<module>\x04\x00\x00\x00s\x18\x00\x00\x00$\x03\x05\x01\x06\x01\x06\x01\x0f\x01\x0c\x01\x0c\x01\x05\x01\t\x02\x05\x01\x11\x05\t:'))
|
from __future__ import annotations
import asyncio
import datetime
import gzip
import itertools
import json
import string
from pathlib import Path
from typing import Counter, Iterable, Sequence, TypeVar
import platformdirs
import pyperclip
from rich.align import Align
from rich.bar import Bar
from rich.console import Group, RenderableType
from rich.panel import Panel as RichPanel
from rich.table import Table
from textual import events
from textual.app import App
from textual.layout import Layout
from textual.reactive import Reactive
from textual.views import DockView, GridView
from textual.widget import Widget
from textual.widgets import Button, ButtonPressed, Header
from textual.widgets._button import ButtonRenderable
# Directory containing this module's bundled data files.
BASE_DIR = Path(__file__).parent
# Rich styles for unevaluated keyboard keys and empty board slots.
IDLE = "bold white on rgb(130,130,130)"
EMPTY = "bold on rgb(18,18,18)"
# Per-letter evaluation states for a guess.
ABSENT = 0
PRESENT = 1
CORRECT = 2
# Rich styles keyed by evaluation state.
LETTER_STATUS = {
    ABSENT: "bold white on rgb(58,58,58)",
    PRESENT: "bold white on rgb(181,159,59)",
    CORRECT: "bold white on rgb(83,141,78)",
}
# Emoji blocks keyed by evaluation state.
BLOCKS = {ABSENT: "⬛", PRESENT: "🟨", CORRECT: "🟩"}
# Default persisted statistics; `stats` counts wins per guess count (1..6).
INITIAL_STATS = {
    "played": 0,
    "stats": [0, 0, 0, 0, 0, 0],
    "current_streak": 0,
    "max_streak": 0,
}
# Reference date (midnight) used to derive the daily puzzle number.
SEED_DATE = datetime.datetime.combine(datetime.datetime(2021, 6, 19), datetime.time())
# Stats file inside the per-user data directory; create the directory eagerly.
STATS_JSON = Path(platformdirs.user_data_dir("wordle")) / ".stats.json"
STATS_JSON.parent.mkdir(exist_ok=True, parents=True)
# Word lists stored as gzipped JSON arrays next to this module.
# NOTE(review): presumably La holds the answer words and Ta the additional
# accepted guesses (as in the original Wordle data) — confirm.
with BASE_DIR.joinpath("La.gz").open("rb") as laf, BASE_DIR.joinpath("Ta.gz").open(
    "rb"
) as taf:
    La: list[str] = json.loads(gzip.decompress(laf.read()))
    Ta: list[str] = json.loads(gzip.decompress(taf.read()))
T = TypeVar("T")
def partition(l: Iterable[T], size: int) -> Iterable[Sequence[T]]:
    """Yield successive ``size``-length tuples from ``l``; the last chunk
    may be shorter."""

    def chunks():
        source = iter(l)
        while batch := tuple(itertools.islice(source, size)):
            yield batch

    return chunks()
def calculate_eta(target_date: datetime.datetime) -> str | None:
    """Return a green-colored HH:MM:SS countdown to *target_date*, or None
    when the target is already in the past."""
    remaining = (target_date - datetime.datetime.now()).total_seconds()
    if remaining <= 0:
        return None
    parts = []
    # Split the remaining seconds into hours, minutes and seconds.
    for seconds_per_unit in (3600, 60, 1):
        parts.append("%02d" % int(remaining // seconds_per_unit))
        remaining %= seconds_per_unit
    return f'[green]{":".join(parts)}[/green]'
class GameStats(Widget):
    """Panel rendering aggregate statistics and a per-guess-count histogram."""

    def __init__(self, stats: dict) -> None:
        super().__init__()
        # Statistics mapping (see INITIAL_STATS for the base keys).
        self.stats = stats

    def render(self) -> RenderableType:
        total_played = self.stats["played"]
        total_win = sum(self.stats["stats"])
        # Guesses used in the last won game: the first stored guess string is
        # all rows concatenated, 5 letters per row.
        # NOTE(review): "last_guesses"/"last_result" are not in INITIAL_STATS;
        # presumably added elsewhere before this renders — confirm.
        num_guesses = (
            len(self.stats["last_guesses"][0]) // 5 if self.stats["last_result"] else 0
        )
        data = {
            "Played": total_played,
            "Win %": round(total_win / total_played * 100, 1) if total_played else 0,
            "Current Streak": self.stats.get("current_streak", 0),
            "Max Streak": self.stats.get("max_streak", 0),
        }
        table = Table(*data.keys())
        table.add_row(*map(str, data.values()))
        bars = Table.grid("idx", "bar", padding=(0, 1))
        for i, value in enumerate(self.stats["stats"], 1):
            bars.add_row(
                str(i),
                Bar(
                    max(self.stats["stats"]),
                    0,
                    value,
                    # Highlight the bar matching the last winning guess count.
                    color="rgb(83,141,78)"
                    if i == num_guesses and self.stats["last_result"]
                    else "rgb(58,58,58)",
                ),
            )
        render_group = Group(table, bars)
        return RichPanel(render_group, title="Stats")
class GameMessage(Widget):
    """Panel showing a transient message and an optional countdown timer."""

    def __init__(self) -> None:
        super().__init__()
        # Interval timer driving the once-per-second refresh; None when idle.
        self.timer = None

    # Message text; Reactive so changes trigger a re-render.
    content: Reactive[str] = Reactive("")

    def show_eta(self, target: datetime.datetime) -> None:
        # Start refreshing every second to display the countdown to `target`.
        self.target_date = target
        self.timer = self.set_interval(1, self.refresh)

    async def clear_eta(self) -> None:
        # Stop and discard the countdown timer, if one is running.
        if self.timer is not None:
            await self.timer.stop()
            self.timer = None

    def render(self) -> RenderableType:
        renderable = self.content
        if self.timer is not None:
            eta = calculate_eta(self.target_date)
            if eta is None:
                # Countdown expired: schedule the async cleanup of the timer.
                self._child_tasks.add(asyncio.create_task(self.clear_eta()))
            else:
                renderable += f"\n\nNext wordle: {eta}"
        renderable = Align.center(renderable, vertical="middle")
        return RichPanel(renderable, title="Message")
class Letter(Widget):
    """One letter tile, used on both the board and the on-screen keyboard."""

    # Displayed text; Reactive so updates re-render the widget.
    label: Reactive[RenderableType] = Reactive("")
    # Evaluation state (ABSENT/PRESENT/CORRECT) or None when unevaluated.
    status: Reactive[int | None] = Reactive(None)

    def __init__(self, name: str, clickable: bool = False):
        super().__init__(name)
        self.name = name
        self.label = name
        # Keyboard keys are clickable; board slots are not.
        self.clickable = clickable
        self.style = IDLE if clickable else EMPTY

    def render(self) -> RenderableType:
        # Use the base style until this letter has been evaluated.
        return ButtonRenderable(
            self.label,
            self.style if self.status is None else LETTER_STATUS[self.status],
        )

    async def on_click(self, event: events.Click) -> None:
        event.prevent_default().stop()
        if self.clickable:
            # Forward the click as a ButtonPressed message to the app.
            await self.emit(ButtonPressed(self))
class GuessView(GridView):
    """The 5x6 board of letter slots plus the guess-evaluation logic."""

    # Board dimensions: letters per guess and number of guesses.
    COLUMN_SIZE = 5
    ROW_SIZE = 6

    def __init__(self, layout: Layout = None, name: str | None = None) -> None:
        super().__init__(layout, name)
        # Flat row-major list of slots; `current` indexes the active slot.
        self.slots = [Letter("") for _ in range(self.COLUMN_SIZE * self.ROW_SIZE)]
        self.current = 0

    @property
    def current_guess(self) -> list[Letter]:
        """The slots of the row containing the cursor."""
        start = self.current // self.COLUMN_SIZE * self.COLUMN_SIZE
        return self.slots[start : start + self.COLUMN_SIZE]

    @property
    def current_word(self) -> list[str]:
        """The letters typed into the current row."""
        return [b.name for b in self.current_guess]

    @property
    def valid_guesses(self) -> list[Sequence[Letter]]:
        """All fully/partially typed rows, split into COLUMN_SIZE chunks."""
        return list(
            partition(
                itertools.takewhile(lambda x: bool(x.name), self.slots),
                self.COLUMN_SIZE,
            )
        )

    def input_letter(self, letter: str) -> None:
        """Type `letter` into the current slot, advancing within the row."""
        button = self.slots[self.current]
        if button.name:
            if self.current % self.COLUMN_SIZE == self.COLUMN_SIZE - 1:
                # The last letter is filled
                return
            self.current += 1
            button = self.slots[self.current]
        button.name = letter
        button.label = letter

    def backspace_letter(self) -> None:
        """Erase the most recently typed letter in the current row."""
        button = self.slots[self.current]
        if not button.name:
            if self.current % self.COLUMN_SIZE == 0:
                # the first letter
                return
            self.current -= 1
            button = self.slots[self.current]
        button.name = button.label = ""

    async def on_mount(self) -> None:
        # Lay out the slots in a fixed COLUMN_SIZE x ROW_SIZE grid.
        self.grid.set_align("center", "center")
        self.grid.set_gap(1, 1)
        self.grid.add_column("column", repeat=self.COLUMN_SIZE, size=7)
        self.grid.add_row("row", size=3, repeat=self.ROW_SIZE)
        self.grid.place(*self.slots)

    def check_solution(self, solution: str) -> bool | None:
        """Score the current row against `solution` and advance the cursor.

        Returns True on an exact match, False when the board is full, and
        None (implicitly) when the guess was wrong but rows remain.
        """
        word = self.current_word
        letters = self.current_guess
        self.log("Checking solution")
        if list(solution) == word:
            for b in letters:
                b.status = CORRECT
            return True
        # Two passes with a letter counter so duplicates are scored like
        # Wordle: exact positions first, then remaining letters as PRESENT.
        counter = Counter(solution)
        for i, b in enumerate(letters):
            if solution[i] == b.name:
                counter[b.name] -= 1
                b.status = CORRECT
        for b in letters:
            if b.status == CORRECT:
                continue
            if counter.get(b.name, 0) <= 0:
                b.status = ABSENT
            else:
                counter[b.name] -= 1
                b.status = PRESENT
        if self.current < self.COLUMN_SIZE * self.ROW_SIZE - 1:
            self.current += 1
        else:
            return False
class KeyboardRow(GridView):
    """One horizontal row of keyboard key widgets."""

    def __init__(
        self, letters: Iterable[str], layout: Layout = None, name: str | None = None
    ) -> None:
        super().__init__(layout=layout, name=name)
        self.children = [key for key in letters]

    async def on_mount(self) -> None:
        grid = self.grid
        grid.set_align("center", "center")
        grid.set_gap(1, 1)
        grid.add_column("column", repeat=len(self.children), size=7)
        grid.add_row("row", size=3)
        grid.place(*self.children)
class WordleApp(App):
    """Textual app wiring together the guess grid, on-screen keyboard,
    statistics pane and message pane."""

    KEYBOARD = ["QWERTYUIOP", "ASDFGHJKL", "ZXCVBNM"]

    def on_key(self, event: events.Key) -> None:
        """Physical keyboard input; after the game ends only 'c' (copy) works."""
        if self.result is not None:
            if event.key == "c":
                self.copy_result()
            return
        self.message.content = ""
        if event.key in string.ascii_letters:
            self.guess.input_letter(event.key.upper())
        elif event.key == "enter":
            self.check_input()
        elif event.key == "ctrl+h":
            # ctrl+h is how the terminal reports backspace — TODO confirm
            self.guess.backspace_letter()

    def check_input(self) -> bool | None:
        """Validate and score the current guess; update keyboard key colors."""
        current = self.guess.current_guess
        current_word = "".join(self.guess.current_word).lower()
        if "" in self.guess.current_word:
            self.message.content = "Not enough letters"
            return
        if current_word not in Ta and current_word not in La:
            self.message.content = "Not in word list"
            return
        self.result = self.guess.check_solution(self.solution)
        for l in current:
            button = self.buttons[l.name]
            # Keep the best status seen for this key; assumes the status
            # constants are ordered ABSENT < PRESENT < CORRECT — TODO confirm.
            button.status = max(button.status or 0, l.status)
        self.save_statistics()
        if self.result is not None:
            self.show_result()

    def copy_result(self) -> None:
        """Copy a shareable emoji grid of the finished game to the clipboard."""
        guesses = self.guess.valid_guesses
        trials = len(guesses) if self.result else "x"
        result = [f"Wordle {self.index} {trials}/6", ""]
        for row in guesses:
            result.append("".join(BLOCKS[l.status] for l in row))
        text = "\n".join(result)
        pyperclip.copy(text)
        old_content = self.message.content
        self.message.content = "Successfully copied to the clipboard."

        def restore():
            self.message.content = old_content

        # Restore the previous message after 2 seconds.
        self.message.set_timer(2, restore)

    def save_statistics(self) -> None:
        """Persist game stats to STATS_JSON and refresh the stats pane.

        Called after every scored guess, not only at game end.
        """
        guesses = self.guess.valid_guesses
        if self.result:
            self.stats["stats"][len(guesses) - 1] += 1
        is_streak = (
            "last_played" in self.stats and self.index - self.stats["last_played"] == 1
        )
        current_streak = self.stats.get("current_streak", 0) if is_streak else 0
        if self.result is not None:
            # NOTE(review): "played" is incremented here AND again below in
            # `data` ("played": self.stats["played"] + 1), so a finished game
            # looks double-counted; current_streak also grows on a loss.
            # TODO confirm intent.
            self.stats["played"] += 1
            current_streak += 1
        max_streak = max(current_streak, self.stats.get("max_streak", 0))
        data = {
            "last_played": self.index,
            "last_guesses": (
                # NOTE(review): the outer "".join wraps a single string, so it
                # is a no-op; kept byte-identical.
                "".join("".join(str(l.name) for row in guesses for l in row)),
                "".join("".join(str(l.status) for row in guesses for l in row)),
            ),
            "last_result": self.result,
            "played": self.stats["played"] + 1,
            "stats": self.stats["stats"],
            "current_streak": current_streak,
            "max_streak": max_streak,
        }
        self.stats.update(data)
        self.stats_view.refresh()
        with open(STATS_JSON, "w") as f:
            json.dump(data, f, indent=2)

    def show_result(self) -> None:
        """Show the win/lose message plus the countdown to tomorrow's puzzle."""
        if self.result:
            content = "You Win!"
        else:
            content = f"You are almost there! The answer is:\n{self.solution}"
        content += "\nPress 'c' to copy the result."
        self.message.content = content
        self.message.show_eta(SEED_DATE + datetime.timedelta(days=self.index + 1))

    def handle_button_pressed(self, message: ButtonPressed) -> None:
        """On-screen keyboard input; ignored once the game has ended."""
        if self.result is not None:
            return
        self.message.content = ""
        if message.sender.name == "enter":
            self.check_input()
        elif message.sender.name == "backspace":
            self.guess.backspace_letter()
        else:
            self.guess.input_letter(message.sender.name)

    def get_index(self) -> int:
        """Return today's puzzle number: whole days elapsed since SEED_DATE."""
        this_date = datetime.datetime.combine(datetime.date.today(), datetime.time())
        return (this_date - SEED_DATE).days

    def init_game(self) -> None:
        """Restore a game already played today from saved stats, else reset."""
        if self.index > self.stats.get("last_played", -1):
            self.stats["last_result"] = None
            return
        slots = self.guess.slots
        for i, (letter, status) in enumerate(zip(*self.stats["last_guesses"])):
            slots[i].name = slots[i].label = letter
            slots[i].status = int(status)
            self.buttons[letter].status = max(
                self.buttons[letter].status or 0, int(status)
            )
        self.result = self.stats["last_result"]
        # NOTE(review): relies on the loop variable `i`; raises NameError if
        # last_guesses is empty — presumably impossible once a game was
        # recorded, but worth confirming.
        self.guess.current = i + 1
        if self.result is not None:
            self.show_result()

    async def on_mount(self) -> None:
        """Build the UI, load saved stats and restore today's game if played."""
        self.index = self.get_index()
        self.solution = La[self.index].upper()
        self.log("Loading stats", STATS_JSON)
        if not STATS_JSON.exists():
            self.stats = INITIAL_STATS.copy()
        else:
            with open(STATS_JSON, "rb") as f:
                self.stats = json.load(f)
        self.result: bool | None = None
        self.buttons = {
            name: Letter(name, True) for row in self.KEYBOARD for name in row
        }
        keyboard_rows = [
            KeyboardRow([self.buttons[k] for k in row]) for row in self.KEYBOARD
        ]
        # add enter and backspace buttons
        keyboard_rows[-1].children.insert(0, Button("ENTER", "enter", style=IDLE))
        keyboard_rows[-1].children.append(Button("⌫", "backspace", style=IDLE))
        view = await self.push_view(DockView())
        header = Header()
        self.message = GameMessage()
        await view.dock(header, edge="top")
        subview = DockView()
        self.guess = GuessView()
        self.init_game()
        await subview.dock(self.guess, size=26)
        await subview.dock(*keyboard_rows, size=4)
        right_side = DockView()
        self.stats_view = GameStats(self.stats)
        await right_side.dock(self.message, self.stats_view)
        await view.dock(right_side, edge="right", size=40)
        await view.dock(subview, edge="right")
def main():
    """Entry point: run the Wordle TUI, logging to textual.log."""
    WordleApp.run(title="WORDLE", log="textual.log")


if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Frappe Technologies and Contributors
# See license.txt
from __future__ import unicode_literals
# import frappe
import unittest
class TestEventProducerLastUpdate(unittest.TestCase):
    """Placeholder test case for the Event Producer Last Update doctype."""
    pass
|
# -*- coding: utf-8 -*-
"""
Azure Resource Manager (ARM) Virtual Network Peering State Module
.. versionadded:: 1.0.0
.. versionchanged:: 4.0.0
:maintainer: <devops@eitr.tech>
:configuration: This module requires Azure Resource Manager credentials to be passed via acct. Note that the
authentication parameters are case sensitive.
Required provider parameters:
if using username and password:
* ``subscription_id``
* ``username``
* ``password``
if using a service principal:
* ``subscription_id``
* ``tenant``
* ``client_id``
* ``secret``
Optional provider parameters:
**cloud_environment**: Used to point the cloud driver to different API endpoints, such as Azure GovCloud.
Possible values:
* ``AZURE_PUBLIC_CLOUD`` (default)
* ``AZURE_CHINA_CLOUD``
* ``AZURE_US_GOV_CLOUD``
* ``AZURE_GERMAN_CLOUD``
Example acct setup for Azure Resource Manager authentication:
.. code-block:: yaml
azurerm:
default:
subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
tenant: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
client_id: ABCDEFAB-1234-ABCD-1234-ABCDEFABCDEF
secret: XXXXXXXXXXXXXXXXXXXXXXXX
cloud_environment: AZURE_PUBLIC_CLOUD
user_pass_auth:
subscription_id: 3287abc8-f98a-c678-3bde-326766fd3617
username: fletch
password: 123pass
The authentication parameters can also be passed as a dictionary of keyword arguments to the ``connection_auth``
parameter of each state, but this is not preferred and could be deprecated in the future.
"""
# Python libs
from __future__ import absolute_import
import logging
import re
log = logging.getLogger(__name__)

# State requisites: creating a peering requires the resource group and the
# local virtual network to exist first.
TREQ = {
    "present": {
        "require": [
            "states.azurerm.resource.group.present",
            "states.azurerm.network.virtual_network.present",
        ]
    },
}
async def present(
    hub,
    ctx,
    name,
    remote_virtual_network,
    virtual_network,
    resource_group,
    remote_vnet_group=None,
    allow_virtual_network_access=True,
    allow_forwarded_traffic=False,
    allow_gateway_transit=False,
    use_remote_gateways=False,
    connection_auth=None,
    **kwargs,
):
    """
    .. versionadded:: 1.0.0

    .. versionchanged:: 4.0.0

    Ensure a virtual network peering object exists.

    :param name:
        Name of the peering object.

    :param remote_virtual_network:
        The name of the remote virtual network.

    :param remote_vnet_group:
        The resource group of the remote virtual network. Defaults to the same resource group as the "local"
        virtual network.

    :param virtual_network:
        Name of the existing virtual network to contain the peering object.

    :param resource_group:
        The resource group assigned to the local virtual network.

    :param allow_virtual_network_access:
        Whether the VMs in the local virtual network space would be able to access the VMs in remote virtual
        network space.

    :param allow_forwarded_traffic:
        Whether the forwarded traffic from the VMs in the local virtual network will be allowed/disallowed
        in remote virtual network.

    :param allow_gateway_transit:
        If gateway links can be used in remote virtual networking to link to this virtual network.

    :param use_remote_gateways:
        If remote gateways can be used on this virtual network. If the flag is set to True, and
        allow_gateway_transit on remote peering is also True, virtual network will use gateways of remote virtual
        network for transit. Only one peering can have this flag set to True. This flag cannot be set if virtual
        network already has a gateway.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure virtual network peering exists:
            azurerm.network.virtual_network_peering.present:
                - name: vnet1_to_vnet2
                - virtual_network: vnet1
                - resource_group: group1
                - remote_virtual_network: vnet2
                - remote_vnet_group: group2
                - allow_virtual_network_access: True
                - allow_forwarded_traffic: False
                - allow_gateway_transit: False
                - use_remote_gateways: False
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}
    action = "create"

    if not isinstance(connection_auth, dict):
        if ctx["acct"]:
            connection_auth = ctx["acct"]
        else:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret

    # Look up any existing peering to decide between create and update.
    peering = await hub.exec.azurerm.network.virtual_network_peering.get(
        ctx,
        name,
        virtual_network,
        resource_group,
        azurerm_log_level="info",
        **connection_auth,
    )

    if "error" not in peering:
        action = "update"

        # Compare the existing remote vnet (last segment of its resource ID)
        # against the requested one.
        remote_vnet = None
        if peering.get("remote_virtual_network", {}).get("id"):
            remote_vnet = peering["remote_virtual_network"]["id"].split("/")[-1]

        if remote_virtual_network != remote_vnet:
            ret["changes"]["remote_virtual_network"] = {
                "old": remote_vnet,
                "new": remote_virtual_network,
            }

        # Diff each boolean option against the existing peering.
        for bool_opt in [
            "use_remote_gateways",
            "allow_forwarded_traffic",
            "allow_virtual_network_access",
            "allow_gateway_transit",
        ]:
            if locals()[bool_opt] != peering.get(bool_opt):
                ret["changes"][bool_opt] = {
                    "old": peering.get(bool_opt),
                    "new": locals()[bool_opt],
                }

        if not ret["changes"]:
            ret["result"] = True
            ret["comment"] = "Peering object {0} is already present.".format(name)
            return ret

        if ctx["test"]:
            ret["result"] = None
            ret["comment"] = "Peering object {0} would be updated.".format(name)
            return ret

    # Test mode, create path.
    # BUGFIX: message previously said "Subnet {0} would be created." — a
    # copy-paste leftover from the subnet state module.
    if ctx["test"]:
        ret["comment"] = "Peering object {0} would be created.".format(name)
        ret["result"] = None
        return ret

    peering_kwargs = kwargs.copy()
    peering_kwargs.update(connection_auth)

    peering = await hub.exec.azurerm.network.virtual_network_peering.create_or_update(
        ctx=ctx,
        name=name,
        remote_virtual_network=remote_virtual_network,
        remote_vnet_group=remote_vnet_group,
        virtual_network=virtual_network,
        resource_group=resource_group,
        use_remote_gateways=use_remote_gateways,
        allow_forwarded_traffic=allow_forwarded_traffic,
        allow_virtual_network_access=allow_virtual_network_access,
        allow_gateway_transit=allow_gateway_transit,
        **peering_kwargs,
    )

    # This is a special case where one side of the peering has been deleted and recreated.
    # In order to establish the new peer, the remote peer needs to be set back to "Initiated" state.
    if peering.get("error", "").startswith("Azure Error: RemotePeeringIsDisconnected"):
        # BUGFIX: raw string — "\S" in a plain literal is an invalid escape
        # sequence (SyntaxWarning on Python 3.12+).
        rname_match = re.search(
            r"because remote peering (\S+) referencing parent virtual network",
            peering["error"],
        )
        # BUGFIX: guard against an unexpected error-message format instead of
        # raising AttributeError on rname_match.group when no match is found.
        if rname_match:
            remote_name = rname_match.group(1).split("/")[-1]

            remote_peering = await hub.exec.azurerm.network.virtual_network_peering.get(
                ctx=ctx,
                name=remote_name,
                virtual_network=remote_virtual_network,
                resource_group=remote_vnet_group,
                azurerm_log_level="info",
                **connection_auth,
            )

            # Push the remote side back to "Initiated" so the local peering
            # can re-establish the connection.
            remote_peering_kwargs = remote_peering.copy()
            remote_peering_kwargs.update(connection_auth)
            remote_peering_kwargs["peering_state"] = "Initiated"
            remote_peering_kwargs.pop("remote_virtual_network")

            remote_peering = await hub.exec.azurerm.network.virtual_network_peering.create_or_update(
                ctx=ctx,
                remote_virtual_network=virtual_network,
                remote_vnet_group=resource_group,
                virtual_network=remote_virtual_network,
                resource_group=remote_vnet_group,
                **remote_peering_kwargs,
            )

            # Retry the local peering now that the remote side is reset.
            peering = await hub.exec.azurerm.network.virtual_network_peering.create_or_update(
                ctx=ctx,
                name=name,
                remote_virtual_network=remote_virtual_network,
                remote_vnet_group=remote_vnet_group,
                virtual_network=virtual_network,
                resource_group=resource_group,
                use_remote_gateways=use_remote_gateways,
                allow_forwarded_traffic=allow_forwarded_traffic,
                allow_virtual_network_access=allow_virtual_network_access,
                allow_gateway_transit=allow_gateway_transit,
                **peering_kwargs,
            )

    if action == "create":
        ret["changes"] = {"old": {}, "new": peering}

    if "error" not in peering:
        ret["result"] = True
        ret["comment"] = f"Peering object {name} has been {action}d."
        return ret

    ret["comment"] = "Failed to {0} peering object {1}! ({2})".format(
        action, name, peering.get("error")
    )
    if not ret["result"]:
        ret["changes"] = {}
    return ret
async def absent(
    hub, ctx, name, virtual_network, resource_group, connection_auth=None, **kwargs
):
    """
    .. versionadded:: 1.0.0

    Ensure a virtual network peering object does not exist in the virtual network.

    :param name:
        Name of the peering object.

    :param virtual_network:
        Name of the existing virtual network containing the peering object.

    :param resource_group:
        The resource group assigned to the virtual network.

    :param connection_auth:
        A dict with subscription and authentication parameters to be used in connecting to the
        Azure Resource Manager API.

    Example usage:

    .. code-block:: yaml

        Ensure virtual network peer absent:
            azurerm.network.virtual_network_peering.absent:
                - name: test_lb
                - virtual_network: test_vnet
                - resource_group: test_group
    """
    ret = {"name": name, "result": False, "comment": "", "changes": {}}

    # Resolve credentials: explicit connection_auth dict wins, else acct.
    if not isinstance(connection_auth, dict):
        if not ctx["acct"]:
            ret[
                "comment"
            ] = "Connection information must be specified via acct or connection_auth dictionary!"
            return ret
        connection_auth = ctx["acct"]

    existing = await hub.exec.azurerm.network.virtual_network_peering.get(
        ctx,
        name,
        virtual_network,
        resource_group,
        azurerm_log_level="info",
        **connection_auth,
    )

    # Already absent — nothing to do.
    if "error" in existing:
        ret["result"] = True
        ret["comment"] = f"Peering object {name} was not found."
        return ret

    # Test mode: report what would change without touching anything.
    if ctx["test"]:
        ret["result"] = None
        ret["comment"] = f"Peering object {name} would be deleted."
        ret["changes"] = {
            "old": existing,
            "new": {},
        }
        return ret

    deleted = await hub.exec.azurerm.network.virtual_network_peering.delete(
        ctx, name, virtual_network, resource_group, **connection_auth
    )

    if not deleted:
        ret["comment"] = f"Failed to delete peering object {name}!"
        return ret

    ret["result"] = True
    ret["comment"] = f"Peering object {name} has been deleted."
    ret["changes"] = {"old": existing, "new": {}}
    return ret
|
"""
A problem that incurs a constant delay at each `_evaluate` call.
"""
__docformat__ = "google"
from time import sleep
from pymoo.core.problem import Problem
import numpy as np
from .wrapped_problem import WrappedProblem
class Delayer(WrappedProblem):
    """
    A problem that sleeps for a set amount of time at each `_evaluate` call
    before delegating to the wrapped problem's `_evaluate`.
    """

    _delay: float
    """
    Sleep time in seconds. Floating point number may be used to indicate a more
    precise sleep time.
    """

    def __init__(
        self,
        problem: Problem,
        delay: float = 0.05,
        *,
        name: str = "delayer",
    ):
        """
        Args:
            problem (Problem): The problem to wrap.
            delay (float): Sleep time in seconds applied at every `_evaluate`
                call. Must be non-negative (zero disables the delay).
                Defaults to `0.05`.
            name (str): An optional name for this problem. This will be used
                when creating history dump files. Defaults to `delayer`.

        Raises:
            ValueError: If `delay` is negative.
        """
        super().__init__(problem, name=name)
        if delay < 0.0:
            # BUGFIX: message used to read "Delay must be a positive.",
            # which is both ungrammatical and wrong — zero is accepted.
            raise ValueError("Delay must be non-negative.")
        self._delay = delay

    def _evaluate(self, x, out, *args, **kwargs):
        # Sleep first, then delegate to the wrapped problem and record the
        # applied delay (one entry per evaluated row) in the history.
        sleep(self._delay)
        self._problem._evaluate(x, out, *args, **kwargs)
        self.add_to_history_x_out(
            x,
            out,
            delay=np.full((x.shape[0],), self._delay),
        )
|
""" serverextension for starters
"""
from .handlers import add_handlers
from .manager import StarterManager
def load_jupyter_server_extension(nbapp):
    """Create a StarterManager, register its handlers and log the starters."""
    starter_manager = StarterManager(parent=nbapp)
    add_handlers(nbapp, starter_manager)
    names = ", ".join(starter_manager.starter_names)
    nbapp.log.info(f"💡 starters: {names}")
|
"""
Collection of MXNet reduction functions, wrapped to fit Ivy syntax and signature.
"""
# global
import mxnet as _mx
from numbers import Number
# local
from ivy.functional.backends.mxnet.core.general import _flat_array_to_1_dim_array
def reduce_sum(x, axis=None, keepdims=False):
    """Sum the elements of ``x`` over ``axis`` (all axes when ``None``)."""
    # Normalize axis to a tuple as expected by mxnet.
    if isinstance(axis, Number):
        axis = (axis,)
    elif isinstance(axis, list):
        axis = tuple(axis)
    elif axis is None:
        axis = tuple(range(len(x.shape)))
    # mxnet reductions cannot handle 0-d arrays; promote to 1-d first.
    if x.shape == ():
        x = _flat_array_to_1_dim_array(x)
    return _mx.nd.sum(x, axis=axis, keepdims=keepdims)
def reduce_prod(x, axis=None, keepdims=False):
    """Multiply the elements of ``x`` over ``axis`` (all axes when ``None``)."""
    # Normalize axis to a tuple as expected by mxnet.
    if isinstance(axis, Number):
        axis = (axis,)
    elif isinstance(axis, list):
        axis = tuple(axis)
    elif axis is None:
        axis = tuple(range(len(x.shape)))
    # mxnet reductions cannot handle 0-d arrays; promote to 1-d first.
    if x.shape == ():
        x = _flat_array_to_1_dim_array(x)
    return _mx.nd.prod(x, axis=axis, keepdims=keepdims)
def reduce_mean(x, axis=None, keepdims=False):
    """Average the elements of ``x`` over ``axis`` (all axes when ``None``)."""
    # Normalize axis to a tuple as expected by mxnet.
    if isinstance(axis, Number):
        axis = (axis,)
    elif isinstance(axis, list):
        axis = tuple(axis)
    elif axis is None:
        axis = tuple(range(len(x.shape)))
    # mxnet reductions cannot handle 0-d arrays; promote to 1-d first.
    if x.shape == ():
        x = _flat_array_to_1_dim_array(x)
    return _mx.nd.mean(x, axis=axis, keepdims=keepdims)
def reduce_var(x, axis=None, keepdims=False):
    """Variance of ``x`` over ``axis`` via the identity E[x^2] - E[x]^2."""
    second_moment = reduce_mean(x ** 2, axis, keepdims)
    first_moment = reduce_mean(x, axis, keepdims)
    return second_moment - first_moment ** 2
def reduce_min(x, axis=None, keepdims=False):
    """Minimum of ``x`` over ``axis`` (all axes when ``None``)."""
    # Normalize axis to a tuple as expected by mxnet.
    if isinstance(axis, Number):
        axis = (axis,)
    elif isinstance(axis, list):
        axis = tuple(axis)
    elif axis is None:
        axis = tuple(range(len(x.shape)))
    # mxnet reductions cannot handle 0-d arrays; promote to 1-d first.
    if x.shape == ():
        x = _flat_array_to_1_dim_array(x)
    return _mx.nd.min(x, axis=axis, keepdims=keepdims)
def reduce_max(x, axis=None, keepdims=False):
    """Maximum of ``x`` over ``axis`` (all axes when ``None``)."""
    # Normalize axis to a tuple as expected by mxnet.
    if isinstance(axis, Number):
        axis = (axis,)
    elif isinstance(axis, list):
        axis = tuple(axis)
    elif axis is None:
        axis = tuple(range(len(x.shape)))
    # mxnet reductions cannot handle 0-d arrays; promote to 1-d first.
    if x.shape == ():
        x = _flat_array_to_1_dim_array(x)
    return _mx.nd.max(x, axis=axis, keepdims=keepdims)
def einsum(equation, *operands):
    """Einstein summation over ``operands``, returned as an nd ndarray."""
    np_operands = [op.as_np_ndarray() for op in operands]
    res = _mx.np.einsum(equation, *np_operands)
    # A 0-d result is promoted to shape (1,) for consistency.
    if res.shape == ():
        return _mx.np.resize(res, (1,)).as_nd_ndarray()
    return res.as_nd_ndarray()
def all(x, axis=None, keepdims=False):
    """Logical AND over ``axis``: the product is non-zero iff all entries are."""
    prod = reduce_prod(x, axis, keepdims)
    return prod.astype(_mx.np.bool_)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Seaky
# @Date: 2019/6/25 10:00
import json
import random
import re
import warnings
from functools import wraps
from pathlib import Path
from urllib import parse
import requests
from bs4 import BeautifulSoup
from ..func.base import MyClass
from ..func.mrun import MultiRun
from ..func.parser import ArgParseClass
# Silence warnings globally (e.g. urllib3 InsecureRequestWarning when
# ssl_verify is disabled).
warnings.filterwarnings("ignore")

# Canned User-Agent strings keyed by browser/platform tag.
UA = {'ie': 'Mozilla/5.0 (MSIE 10.0; Windows NT 6.1; Trident/5.0)',
      'chrome': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
      'firefox': 'Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
      'iphone': 'Mozilla/5.0 (iPhone; U; CPU iPhone OS 4_3_3 like Mac OS X; en-us) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8J2 Safari/6533.18.5',
      'android': 'Mozilla/5.0 (Linux; U; Android 2.3.7; en-us; Nexus One Build/FRF91) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1',
      'wx': 'Mozilla/5.0 (iPhone; CPU iPhone OS 7_0_4 like Mac OS X) AppleWebKit/537.51.1 (KHTML, like Gecko) Mobile/11B554a MicroMessenger/6.2.1'}

# Default request timeout (seconds) and retry count.
HTTP_TIMEOUT = 40
HTTP_RETRY = 3
class Http(MyClass):
    """requests.Session wrapper with User-Agent selection, retries, proxy
    support and cookie persistence.  Subclasses override ``login_action``,
    ``login_verify`` and ``job``."""

    def __init__(self, browser_random=False, browser='chrome', http_timeout=HTTP_TIMEOUT, ssl_verify=False,
                 http_proxy=None, http_headers=None,
                 http_retry=HTTP_RETRY, *args, **kwargs):
        '''
        :param browser_random: pick a random User-Agent from the UA table
        :param browser: ie/chrome/firefox/iphone/android/wx
        :param http_timeout: per-request timeout in seconds
        :param ssl_verify: verify TLS certificates
        :param http_proxy: str, e.g. '127.0.0.1:8087'; SOCKS proxies go through the net.proxy helper
        :param args:
        :param kwargs:
        '''
        MyClass.__init__(self, *args, **kwargs)
        self.kwargs = kwargs
        self.control['browser'] = browser
        self.control['browser_random'] = browser_random
        self.control['http_timeout'] = http_timeout
        self.control['http_retry'] = http_retry
        self.control['ssl_verify'] = ssl_verify
        self.control['http_proxy'] = {'http': http_proxy, 'https': http_proxy} if isinstance(http_proxy, str) else None
        self.session = requests.session()
        if not browser_random:
            # NOTE(review): the fallback is the literal string 'chrome', not
            # UA['chrome'] — an unknown browser tag yields User-Agent "chrome".
            # Likely a bug; confirm before relying on it.
            self.session.headers.update({'User-Agent': UA.get(browser, 'chrome')})
        else:
            tag, ua = random.choice(list(UA.items()))
            self.session.headers.update({'User-Agent': ua})
        if isinstance(http_headers, dict):
            self.session.headers.update(http_headers)
        # self.status = {}  # for some vars preserve
        self.url_root = kwargs.get('url_root')

    def __getattr__(self, item):
        # Unknown attributes fall back to the construction-time kwargs.
        return self.__dict__.get(item, self.kwargs.get(item))

    def fetch(self, url, ret_bs=True, method='GET', timeout=None,
              retry=3, retry_code=None, retry_not_200=False,
              ret_raw=False, ret_dic=False, ret_json=False, charset=None, **kwargs):
        '''
        Perform an HTTP request with retries and flexible return types.

        :param url:
        :param ret_bs: return a BeautifulSoup object (default)
        :param ret_raw: return the raw Response object
        :param method: GET or POST
        :param retry: number of attempts
        :param retry_code: retry on these specific status codes
        :param retry_not_200: retry on any non-200 status
        :param ret_dic: return a dict of url, kwargs and result
        :param charset:
        :param kwargs:
            GET params / POST data
        :return:
        '''
        d = {'verify': self.default('ssl_verify'), 'timeout': timeout or self.default('http_timeout'),
             'proxies': self.default('http_proxy')}
        # NOTE(review): 'retries' is computed but never used — the loop below
        # iterates over the raw 'retry' parameter, so the configured
        # http_retry default is ignored. Likely a bug; confirm intent.
        retries = retry or self.default('http_retry')
        d.update(kwargs)
        # Relative URL: join against url_root, normalizing the slashes.
        if not re.search(r'^http', url, re.I) and self.url_root:
            url = re.sub('/*$', '', self.url_root) + '/' + re.sub('^/*', '', url)
        flag = False
        retry_code = retry_code or []
        code_reason = ''
        for i in range(retry):
            try:
                _raw = self.session.post(url, **d) if method == 'POST' else self.session.get(url, **d)
                code = _raw.status_code
                if retry_not_200 and code != 200:
                    code_reason = code
                    continue
                elif code in retry_code:
                    code_reason = code
                    continue
                else:
                    flag = True
                    url = _raw.url
                    break
            except Exception as e:
                error = e
        if not flag:
            # All attempts failed: distinguish "bad status" from "exception".
            if code_reason:
                raise Exception('fetch {} fail, status_code={}. {}'.format(url, code_reason, d))
            else:
                raise Exception('fetch {} error, {}. {}'.format(url, error, d))
        if ret_raw:
            self.fetch_after(_raw, None, None)
            return {'result': _raw, 'url': url, 'kwargs': d} if ret_dic else _raw
        if ret_json:
            return _raw.json()
        _raw = _raw.content
        if not charset:
            # Sniff the charset from the first 200 bytes of the document.
            m = re.search('charset=\W*(?P<charset>\w+)', _raw[:200].decode(errors='ignore'))
            charset = m.groupdict().get('charset', 'utf-8') if m else 'utf-8'
        if charset == 'gb2312':
            # Pages declared gb2312 are typically GBK; decode as cp936.
            charset = 'cp936'
        _content = _raw.decode(encoding=charset, errors='ignore')
        bs = BeautifulSoup(_content, features=self.kwargs.get('features', 'html.parser'))
        self.fetch_after(_raw, _content, bs)
        ret = bs if ret_bs else _content
        return {'result': ret, 'url': url, 'kwargs': d} if ret_dic else ret

    def multi_job(self, *args, **kwargs):
        '''
        Worker function for the multi-process runner.
        :return: (ok_flag, fetch result)
        '''
        return True, self.fetch(*args, **kwargs)

    def multi_fetch(self, kws, process_num=20, process_time=None, inline=False):
        '''
        Fetch many URLs concurrently via MultiRun.

        :param kws: list of kwargs dicts, one per fetch call
        :param process_num: number of worker processes
        :param process_time: per-process timeout
        :param inline: continue past individual errors
        :return: (is_ok, results)
        '''
        mr = MultiRun(func=self.multi_job, func_kws=kws, log=self.log, add_log_to_common_kw=False,
                      process_num=process_num,
                      verbose=self.verbose, debug=self.debug)
        is_ok, results = mr.run(mrun_save=False, process_timeout=process_time, inline=inline)
        return is_ok, results

    def fetch_after(self, *args):
        '''
        Hook executed after each request, e.g. to harvest a token; override
        in subclasses as needed.
        :param args:
        :return:
        '''
        return True

    def get(self, *args, **kwargs):
        return self.fetch(method='GET', *args, **kwargs)

    def post(self, *args, **kwargs):
        return self.fetch(method='POST', *args, **kwargs)

    def save_stat(self, fn=None):
        # Persist session cookies (and optionally other state) to JSON.
        fn = fn or 'status.json'
        json.dump(
            {'cookies': self.session.cookies.get_dict()
             # , 'status': self.status
             },
            open(str(fn), 'w'), sort_keys=True, indent=True)

    def load_stat(self, fn=None):
        # Restore session cookies from a previous save_stat() dump.
        # Returns True on success, None when the file does not exist.
        fn = fn or 'status.json'
        if Path(fn).exists():
            d = json.load(open(str(fn)))
            self.session.cookies.update(d['cookies'])
            # self.status.update(d['status'])
            return True

    def login(self, load=True, save=True):
        '''Log in, reusing saved cookies when possible.'''
        if load and self.load_stat():
            if self.login_verify():
                self.cache['login'] = True
                return True
        if self.login_action():
            save and self.save_stat()
            self.cache['login'] = True
            return True

    def login_action(self):
        '''
        The actual login flow; override in subclasses.
        :return:
        '''
        pass

    def login_verify(self):
        '''
        Validate state restored by load_stat() during self.login(); override
        in subclasses.
        :return:
        '''
        pass

    def job(self):
        # Main work unit; override in subclasses.
        pass

    def run(self):
        self.job()
def login_check(f):
    """Decorator: run *f* only when the instance's login flag is cached;
    otherwise return False without calling *f*."""
    @wraps(f)
    def guarded(self, *args, **kwargs):
        if not self.cache.get('login'):
            return False
        return f(self, *args, **kwargs)
    return guarded
def url2list(url):
    # Parse a query string into a list of (key, value) tuples.
    # Converting to a dict instead would silently overwrite duplicate keys,
    # hence the list form.
    return parse.parse_qsl(url)
class HttpArgParse(ArgParseClass):
    """Argument-parser subclass that registers the common HTTP options."""

    def __init__(self, *args, **kwargs):
        ArgParseClass.__init__(self, *args, **kwargs)

    def add_http(self, group='Http', http_timeout=HTTP_TIMEOUT, http_retry=HTTP_RETRY, browser='chrome'):
        """Register --http_timeout, --http_retry and --browser options under *group*."""
        self.add('--http_timeout', type=int, default=http_timeout, help='http超时时间,{}'.format(http_timeout), group=group)
        self.add('--http_retry', type=int, default=http_retry, help='重试次数, {}'.format(http_retry), group=group)
        # BUGFIX: removed a stray `.format(http_retry)` on a help string with
        # no placeholders (copy-paste leftover; it was a no-op at runtime).
        self.add('--browser', default=browser,
                 help='浏览器,ie/chrome/firefox/iphone/android/wx', group=group)
if __name__ == '__main__':
pass
|
# -*- coding: utf-8 -*-
from pkg_resources import get_distribution, DistributionNotFound
try:
    # Change here if project is renamed and does not equal the package name
    dist_name = 'actinia_module_plugin.wsgi'
    __version__ = get_distribution(dist_name).version
except DistributionNotFound:
    # Package is not installed (e.g. running from a source checkout).
    __version__ = 'unknown'
|
# System modules
import uuid
from dataclasses import dataclass, field
from typing import List
# 3rd party modules
from flask import make_response, abort
# local modules
from config import ma
from models.model import Model
from models.recipe import Recipe
from models.user import requires_login
from common.database import Database
__author__ = 'dimz'
class ProductSchema(ma.Schema):
    """Marshmallow schema selecting which Product fields are serialized."""

    class Meta:
        # Fields to expose
        # NOTE(review): the Product dataclass defines 'nama_toko' but this
        # list exposes 'username' — verify which attribute actually exists
        # on the serialized documents.
        fields = ("_id", "sumber", "tgl_crawl", "nama_produk", "link_produk", "deskripsi",
                  "thumb", "harga_unit", "harga_awal", "harga", "discount", "qty", "qty_unit",
                  "username", "link_toko")
@dataclass(eq=False)
class Product(Model):
# collection = "items"
collection: str = field(init=False, default="products")
sumber: str
tgl_crawl: str
nama_produk: str
link_produk: str
deskripsi: str
thumb: str
harga_unit: str
harga_awal: str
harga: str
discount: str
qty: str
qty_unit: str
nama_toko: str
link_toko: str
_id: str = field(default_factory=lambda: uuid.uuid4().hex)
def json(self):
product_schema = ProductSchema()
return product_schema.dump(self)
@staticmethod
def find_produk(query=None, projecting=None, sort: List = None, offset: int = 0, limit: int = 10):
"""Mencari produk dari database dengan query dan projecting, ditambah opsi sorting, skip, dan limit
@param query: Dict query pencarian
@param projecting: Dict kolom projecting (field yg ingin diambil sbg output query)
@param sort: List of tuple kolom yg digunakan sebagai sorting
@param offset: int posisi awal record yg akan diambil
@param limit: int jumlah record yg akan diambil
@return: hasil pencarian dari database
"""
if query is None:
query = {}
result = Database.DATABASE['products'].find(query, projecting)
if sort is not None:
result = result.sort(sort)
result = result.skip(offset).limit(limit)
return result
# ----------- function to answer API endpoint -----------
@staticmethod
@requires_login
def read_all(f: List, v: List, sort: List = None, offset: int = 0, limit: int = 10, api_key: str = None):
"""Fungsi ini merespon API pada endpoint /api/v1.0/produk, yaitu mencari produk yang sesuai dengan parameter
yang diberikan.
@param f: List dari field/atribut yang digunakan dalam pencarian
@param v: List dari nilai field/atribut yang digunakan dalam pencarian
@param sort: List nama field/atribut yang digunakan untuk melakukan sorting
@param offset: posisi awal record pada hasil pencarian
@param limit: batas jumlah record yang akan diambil
@param api_key: String API_KEY
@return: JSON record produk yang dicari
"""
# build query
if len(f) != len(v):
abort(400, 'Parameter f tidak sama banyak dengan parameter v, mohon cek kembali.')
if limit > 50:
abort(400, 'Parameter limit lebih dari nilai maksimum (50).')
query = {field_a: {'$regex': ".*{}.*".format(value), '$options': 'i'} for field_a in f for value in v}
# build sort
if sort is None:
sort = ['nama_produk']
sort_ = [(s, Database.ASCENDING) for s in sort]
# Cari produk dari database
produk = Product.find_produk(query=query, sort=sort_, offset=offset, limit=limit)
if produk.count() == 0:
# Bila tidak ditemukan sama sekali
return make_response({"message": "Tidak ditemukan produk dengan parameter yang telah diberikan."}, 204)
else:
# Serialize the data for the response
product_schema = ProductSchema(many=True)
data = product_schema.dump(produk)
return data, 200
@staticmethod
@requires_login
def read_resep(q: str, sort: List = None, offset: int = 0, limit: int = 10, api_key: str = None):
"""Fungsi ini merespon API pada endpoint /api/v1.0/produk/resep, yaitu mencari produk yang digunakan dalam resep
tertentu. Resep didapat dari pencarian menggunakan API eksternal.
@param q: Nama resep yang digunakan dalam pencarian
@param sort: List nama field/atribut produk yang digunakan untuk melakukan sorting
@param offset: posisi awal record pada hasil pencarian produk
@param limit: batas jumlah record produk yang akan diambil
@param api_key: String API_KEY
@return: JSON record produk yang dicari
"""
# pengecekan awal
if limit > 50:
abort(400, 'Parameter limit lebih dari nilai maksimum (50).')
# ambil resep dari API pihak ketiga
recipes = Recipe.read_one_resep(q)
if recipes is None:
# Bila tidak ditemukan sama sekali
return make_response("Tidak ditemukan produk dengan parameter yang telah diberikan.", 204)
bahans = [b for b in recipes.pop('bahan')]
nama_bahans = [b.pop('nama_bahan') for b in bahans]
str_nama_bahan = ' '.join(nama_bahans)
# build sort
if sort is None:
sort = ['nama_produk']
sort_ = [(s, Database.ASCENDING) for s in sort]
# Cari produk dari database
query = {'$text': {'$search': "{}".format(str_nama_bahan)}}
produk = Product.find_produk(query=query, sort=sort_, offset=offset, limit=limit)
if produk.count() == 0:
# Bila tidak ditemukan sama sekali
return make_response({"message": "Tidak ditemukan produk dengan parameter yang telah diberikan."}, 204)
else:
# Serialize the data for the response
product_schema = ProductSchema(many=True)
data = product_schema.dump(produk)
return data, 200
@staticmethod
@requires_login
def read_komplemen(q: str, sort: List = None, offset: int = 0, limit: int = 10, api_key: str = None):
    """Respond to the /api/v1.0/produk/komplemen endpoint: recommend
    complementary products when searching for a given product. The
    recommendation is driven by ingredient data of menus obtained from an
    external API.

    @param q: product name used for the lookup
    @param sort: list of field names used for sorting
    @param offset: position of the first record in the result set
    @param limit: maximum number of records to return
    @param api_key: API_KEY string
    @return: JSON product records
    """
    # Guard: never serve pages larger than 50 records.
    if limit > 50:
        abort(400, 'Parameter limit lebih dari nilai maksimum (50).')
    # Gather every ingredient name from every recipe returned by the
    # external API and join them into one full-text search string.
    recipes = Recipe.read_all_resep(q)
    ingredient_names = [bahan.pop('nama_bahan')
                        for recipe in recipes
                        for bahan in recipe.pop('bahan')]
    search_terms = ' '.join(ingredient_names)
    # Sort defaults to product name, ascending.
    sort_fields = sort if sort is not None else ['nama_produk']
    sort_ = [(field, Database.ASCENDING) for field in sort_fields]
    # Query the product collection with a text search over the ingredients.
    produk = Product.find_produk(
        query={'$text': {'$search': "{}".format(search_terms)}},
        sort=sort_, offset=offset, limit=limit)
    if produk.count() == 0:
        # Nothing matched at all.
        return make_response(
            {"message": "Tidak ditemukan produk dengan parameter yang telah diberikan."}, 204)
    # Serialize the matching products for the response.
    data = ProductSchema(many=True).dump(produk)
    return data, 200
|
# TODO: needs to be standardized with BaseTask
import pandas as pd
import numpy as np
import os
from tophat.constants import SEED
from config.common import *
from gensim.models.doc2vec import TaggedDocument, FAST_VERSION
from gensim.models import Doc2Vec
import fastavro as avro
import itertools as it
from tophat.schemas import factors_avro
from typing import Iterator, Dict, Any, Callable, Tuple
import multiprocessing
# Number of available CPU cores (kept for reference; training below is
# pinned to a single worker -- see `fit_interactions`).
n_cores = multiprocessing.cpu_count()
# Refuse to run with a gensim build that lacks the compiled C extensions.
assert FAST_VERSION > -1, "This will be painfully slow otherwise"
def fit_interactions(interactions_df: pd.DataFrame,
                     user_col: str='ops_user_id',
                     item_col: str='ops_product_id',
                     emb_dim: int=16,
                     ):
    """Fit a Doc2Vec model on user->item interaction history.

    Each user becomes one tagged document whose words are the string ids of
    the items that user interacted with (gensim requires both tags and words
    to be `str`). For now the only doctag is the user id, but you can
    imagine user features being tags.

    :param interactions_df: interaction log, one row per (user, item) event
    :param user_col: column holding the user id (becomes the doc tag)
    :param item_col: column holding the item id (becomes a word)
    :param emb_dim: embedding dimensionality
    :return: the trained `Doc2Vec` model
    """
    # Build the stringified item column as a free-standing Series rather than
    # writing a temporary column into the caller's DataFrame: the previous
    # add-then-drop approach mutated the input and was not exception-safe.
    item_strs = interactions_df[item_col].astype(str)
    grouped = item_strs.groupby(interactions_df[user_col]).apply(list)
    docs = [TaggedDocument(words, tags=[str(user_id)])
            for user_id, words in grouped.items()]
    model = Doc2Vec(docs,
                    size=emb_dim,
                    dm=1,
                    window=4, min_count=5, negative=5,
                    iter=10,
                    workers=1,  # should be `n_cores`, but see issue gensim#336
                    seed=SEED,
                    )
    return model
def model_to_dfs(d2v_model) -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Extract user (doc-tag) and item (word) factor frames from a Doc2Vec model.

    Both returned frames carry an `id` column (str), a `factors` column and a
    zero `bias` column, matching the avro export schema.
    """
    dv = d2v_model.docvecs
    wv = d2v_model.wv
    user_keys = list(dv.doctags.keys())
    user_df = pd.DataFrame(index=pd.Index(user_keys, name='id', dtype=str))
    user_df.reset_index(inplace=True)  # downstream expects `id` column
    # NOTE(review): user factors are cast to float32 lists, while the item
    # factors below keep gensim's native arrays/dtype -- confirm downstream
    # consumers accept both representations.
    user_df['factors'] = dv[user_keys].astype(np.float32).tolist()
    user_df['bias'] = 0.
    item_keys = list(wv.vocab.keys())
    item_df = pd.DataFrame(index=pd.Index(item_keys, name='id', dtype=str))
    item_df.reset_index(inplace=True)
    item_df['factors'] = [wv.word_vec(w) for w in item_keys]
    item_df['bias'] = 0.
    return user_df, item_df
def rec_generator(keys: Iterator,
                  get_vec_fn: Callable,
                  partition: int = 0,
                  n_partitions: int = 1) -> Iterator[Dict[str, Any]]:
    """Yield avro-ready factor records for every `n_partitions`-th key,
    starting at `partition`. `get_vec_fn` maps a key to its factor vector."""
    for key in it.islice(keys, partition, None, n_partitions):
        yield {
            'id': str(key),
            'factors': list(get_vec_fn(key)),
            'bias': 0.,
        }
# PEP 8 (E731): named callables should be `def`s, not lambda assignments --
# they get a proper __name__, tracebacks, and a place for a docstring.
def dv_rec_generator(dv, **kwargs):
    """Record generator over a Doc2Vec docvecs object (user doc-tags)."""
    return rec_generator(
        keys=dv.doctags.keys(), get_vec_fn=lambda doctag: dv[doctag], **kwargs
    )


def wv_rec_generator(wv, **kwargs):
    """Record generator over a Doc2Vec word-vectors vocab (item words)."""
    return rec_generator(
        keys=wv.vocab.keys(), get_vec_fn=wv.word_vec, **kwargs
    )
def export(d2v_model: Doc2Vec, dir_export: str):
    """Write user doc-vectors and item word-vectors as snappy-compressed
    avro files under *dir_export*."""
    targets = [
        ('user_docvecs.avro', dv_rec_generator(d2v_model.docvecs)),
        ('item_wordvecs.avro', wv_rec_generator(d2v_model.wv)),
    ]
    for filename, records in targets:
        with open(os.path.join(dir_export, filename), 'wb') as f_out:
            avro.writer(f_out, factors_avro, records, codec='snappy')
|
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
from electrumx import version

# -- Project information -----------------------------------------------------

project = 'ElectrumX'
copyright = '2016-2020, Neil Booth'
author = 'Neil Booth'

# The full version including branding
release = version
# The short X.Y version
# (keeps only the trailing token of the imported version string; note this
# rebinds the imported name `version`).
version = version.split()[-1]

# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): Sphinx >= 5 warns on `language = None`; 'en' is the modern
# equivalent -- confirm the minimum Sphinx version before changing.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
    'description': 'Lightweight Electrum Server in Python',
    'github_user': 'kyuupichan',
    'github_repo': 'electrumx',
    'github_button': True,
    'github_type': 'star',
    'github_banner': True,
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
html_sidebars = {
    '**': [
        'about.html', 'navigation.html', 'searchbox.html',
    ]
}

# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'ElectrumXdoc'

# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'ElectrumX.tex', 'ElectrumX Documentation',
     'Neil Booth', 'manual'),
]

# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'electrumx', 'ElectrumX Documentation',
     [author], 1)
]

# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'ElectrumX', 'ElectrumX Documentation',
     author, 'ElectrumX', 'One line description of project.',
     'Miscellaneous'),
]
|
"""Testing for Lower Bounds of Dynamic Time Warping."""
import numpy as np
import pytest
import re
from math import sqrt
from pyts.metrics.lower_bounds import (
_lower_bound_yi_x_y, _lower_bound_yi_X_Y, _warping_envelope, _clip
)
from pyts.metrics import (lower_bound_improved, lower_bound_keogh,
lower_bound_kim, lower_bound_yi)
from pyts.metrics import dtw, sakoe_chiba_band
from pyts.metrics.dtw import _dtw_sakoechiba
from sklearn.metrics import pairwise_distances
# 'lower_bound_yi' must refuse pairs of datasets whose series lengths differ.
@pytest.mark.parametrize(
    'X, Y, err_msg',
    [([[1, 1]], [[1]], "Found input variables with inconsistent numbers of "
                       "timestamps: [2, 1]"),
     ([[1]], [[1, 2]], "Found input variables with inconsistent numbers of "
                       "timestamps: [1, 2]"),
     ([[3, 1, 1]], [[1]], "Found input variables with inconsistent numbers of "
                          "timestamps: [3, 1]")]
)
def test_check_consistent_lengths(X, Y, err_msg):
    """Test 'lower_bound_yi' parameter validation."""
    with pytest.raises(ValueError, match=re.escape(err_msg)):
        lower_bound_yi(X, Y)


# Hand-computed Yi lower bounds for pairs of short series; the last three
# cases mirror the first three with x and y swapped (the bound is symmetric).
@pytest.mark.parametrize(
    'x, y, float_desired',
    [([1, 5, 3, 2, 8], [3, 2, 5, 4, 5], sqrt(10)),
     ([4, 5, 3, 6, 8], [1, 2, 0, 1, 2], sqrt(66)),
     ([4, 5, 3, 6, 8], [1, 5, 3, 2, 1], sqrt(19)),
     ([3, 2, 5, 4, 5], [1, 5, 3, 2, 8], sqrt(10)),
     ([1, 2, 0, 1, 2], [4, 5, 3, 6, 8], sqrt(66)),
     ([1, 5, 3, 2, 1], [4, 5, 3, 6, 8], sqrt(19))]
)
def test_lower_bound_yi_x_y(x, y, float_desired):
    """Test '_lower_bound_yi_x_y' function."""
    x, y = np.asarray(x), np.asarray(y)
    float_actual = _lower_bound_yi_x_y(x, min(x), max(x), y, min(y), max(y))
    np.testing.assert_allclose(float_actual, float_desired, atol=1e-5, rtol=0)
@pytest.mark.parametrize(
    'X, Y',
    [([[3, 5, 1, 4, 6], [3, 8, 2, 4, 2]],
      [[4, 5, 9, 2, 3], [4, 3, 5, 2, 3], [5, 9, 3, 3, 4]])]
)
def test_lower_bound_yi_X_Y(X, Y):
    """Test '_lower_bound_yi_X_Y' function."""
    X, Y = np.asarray(X), np.asarray(Y)
    arr_actual = _lower_bound_yi_X_Y(X, X.min(axis=1), X.max(axis=1),
                                     Y, Y.min(axis=1), Y.max(axis=1))
    # The vectorized result must match the scalar implementation pair by pair.
    arr_desired = np.empty((2, 3))
    for i in range(2):
        for j in range(3):
            arr_desired[i, j] = _lower_bound_yi_x_y(
                X[i], X[i].min(), X[i].max(),
                Y[j], Y[j].min(), Y[j].max()
            )
    np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0)


# Hand-computed LB_Yi values: shape is (n_test, n_train).
@pytest.mark.parametrize(
    'X_train, X_test, arr_desired',
    [([[5, 4, 3, 2, 1], [1, 8, 4, 3, 2], [6, 3, 5, 4, 7]],
      [[2, 1, 8, 4, 5]],
      np.sqrt([[9, 0, 6]]))]
)
def test_actual_results_lower_bound_yi(X_train, X_test, arr_desired):
    """Test that the actual results are the expected ones."""
    arr_actual = lower_bound_yi(X_train, X_test)
    np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0)


# LB_Kim values are integers for these inputs, so an exact comparison is used.
@pytest.mark.parametrize(
    'X_train, X_test, arr_desired',
    [([[2, 1, 8, 4, 5], [1, 2, 3, 4, 5]],
      [[5, 4, 3, 2, 1], [1, 8, 4, 3, 2], [6, 3, 5, 4, 7]],
      [[4, 4], [3, 3], [4, 5]])]
)
def test_actual_results_lower_bound_kim(X_train, X_test, arr_desired):
    """Test that the actual results are the expected ones."""
    arr_actual = lower_bound_kim(X_train, X_test)
    np.testing.assert_array_equal(arr_actual, arr_desired)
# Inputs that are neither 2D nor 3D must be rejected.
@pytest.mark.parametrize(
    'X, region, err_msg',
    [([0, 1], [[0, 1], [0, 1]], "X must be a two- or three-dimensional."),
     ([[[[0, 1, 2]]]], [[0, 1], [0, 1]],
      "X must be a two- or three-dimensional.")]
)
def test_parameter_check_warping_envelope(X, region, err_msg):
    """Test '_warping_envelope' parameter validation."""
    with pytest.raises(ValueError, match=err_msg):
        _warping_envelope(X, region)


# The second case is the 3D variant of the first: one extra leading axis.
@pytest.mark.parametrize(
    'X, region, lower_desired, upper_desired',
    [([[0, 1, 2, 3], [1, 5, 3, -1]], [[0, 0, 1, 2], [2, 3, 4, 4]],
      [[0, 0, 1, 2], [1, 1, -1, -1]], [[1, 2, 3, 3], [5, 5, 5, 3]]),
     ([[[0, 1, 2, 3], [1, 5, 3, -1]]], [[0, 0, 1, 2], [2, 3, 4, 4]],
      [[[0, 0, 1, 2], [1, 1, -1, -1]]], [[[1, 2, 3, 3], [5, 5, 5, 3]]])]
)
def test_actual_results_warping_envelope(X, region,
                                         lower_desired, upper_desired):
    """Test that the actual results are the expected ones."""
    lower_actual, upper_actual = _warping_envelope(X, region)
    np.testing.assert_array_equal(lower_actual, lower_desired)
    np.testing.assert_array_equal(upper_actual, upper_desired)


# '_clip' must validate the dimensionality and shape of its bounds.
@pytest.mark.parametrize(
    'X, lower, upper, err_msg',
    [([[0], [1]], [6], [[1], [1]],
      "'lower' must be two- or three-dimensional."),
     ([[0], [1]], [[[[6]]]], [[1], [1]],
      "'lower' must be two- or three-dimensional."),
     ([[0], [1]], [[[6]]], [[1], [1]],
      "'lower' and 'upper' must have the same shape ((1, 1, 1) != (2, 1))")]
)
def test_parameter_check_clip(X, lower, upper, err_msg):
    """Test '_clip' parameter validation."""
    with pytest.raises(ValueError, match=re.escape(err_msg)):
        _clip(X, lower, upper)


@pytest.mark.parametrize(
    'X, lower, upper, arr_desired',
    [([[0, 1, 2, 3], [1, 5, 3, -1]],
      [[0, 3, 3, 3], [-1, 2, 4, 6]],
      [[1, 5, 4, 6], [1, 3, 6, 8]],
      [[[0, 3, 3, 3], [0, 2, 4, 6]], [[1, 5, 3, 3], [1, 3, 4, 6]]]),
     ([[0, 1, 2, 3], [1, 5, 3, -1]],
      [[[0, 3, 3, 3], [-1, 2, 4, 6]]],
      [[[1, 5, 4, 6], [1, 3, 6, 8]]],
      [[[0, 3, 3, 3]], [[1, 3, 4, 6]]])]
)
def test_actual_results_clip(X, lower, upper, arr_desired):
    """Test that the actual results are the expected ones."""
    arr_actual = _clip(X, lower, upper)
    np.testing.assert_array_equal(arr_actual, arr_desired)
def test_actual_results_lower_bound_keogh():
    """Test that the actual results are the expected ones."""
    # Toy dataset
    X_train = np.asarray([[0, 1, 2, 3],
                          [1, 2, 3, 4]])
    X_test = np.asarray([[0, 2.5, 3.5, 6]])

    # Region = Sakoe-Chiba band (w=0): the envelope degenerates to the series
    # itself, so LB_Keogh equals the plain Euclidean distance.
    region = [[0, 1, 2, 3],
              [1, 2, 3, 4]]
    arr_actual = lower_bound_keogh(X_train, X_test, region)
    arr_desired = np.sqrt(np.sum(
        (X_test[:, None, :] - X_train[None, :, :]) ** 2, axis=-1
    ))
    np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0)

    # Region = Sakoe-Chiba band (w=1); desired values derived by hand:
    # lower = [[0, 0, 1, 2], [1, 1, 2, 3]]
    # upper = [[1, 2, 3, 3], [2, 3, 4, 4]]
    # X_proj = [[0, 2, 3, 3], [1, 2.5, 3.5, 4]]
    # LB_Keogh = [[sqrt(0.25 + 0.25 + 9), sqrt(1 + 4)]]
    region_window = [[0, 0, 1, 2],
                     [2, 3, 4, 4]]
    arr_actual_window = lower_bound_keogh(X_train, X_test, region_window)
    arr_desired_window = np.sqrt([[9.5, 5]])
    np.testing.assert_allclose(arr_actual_window, arr_desired_window,
                               atol=1e-5, rtol=0)


def test_actual_results_lower_bound_improved():
    """Test that the actual results are the expected ones."""
    # Toy dataset
    X_train = np.asarray([[0, 1, 2, 3],
                          [1, 2, 3, 4]])
    X_test = np.asarray([[0, 2.5, 3.5, 3.3]])

    # Region = Sakoe-Chiba band (w=0): LB_Improved also reduces to the plain
    # Euclidean distance here.
    region = [[0, 1, 2, 3],
              [1, 2, 3, 4]]
    arr_actual = lower_bound_improved(X_train, X_test, region)
    arr_desired = np.sqrt(np.sum(
        (X_test[:, None, :] - X_train[None, :, :]) ** 2, axis=-1
    ))
    np.testing.assert_allclose(arr_actual, arr_desired, atol=1e-5, rtol=0)

    # Region = Sakoe-Chiba band (w=1); desired values derived by hand:
    # lower_train = [[0, 0, 1, 2], [1, 1, 2, 3]
    # upper_train = [[1, 2, 3, 3], [2, 3, 4, 4]]
    # X_test_proj = [[0, 2, 3, 3], [1, 2.5, 3.5, 3.3]
    # LB_Keogh^2 = [[0.25 + 0.25 + 0.09, 1]] = [[0.59, 1]]
    # lower_test = [[0, 0, 2, 3], [1, 1, 2.5, 3.3]]
    # upper_test = [[2, 3, 3, 3], [2.5, 3.5, 3.5, 3.5]]
    # X_train_proj = [[0, 1, 2, 3], [1, 2, 3, 3.5]]
    # LB_Improved^2 = [[0, 0.25]]
    region_window = [[0, 0, 1, 2],
                     [2, 3, 4, 4]]
    arr_actual_window = lower_bound_improved(X_train, X_test, region_window)
    arr_desired_window = np.sqrt([[0.59 + 0, 1 + 0.25]])
    np.testing.assert_allclose(arr_actual_window, arr_desired_window,
                               atol=1e-5, rtol=0)
def test_lower_bounds_inequalities():
    """Test that the expected inequalities are verified.

    Every lower bound must not exceed the DTW distance it bounds, and
    LB_Keogh must not exceed LB_Improved (which tightens it).
    """
    # Toy dataset
    rng = np.random.RandomState(42)
    n_samples_train, n_samples_test, n_timestamps = 20, 30, 60
    window_size = 0.1
    X_train = rng.randn(n_samples_train, n_timestamps)
    X_test = rng.randn(n_samples_test, n_timestamps)

    # DTW (unconstrained, and constrained to the Sakoe-Chiba band)
    X_dtw = pairwise_distances(X_test, X_train, dtw)
    region = sakoe_chiba_band(n_timestamps, window_size=window_size)
    X_dtw_window = pairwise_distances(X_test, X_train, _dtw_sakoechiba,
                                      window_size=window_size)

    # Lower bounds
    lb_yi = lower_bound_yi(X_train, X_test)
    lb_kim = lower_bound_kim(X_train, X_test)
    lb_keogh = lower_bound_keogh(X_train, X_test, region)
    lb_improved = lower_bound_improved(X_train, X_test, region)

    # Sanity check (EPS absorbs floating-point round-off in the comparisons)
    EPS = 1e-8
    np.testing.assert_array_less(lb_yi, X_dtw + EPS)
    np.testing.assert_array_less(lb_kim, X_dtw + EPS)
    np.testing.assert_array_less(lb_keogh, X_dtw_window + EPS)
    np.testing.assert_array_less(lb_improved, X_dtw_window + EPS)
    np.testing.assert_array_less(lb_keogh, lb_improved + EPS)
|
from math import sqrt
from libs.ustr import ustr
import hashlib
import re
import sys
import numpy as np
import pydicom
try:
from PyQt5.QtGui import *
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
except ImportError:
from PyQt4.QtGui import *
from PyQt4.QtCore import *
def new_icon(icon):
    """Return a QIcon loaded from the Qt resource file by name."""
    return QIcon(':/' + icon)
def new_button(text, icon=None, slot=None):
    """Create a QPushButton with an optional resource icon and click slot."""
    button = QPushButton(text)
    if icon is not None:
        button.setIcon(new_icon(icon))
    if slot is not None:
        button.clicked.connect(slot)
    return button
def new_action(parent, text, slot=None, shortcut=None, icon=None,
               tip=None, checkable=False, enabled=True):
    """Create a new action and assign callbacks, shortcuts, etc.

    :param parent: QObject that owns the action
    :param slot: callable connected to ``triggered`` when given
    :param shortcut: a single key sequence, or a list/tuple of alternatives
    :param tip: used for both the tooltip and the status-bar tip
    """
    a = QAction(text, parent)
    if icon is not None:
        a.setIcon(new_icon(icon))
    if shortcut is not None:
        if isinstance(shortcut, (list, tuple)):
            # Several alternative shortcuts trigger the same action.
            a.setShortcuts(shortcut)
        else:
            a.setShortcut(shortcut)
    if tip is not None:
        a.setToolTip(tip)
        a.setStatusTip(tip)
    if slot is not None:
        a.triggered.connect(slot)
    if checkable:
        a.setCheckable(True)
    a.setEnabled(enabled)
    return a
def add_actions(widget, actions):
    """Append *actions* to *widget*: None adds a separator, a QMenu a submenu."""
    for entry in actions:
        if entry is None:
            widget.addSeparator()
        elif isinstance(entry, QMenu):
            widget.addMenu(entry)
        else:
            widget.addAction(entry)
def label_validator():
    """Validator that rejects labels starting with a space or a tab."""
    return QRegExpValidator(QRegExp(r'^[^ \t].+'), None)
class Struct(object):
    """Simple attribute bag: ``Struct(a=1).a == 1``."""
    def __init__(self, **kwargs):
        self.__dict__.update(kwargs)
def distance(p):
    """Euclidean distance of a QPointF-like object *p* from the origin."""
    return sqrt(p.x() ** 2 + p.y() ** 2)
def format_shortcut(text):
    """Render a ``Mod+Key`` shortcut string as bold rich text.

    Splits on the first ``+`` only, so ``Ctrl+Shift+S`` keeps the remainder
    together in the key part.
    """
    mod, key = text.split('+', 1)
    return '<b>%s</b>+<b>%s</b>' % (mod, key)
def generate_color_by_text(text):
    """Deterministically derive a semi-transparent QColor from *text*.

    The SHA-256 digest of the text is folded into R/G/B channels, so the
    same label always gets the same colour.
    """
    s = ustr(text)
    hash_code = int(hashlib.sha256(s.encode('utf-8')).hexdigest(), 16)
    # Successive powers of 255 peel pseudo-independent channel values
    # out of the big integer digest; alpha is fixed at 100.
    r = int((hash_code / 255) % 255)
    g = int((hash_code / 65025) % 255)
    b = int((hash_code / 16581375) % 255)
    return QColor(r, g, b, 100)
def have_qstring():
    """Return True only on Python 2 with Qt 4, where the QString wrapper
    still exists (py3/Qt5 use the native unicode str type)."""
    if sys.version_info.major >= 3:
        return False
    return not QT_VERSION_STR.startswith('5.')
def util_qt_strlistclass():
    """Return QStringList on py2/Qt4, the builtin ``list`` otherwise."""
    return QStringList if have_qstring() else list
def natural_sort(list, key=lambda s: s):
    """Sort *list* in place into natural (human) alphanumeric order.

    Digit runs compare numerically, so 'item2' sorts before 'item10'.
    The parameter name shadows the builtin ``list`` but is kept for
    backward compatibility with existing callers.
    """
    def alphanum(item):
        # Split into alternating text/digit chunks; digits compare as ints.
        return [int(tok) if tok.isdigit() else tok
                for tok in re.split('([0-9]+)', key(item))]

    list.sort(key=alphanum)
def transform_to_hu(medical_image, image):
    """Convert raw DICOM pixel values to Hounsfield units.

    Applies the dataset's rescale slope/intercept, then clamps everything
    below -1024 (air) up to -1024.
    """
    hu = image * medical_image.RescaleSlope + medical_image.RescaleIntercept
    return np.maximum(hu, -1024)
def window_image(image, window_center, window_width):
    """Clamp *image* intensities to the viewing window.

    Values below center - width/2 are raised to that bound, values above
    center + width/2 lowered to it; a clamped copy is returned.
    """
    half = window_width / 2
    lo, hi = window_center - half, window_center + half
    windowed = image.copy()
    windowed[windowed < lo] = lo
    windowed[windowed > hi] = hi
    return windowed
def resize_grayscale(image):
    """Min-max rescale *image* to the [0, 255] range as float64.

    A constant image (zero dynamic range) is returned as all zeros; the
    previous version divided by zero there and produced NaNs.
    """
    image = np.array(image, dtype=np.float64)
    image -= np.min(image)
    peak = np.max(image)
    if peak > 0:
        image *= 255 / peak
    return image
def read_dicom(path, window_width, window_level):
    """Load a DICOM slice and return a windowed, min-max scaled image.

    :param path: path to the .dcm file
    :param window_width: width of the intensity window (in HU)
    :param window_level: center of the intensity window (in HU)
    :return: float64 array with a trailing single channel axis, e.g. (512, 512, 1)
    """
    image_medical = pydicom.dcmread(path)
    image_data = image_medical.pixel_array
    # Raw pixels -> Hounsfield units -> clamped to the viewing window.
    image_hu = transform_to_hu(image_medical, image_data)
    image_window = window_image(image_hu.copy(), window_level, window_width)
    image_window_pixel = resize_grayscale(image_window)
    image_window_pixel = np.expand_dims(image_window_pixel, axis=2)  # (512, 512, 1)
    # A 3-channel variant was considered but is disabled:
    # image_ths = np.concatenate([image_window_norm, image_window_norm, image_window_norm], axis=2)  # (512, 512, 3)
    # return image_ths  # use 3-channel
    return image_window_pixel  # use single-channel with min-max normalisation
|
notebooks_docs = "notebooks.rst"
notebooks_path = "notebooks"
repo_directory = "notebooks"
repo_owner = "openvinotoolkit"
repo_name = "openvino_notebooks"
artifacts_link = "https://repository.toolbox.iotg.sclab.intel.com/projects/ov-notebook/0.1.0-latest/latest/dist/rst_files/"
blacklisted_extensions = ['.xml', '.bin']
section_names = ["Getting Started", "Convert & Optimize",
"Model Demos", "Model Training", "Live Demos"]
# Templates
binder_template = """
This tutorial is also available as a Jupyter notebook that can be cloned directly from GitHub.
See the |installation_link| for instructions to run this tutorial locally on Windows, Linux or macOS.
To run without installing anything, click the launch binder button.
|binder_link| |github_link|
.. |installation_link| raw:: html
<a href="https://github.com/{{ owner }}/{{ repo }}#-installation-guide" target="_blank">installation guide</a>
.. |binder_link| raw:: html
<a href="https://mybinder.org/v2/gh/{{ owner }}/{{ repo }}/HEAD?filepath={{ folder }}%2F{{ notebook }}%2F{{ notebook }}.ipynb" target="_blank"><img src="https://mybinder.org/badge_logo.svg" alt="Binder"></a>
.. |github_link| raw:: html
<a href="https://github.com/{{ owner }}/{{ repo }}" target="_blank"><img src="https://badgen.net/badge/icon/github?icon=github&label" alt="Github"></a>
\n
"""
no_binder_template = """
This tutorial is also available as a Jupyter notebook that can be cloned directly from GitHub.
See the |installation_link| for instructions to run this tutorial locally on Windows, Linux or macOS.
|github_link|
.. |installation_link| raw:: html
<a href="https://github.com/{{ owner }}/{{ repo }}#-installation-guide" target="_blank">installation guide</a>
.. |github_link| raw:: html
<a href="https://github.com/{{ owner }}/{{ repo }}" target="_blank"><img src="https://badgen.net/badge/icon/github?icon=github&label" alt="Github"></a>
\n
"""
rst_template = """
OpenVINO notebooks documentation
================================
{% for section in sections %}
{{section.name}}
--------------------------------
.. toctree::
:maxdepth: 1
{% for notebook in section.notebooks %} {{notebook.path}}\n{% endfor %}
{% endfor %}
"""
|
#!/usr/bin/env python
"""Setuptools packaging script for dask-jobqueue."""
from os.path import exists

import versioneer
from setuptools import setup

# Runtime dependencies are maintained in requirements.txt.
with open("requirements.txt") as f:
    install_requires = f.read().strip().split("\n")

# Fall back to an empty long description when building without the README.
if exists("README.rst"):
    with open("README.rst") as f:
        long_description = f.read()
else:
    long_description = ""

setup(
    name="dask-jobqueue",
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    # NOTE(review): the trailing ".*" in the description looks like a typo --
    # confirm before changing the published package metadata.
    description="Easy deployment of Dask Distributed on job queuing systems "
    "such as PBS, Slurm, or SGE.*",
    url="https://github.com/dask/dask-jobqueue",
    python_requires=">=3.5",
    license="BSD 3-Clause",
    packages=["dask_jobqueue"],
    include_package_data=True,
    install_requires=install_requires,
    # NOTE(review): tests_require is deprecated in modern setuptools.
    tests_require=["pytest >= 2.7.1"],
    long_description=long_description,
    zip_safe=False,
)
|
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import array
import ctypes
import datetime
import pickle
import sys
import tempfile
import unittest
from pyflink.pyflink_gateway_server import on_windows
from pyflink.serializers import BatchedSerializer, PickleSerializer
from pyflink.java_gateway import get_gateway
from pyflink.table.types import (_infer_schema_from_data, _infer_type,
_array_signed_int_typecode_ctype_mappings,
_array_unsigned_int_typecode_ctype_mappings,
_array_type_mappings, _merge_type,
_create_type_verifier, UserDefinedType, DataTypes, Row, RowField,
RowType, ArrayType, BigIntType, VarCharType, MapType, DataType,
_to_java_type, _from_java_type, ZonedTimestampType,
LocalZonedTimestampType)
from pyflink.testing.test_case_utils import PyFlinkTestCase
class ExamplePointUDT(UserDefinedType):
    """
    User-defined type (UDT) for ExamplePoint.
    """

    @classmethod
    def sql_type(cls):
        # Points are stored as a non-nullable double array [x, y].
        return DataTypes.ARRAY(DataTypes.DOUBLE(False))

    @classmethod
    def module(cls):
        # Module where this UDT is importable from.
        return 'pyflink.table.tests.test_types'

    @classmethod
    def java_udt(cls):
        # Fully-qualified class name of the Java-side counterpart.
        return 'org.apache.flink.table.types.python.ExamplePointUserDefinedType'

    def serialize(self, obj):
        # ExamplePoint -> [x, y]
        return [obj.x, obj.y]

    def deserialize(self, datum):
        # [x, y] -> ExamplePoint
        return ExamplePoint(datum[0], datum[1])
class ExamplePoint:
    """
    An example class to demonstrate UDT in Java, and Python.
    """

    # Marks the class as backed by the UDT above.
    __UDT__ = ExamplePointUDT()

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __repr__(self):
        return "ExamplePoint(%s,%s)" % (self.x, self.y)

    def __str__(self):
        return "(%s,%s)" % (self.x, self.y)

    def __eq__(self, other):
        # Equal iff the other object is the same class with matching coords.
        if not isinstance(other, self.__class__):
            return False
        return other.x == self.x and other.y == self.y
class PythonOnlyUDT(UserDefinedType):
    """
    User-defined type (UDT) for ExamplePoint.
    """

    @classmethod
    def sql_type(cls):
        # Points are stored as a non-nullable double array [x, y].
        return DataTypes.ARRAY(DataTypes.DOUBLE(False))

    @classmethod
    def module(cls):
        # No Java counterpart: this UDT only exists on the Python side.
        return '__main__'

    def serialize(self, obj):
        # PythonOnlyPoint -> [x, y]
        return [obj.x, obj.y]

    def deserialize(self, datum):
        # [x, y] -> PythonOnlyPoint
        return PythonOnlyPoint(datum[0], datum[1])
class PythonOnlyPoint(ExamplePoint):
    """
    An example class to demonstrate UDT in only Python
    """
    # Reuses ExamplePoint's behavior but binds the Python-only UDT.
    __UDT__ = PythonOnlyUDT()
class UTCOffsetTimezone(datetime.tzinfo):
    """
    Fixed-offset timezone specified as a whole-hour UTC offset.
    """

    def __init__(self, offset=0):
        # Fixed offset from UTC, in hours.
        self.OFFSET = datetime.timedelta(hours=offset)

    def utcoffset(self, dt):
        return self.OFFSET

    def dst(self, dt):
        # A fixed-offset zone has no daylight saving time. Per the tzinfo
        # contract, dst() must return timedelta(0) when DST is not in
        # effect; the previous version wrongly returned the full offset.
        return datetime.timedelta(0)
class TypesTests(PyFlinkTestCase):
def test_infer_schema(self):
    """Every supported Python value must infer to the expected DataType.

    The `data` and `expected` lists are positionally aligned: entry i of
    `expected` is the repr of the type inferred for entry i of `data`.
    """
    from decimal import Decimal

    class A(object):
        def __init__(self):
            self.a = 1

    from collections import namedtuple
    Point = namedtuple('Point', 'x y')

    data = [
        True,
        1,
        "a",
        u"a",
        datetime.date(1970, 1, 1),
        datetime.time(0, 0, 0),
        datetime.datetime(1970, 1, 1, 0, 0),
        1.0,
        array.array("d", [1]),
        [1],
        (1,),
        Point(1.0, 5.0),
        {"a": 1},
        bytearray(1),
        Decimal(1),
        Row(a=1),
        Row("a")(1),
        A(),
    ]

    expected = [
        'BooleanType(true)',
        'BigIntType(true)',
        'VarCharType(2147483647, true)',
        'VarCharType(2147483647, true)',
        'DateType(true)',
        'TimeType(0, true)',
        'LocalZonedTimestampType(6, true)',
        'DoubleType(true)',
        "ArrayType(DoubleType(false), true)",
        "ArrayType(BigIntType(true), true)",
        'RowType(RowField(_1, BigIntType(true), ...))',
        'RowType(RowField(x, DoubleType(true), ...),RowField(y, DoubleType(true), ...))',
        'MapType(VarCharType(2147483647, false), BigIntType(true), true)',
        'VarBinaryType(2147483647, true)',
        'DecimalType(38, 18, true)',
        'RowType(RowField(a, BigIntType(true), ...))',
        'RowType(RowField(a, BigIntType(true), ...))',
        'RowType(RowField(a, BigIntType(true), ...))',
    ]

    # The whole `data` list forms a single row of the inferred schema.
    schema = _infer_schema_from_data([data])
    self.assertEqual(expected, [repr(f.data_type) for f in schema.fields])
def test_infer_schema_nulltype(self):
    """Types left open by the first row (empty list/dict, None) must be
    filled in from the second row."""
    elements = [Row(c1=[], c2={}, c3=None),
                Row(c1=[Row(a=1, b='s')], c2={"key": Row(c=1.0, d="2")}, c3="")]
    schema = _infer_schema_from_data(elements)
    self.assertTrue(isinstance(schema, RowType))
    self.assertEqual(3, len(schema.fields))

    # first column is array
    self.assertTrue(isinstance(schema.fields[0].data_type, ArrayType))

    # element type of first column is struct
    self.assertTrue(isinstance(schema.fields[0].data_type.element_type, RowType))

    self.assertTrue(isinstance(schema.fields[0].data_type.element_type.fields[0].data_type,
                               BigIntType))
    self.assertTrue(isinstance(schema.fields[0].data_type.element_type.fields[1].data_type,
                               VarCharType))

    # second column is map
    self.assertTrue(isinstance(schema.fields[1].data_type, MapType))
    self.assertTrue(isinstance(schema.fields[1].data_type.key_type, VarCharType))
    self.assertTrue(isinstance(schema.fields[1].data_type.value_type, RowType))

    # third column is varchar
    self.assertTrue(isinstance(schema.fields[2].data_type, VarCharType))
def test_infer_schema_not_enough_names(self):
    """Missing field names must be auto-filled positionally (_2, _3, ...)."""
    schema = _infer_schema_from_data([["a", "b"]], ["col1"])
    # Fixed: this used assertTrue(schema.names, [...]), which treats the
    # expected list as a failure *message* and never compares anything.
    self.assertEqual(schema.names, ['col1', '_2'])
def test_infer_schema_fails(self):
    """Conflicting value types in the same column must raise TypeError."""
    with self.assertRaises(TypeError):
        # Column "a" is an int in the first row but a str in the second.
        _infer_schema_from_data([[1, 1], ["x", 1]], names=["a", "b"])
def test_infer_nested_schema(self):
    """Nested containers (arrays of rows, maps, arrays of arrays) must
    infer to the matching nested DataTypes."""
    NestedRow = Row("f1", "f2")
    # f1: array of bigint, f2: map of varchar -> double
    data1 = [NestedRow([1, 2], {"row1": 1.0}), NestedRow([2, 3], {"row2": 2.0})]
    schema1 = _infer_schema_from_data(data1)
    expected1 = [
        'ArrayType(BigIntType(true), true)',
        'MapType(VarCharType(2147483647, false), DoubleType(true), true)'
    ]
    self.assertEqual(expected1, [repr(f.data_type) for f in schema1.fields])

    # f1: array of arrays of bigint, f2: array of bigint
    data2 = [NestedRow([[1, 2], [2, 3]], [1, 2]), NestedRow([[2, 3], [3, 4]], [2, 3])]
    schema2 = _infer_schema_from_data(data2)
    expected2 = [
        'ArrayType(ArrayType(BigIntType(true), true), true)',
        'ArrayType(BigIntType(true), true)'
    ]
    self.assertEqual(expected2, [repr(f.data_type) for f in schema2.fields])
def test_convert_row_to_dict(self):
    """Row.as_dict() must convert only the top level; nested Rows stay Rows."""
    row = Row(l=[Row(a=1, b='s')], d={"key": Row(c=1.0, d="2")})
    self.assertEqual(1, row.as_dict()['l'][0].a)
    self.assertEqual(1.0, row.as_dict()['d']['key'].c)
def test_udt(self):
    """UDT-backed objects must be inferred and verified by their UDT, and
    plain lists must be rejected by the UDT verifier."""
    p = ExamplePoint(1.0, 2.0)
    self.assertEqual(_infer_type(p), ExamplePointUDT())
    _create_type_verifier(ExamplePointUDT())(ExamplePoint(1.0, 2.0))
    self.assertRaises(ValueError, lambda: _create_type_verifier(ExamplePointUDT())([1.0, 2.0]))

    # Same expectations for the Python-only UDT.
    p = PythonOnlyPoint(1.0, 2.0)
    self.assertEqual(_infer_type(p), PythonOnlyUDT())
    _create_type_verifier(PythonOnlyUDT())(PythonOnlyPoint(1.0, 2.0))
    self.assertRaises(ValueError, lambda: _create_type_verifier(PythonOnlyUDT())([1.0, 2.0]))
def test_nested_udt_in_df(self):
    """UDT values nested inside arrays and maps must be inferred correctly."""
    expected_schema = DataTypes.ROW() \
        .add("_1", DataTypes.BIGINT()).add("_2", DataTypes.ARRAY(PythonOnlyUDT()))
    data = (1, [PythonOnlyPoint(float(1), float(2))])
    self.assertEqual(expected_schema, _infer_type(data))

    # UDT as a map value, keyed by a non-nullable bigint.
    expected_schema = DataTypes.ROW().add("_1", DataTypes.BIGINT()).add(
        "_2", DataTypes.MAP(DataTypes.BIGINT(False), PythonOnlyUDT()))
    p = (1, {1: PythonOnlyPoint(1, float(2))})
    self.assertEqual(expected_schema, _infer_type(p))
def test_struct_type(self):
    """RowType behaviour: builder vs. field-list construction, (in)equality,
    iteration, len(), and name/index/slice access."""
    row1 = DataTypes.ROW().add("f1", DataTypes.STRING(nullable=True)) \
        .add("f2", DataTypes.STRING(nullable=True))
    row2 = DataTypes.ROW([DataTypes.FIELD("f1", DataTypes.STRING(nullable=True)),
                          DataTypes.FIELD("f2", DataTypes.STRING(nullable=True), None)])
    self.assertEqual(row1.field_names(), row2.names)
    self.assertEqual(row1, row2)

    row1 = DataTypes.ROW().add("f1", DataTypes.STRING(nullable=True)) \
        .add("f2", DataTypes.STRING(nullable=True))
    row2 = DataTypes.ROW([DataTypes.FIELD("f1", DataTypes.STRING(nullable=True))])
    self.assertNotEqual(row1.field_names(), row2.names)
    self.assertNotEqual(row1, row2)

    # .add() also accepts a ready-made RowField instead of (name, type).
    row1 = (DataTypes.ROW().add(DataTypes.FIELD("f1", DataTypes.STRING(nullable=True)))
            .add("f2", DataTypes.STRING(nullable=True)))
    row2 = DataTypes.ROW([DataTypes.FIELD("f1", DataTypes.STRING(nullable=True)),
                          DataTypes.FIELD("f2", DataTypes.STRING(nullable=True))])
    self.assertEqual(row1.field_names(), row2.names)
    self.assertEqual(row1, row2)

    row1 = (DataTypes.ROW().add(DataTypes.FIELD("f1", DataTypes.STRING(nullable=True)))
            .add("f2", DataTypes.STRING(nullable=True)))
    row2 = DataTypes.ROW([DataTypes.FIELD("f1", DataTypes.STRING(nullable=True))])
    self.assertNotEqual(row1.field_names(), row2.names)
    self.assertNotEqual(row1, row2)

    # Catch exception raised during improper construction
    self.assertRaises(ValueError, lambda: DataTypes.ROW().add("name"))

    # Iterating a RowType yields its RowField objects.
    row1 = DataTypes.ROW().add("f1", DataTypes.STRING(nullable=True)) \
        .add("f2", DataTypes.STRING(nullable=True))
    for field in row1:
        self.assertIsInstance(field, RowField)

    row1 = DataTypes.ROW().add("f1", DataTypes.STRING(nullable=True)) \
        .add("f2", DataTypes.STRING(nullable=True))
    self.assertEqual(len(row1), 2)

    # Fields are addressable by name, integer index, and slice.
    row1 = DataTypes.ROW().add("f1", DataTypes.STRING(nullable=True)) \
        .add("f2", DataTypes.STRING(nullable=True))
    self.assertIs(row1["f1"], row1.fields[0])
    self.assertIs(row1[0], row1.fields[0])
    self.assertEqual(row1[0:1], DataTypes.ROW(row1.fields[0:1]))
    self.assertRaises(KeyError, lambda: row1["f9"])
    self.assertRaises(IndexError, lambda: row1[9])
    self.assertRaises(TypeError, lambda: row1[9.9])
def test_infer_bigint_type(self):
    """Python ints infer to BIGINT regardless of magnitude."""
    longrow = [Row(f1='a', f2=100000000000000)]
    schema = _infer_schema_from_data(longrow)
    self.assertEqual(DataTypes.BIGINT(), schema.fields[1].data_type)
    self.assertEqual(DataTypes.BIGINT(), _infer_type(1))
    self.assertEqual(DataTypes.BIGINT(), _infer_type(2 ** 10))
    self.assertEqual(DataTypes.BIGINT(), _infer_type(2 ** 20))
    self.assertEqual(DataTypes.BIGINT(), _infer_type(2 ** 31 - 1))
    self.assertEqual(DataTypes.BIGINT(), _infer_type(2 ** 31))
    self.assertEqual(DataTypes.BIGINT(), _infer_type(2 ** 61))
    # NOTE(review): 2 ** 71 exceeds a signed 64-bit range, yet BIGINT is
    # still expected here — inference looks magnitude-insensitive by design.
    self.assertEqual(DataTypes.BIGINT(), _infer_type(2 ** 71))
def test_merge_type(self):
    """_merge_type unifies identical types (NULL merges with anything) and
    raises TypeError on conflicting component types at any nesting depth."""
    # NULL acts as a neutral element; identical types merge to themselves.
    self.assertEqual(_merge_type(DataTypes.BIGINT(), DataTypes.NULL()), DataTypes.BIGINT())
    self.assertEqual(_merge_type(DataTypes.NULL(), DataTypes.BIGINT()), DataTypes.BIGINT())
    self.assertEqual(_merge_type(DataTypes.BIGINT(), DataTypes.BIGINT()), DataTypes.BIGINT())

    # Arrays: element types must match.
    self.assertEqual(_merge_type(
        DataTypes.ARRAY(DataTypes.BIGINT()),
        DataTypes.ARRAY(DataTypes.BIGINT())
    ), DataTypes.ARRAY(DataTypes.BIGINT()))
    with self.assertRaises(TypeError):
        _merge_type(DataTypes.ARRAY(DataTypes.BIGINT()), DataTypes.ARRAY(DataTypes.DOUBLE()))

    # Maps: both the key type and the value type must match.
    self.assertEqual(_merge_type(
        DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT()),
        DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())
    ), DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT()))
    with self.assertRaises(TypeError):
        _merge_type(
            DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT()),
            DataTypes.MAP(DataTypes.DOUBLE(), DataTypes.BIGINT()))
    with self.assertRaises(TypeError):
        _merge_type(
            DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT()),
            DataTypes.MAP(DataTypes.STRING(), DataTypes.DOUBLE()))

    # Rows: merged field by field.
    self.assertEqual(_merge_type(
        DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.BIGINT()),
                       DataTypes.FIELD('f2', DataTypes.STRING())]),
        DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.BIGINT()),
                       DataTypes.FIELD('f2', DataTypes.STRING())])
    ), DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.BIGINT()),
                      DataTypes.FIELD('f2', DataTypes.STRING())]))
    with self.assertRaises(TypeError):
        _merge_type(
            DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.BIGINT()),
                           DataTypes.FIELD('f2', DataTypes.STRING())]),
            DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.DOUBLE()),
                           DataTypes.FIELD('f2', DataTypes.STRING())]))

    # Rows nested inside rows.
    self.assertEqual(_merge_type(
        DataTypes.ROW([DataTypes.FIELD(
            'f1', DataTypes.ROW([DataTypes.FIELD('f2', DataTypes.BIGINT())]))]),
        DataTypes.ROW([DataTypes.FIELD(
            'f1', DataTypes.ROW([DataTypes.FIELD('f2', DataTypes.BIGINT())]))])
    ), DataTypes.ROW([DataTypes.FIELD(
        'f1', DataTypes.ROW([DataTypes.FIELD('f2', DataTypes.BIGINT())]))]))
    with self.assertRaises(TypeError):
        _merge_type(
            DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ROW(
                [DataTypes.FIELD('f2', DataTypes.BIGINT())]))]),
            DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ROW(
                [DataTypes.FIELD('f2', DataTypes.STRING())]))]))

    # Arrays nested inside rows.
    self.assertEqual(_merge_type(
        DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(DataTypes.BIGINT())),
                       DataTypes.FIELD('f2', DataTypes.STRING())]),
        DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(DataTypes.BIGINT())),
                       DataTypes.FIELD('f2', DataTypes.STRING())])
    ), DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(DataTypes.BIGINT())),
                      DataTypes.FIELD('f2', DataTypes.STRING())]))
    with self.assertRaises(TypeError):
        _merge_type(
            DataTypes.ROW([
                DataTypes.FIELD('f1', DataTypes.ARRAY(DataTypes.BIGINT())),
                DataTypes.FIELD('f2', DataTypes.STRING())]),
            DataTypes.ROW([
                DataTypes.FIELD('f1', DataTypes.ARRAY(DataTypes.DOUBLE())),
                DataTypes.FIELD('f2', DataTypes.STRING())]))

    # Maps nested inside rows.
    self.assertEqual(_merge_type(
        DataTypes.ROW([
            DataTypes.FIELD('f1', DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
            DataTypes.FIELD('f2', DataTypes.STRING())]),
        DataTypes.ROW([
            DataTypes.FIELD('f1', DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
            DataTypes.FIELD('f2', DataTypes.STRING())])
    ), DataTypes.ROW([
        DataTypes.FIELD('f1', DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
        DataTypes.FIELD('f2', DataTypes.STRING())]))
    with self.assertRaises(TypeError):
        _merge_type(
            DataTypes.ROW([
                DataTypes.FIELD('f1', DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
                DataTypes.FIELD('f2', DataTypes.STRING())]),
            DataTypes.ROW([
                DataTypes.FIELD('f1', DataTypes.MAP(DataTypes.STRING(), DataTypes.DOUBLE())),
                DataTypes.FIELD('f2', DataTypes.STRING())]))

    # Maps nested inside arrays nested inside rows.
    self.assertEqual(_merge_type(
        DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(
            DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())))]),
        DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(
            DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())))])
    ), DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(
        DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())))]))
    with self.assertRaises(TypeError):
        _merge_type(
            DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(
                DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())))]),
            DataTypes.ROW([DataTypes.FIELD('f1', DataTypes.ARRAY(
                DataTypes.MAP(DataTypes.DOUBLE(), DataTypes.BIGINT())))])
        )
def test_array_types(self):
    """Type inference for array.array values picks Flink types at least as
    wide as the underlying C type on the current platform."""
    # This test need to make sure that the Scala type selected is at least
    # as large as the python's types. This is necessary because python's
    # array types depend on C implementation on the machine. Therefore there
    # is no machine independent correspondence between python's array types
    # and Scala types.
    # See: https://docs.python.org/2/library/array.html

    def assert_collect_success(typecode, value, element_type):
        # Infer the element type of a one-element array and compare its name.
        self.assertEqual(element_type,
                         str(_infer_type(array.array(typecode, [value])).element_type))

    # supported string types
    #
    # String types in python's array are "u" for Py_UNICODE and "c" for char.
    # "u" will be removed in python 4, and "c" is not supported in python 3.
    supported_string_types = []
    if sys.version_info[0] < 4:
        supported_string_types += ['u']
        # test unicode
        assert_collect_success('u', u'a', 'CHAR')

    # supported float and double
    #
    # Test max, min, and precision for float and double, assuming IEEE 754
    # floating-point format.
    supported_fractional_types = ['f', 'd']
    assert_collect_success('f', ctypes.c_float(1e+38).value, 'FLOAT')
    assert_collect_success('f', ctypes.c_float(1e-38).value, 'FLOAT')
    assert_collect_success('f', ctypes.c_float(1.123456).value, 'FLOAT')
    assert_collect_success('d', sys.float_info.max, 'DOUBLE')
    assert_collect_success('d', sys.float_info.min, 'DOUBLE')
    assert_collect_success('d', sys.float_info.epsilon, 'DOUBLE')

    def get_int_data_type(size):
        # Smallest Flink integer type holding a signed integer of *size* bits.
        if size <= 8:
            return "TINYINT"
        if size <= 16:
            return "SMALLINT"
        if size <= 32:
            return "INT"
        if size <= 64:
            return "BIGINT"

    # supported signed int types
    #
    # The size of C types changes with implementation, we need to make sure
    # that there is no overflow error on the platform running this test.
    supported_signed_int_types = list(
        set(_array_signed_int_typecode_ctype_mappings.keys()).intersection(
            set(_array_type_mappings.keys())))
    for t in supported_signed_int_types:
        ctype = _array_signed_int_typecode_ctype_mappings[t]
        max_val = 2 ** (ctypes.sizeof(ctype) * 8 - 1)
        assert_collect_success(t, max_val - 1, get_int_data_type(ctypes.sizeof(ctype) * 8))
        assert_collect_success(t, -max_val, get_int_data_type(ctypes.sizeof(ctype) * 8))

    # supported unsigned int types
    #
    # JVM does not have unsigned types. We need to be very careful to make
    # sure that there is no overflow error.
    supported_unsigned_int_types = list(
        set(_array_unsigned_int_typecode_ctype_mappings.keys()).intersection(
            set(_array_type_mappings.keys())))
    for t in supported_unsigned_int_types:
        ctype = _array_unsigned_int_typecode_ctype_mappings[t]
        max_val = 2 ** (ctypes.sizeof(ctype) * 8 - 1)
        # An unsigned value needs one extra bit when stored in a signed type.
        assert_collect_success(t, max_val, get_int_data_type(ctypes.sizeof(ctype) * 8 + 1))

    # all supported types
    #
    # Make sure the types tested above:
    # 1. are all supported types
    # 2. cover all supported types
    supported_types = (supported_string_types +
                       supported_fractional_types +
                       supported_signed_int_types +
                       supported_unsigned_int_types)
    self.assertEqual(set(supported_types), set(_array_type_mappings.keys()))

    # all unsupported types
    #
    # Keys in _array_type_mappings is a complete list of all supported types,
    # and types not in _array_type_mappings are considered unsupported.
    all_types = set(array.typecodes)
    unsupported_types = all_types - set(supported_types)
    # test unsupported types
    for t in unsupported_types:
        with self.assertRaises(TypeError):
            _infer_schema_from_data([Row(myarray=array.array(t))])
def test_data_type_eq(self):
    """A pickled-and-restored DataType compares equal to the original."""
    lt = DataTypes.BIGINT()
    lt2 = pickle.loads(pickle.dumps(DataTypes.BIGINT()))
    self.assertEqual(lt, lt2)

def test_decimal_type(self):
    """DECIMAL types with different scales are distinct, unequal objects."""
    t1 = DataTypes.DECIMAL(10, 0)
    t2 = DataTypes.DECIMAL(10, 2)
    self.assertTrue(t2 is not t1)
    self.assertNotEqual(t1, t2)

def test_datetype_equal_zero(self):
    """SQL value 0 for DATE maps to the epoch date 1970-01-01."""
    dt = DataTypes.DATE()
    self.assertEqual(dt.from_sql_type(0), datetime.date(1970, 1, 1))
@unittest.skipIf(on_windows(), "Windows x64 system only support the datetime not larger "
                               "than time.ctime(32536799999), so this test can't run "
                               "under Windows platform")
def test_timestamp_microsecond(self):
    """TIMESTAMP conversion must preserve full microsecond precision."""
    tst = DataTypes.TIMESTAMP()
    self.assertEqual(tst.to_sql_type(datetime.datetime.max) % 1000000, 999999)

@unittest.skipIf(on_windows(), "Windows x64 system only support the datetime not larger "
                               "than time.ctime(32536799999), so this test can't run "
                               "under Windows platform")
def test_local_zoned_timestamp_type(self):
    """TIMESTAMP_WITH_LOCAL_TIME_ZONE: epoch conversion and timezone offsets."""
    lztst = DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE()
    # TIMESTAMP_LTZ is the abbreviated factory for the same type.
    last_abbreviation = DataTypes.TIMESTAMP_LTZ()
    self.assertEqual(lztst, last_abbreviation)

    ts = datetime.datetime(1970, 1, 1, 0, 0, 0, 0000)
    self.assertEqual(0, lztst.to_sql_type(ts))

    import pytz
    # suppose the timezone of the data is +9:00
    timezone = pytz.timezone("Asia/Tokyo")
    orig_epoch = LocalZonedTimestampType.EPOCH_ORDINAL
    try:
        # suppose the local timezone is +8:00
        LocalZonedTimestampType.EPOCH_ORDINAL = 28800000000
        ts_tokyo = timezone.localize(ts)
        self.assertEqual(-3600000000, lztst.to_sql_type(ts_tokyo))
    finally:
        # Always restore the class-level epoch so other tests are unaffected.
        LocalZonedTimestampType.EPOCH_ORDINAL = orig_epoch

    if sys.version_info >= (3, 6):
        ts2 = lztst.from_sql_type(0)
        self.assertEqual(ts.astimezone(), ts2.astimezone())
def test_zoned_timestamp_type(self):
    """ZonedTimestampType round-trips datetimes as (timestamp, utc-offset-seconds) pairs."""
    ztst = ZonedTimestampType()
    ts = datetime.datetime(1970, 1, 1, 0, 0, 0, 0000, tzinfo=UTCOffsetTimezone(1))
    self.assertEqual((0, 3600), ztst.to_sql_type(ts))
    ts2 = ztst.from_sql_type((0, 3600))
    self.assertEqual(ts, ts2)

# NOTE(review): "inteval" is a typo for "interval"; the name is kept unchanged
# here to avoid churn — unittest discovery only requires the "test_" prefix.
def test_day_time_inteval_type(self):
    """DAY-to-SECOND interval converts timedeltas to microseconds and back."""
    ymt = DataTypes.INTERVAL(DataTypes.DAY(), DataTypes.SECOND())
    td = datetime.timedelta(days=1, seconds=10)
    # 1 day + 10 s == 86410 s == 86410000000 microseconds
    self.assertEqual(86410000000, ymt.to_sql_type(td))
    td2 = ymt.from_sql_type(86410000000)
    self.assertEqual(td, td2)

def test_empty_row(self):
    """A Row created with no fields has length 0."""
    row = Row()
    self.assertEqual(len(row), 0)

def test_invalid_create_row(self):
    """Instantiating a Row class with more values than fields must fail."""
    row_class = Row("c1", "c2")
    self.assertRaises(ValueError, lambda: row_class(1, 2, 3))

def test_nullable(self):
    """DataType.nullable() returns a nullable variant of the type."""
    t = DataType(nullable=False)
    self.assertEqual(t._nullable, False)
    t_nullable = t.nullable()
    self.assertEqual(t_nullable._nullable, True)

def test_not_null(self):
    """DataType.not_null() returns a non-nullable variant of the type."""
    t = DataType(nullable=True)
    self.assertEqual(t._nullable, True)
    t_notnull = t.not_null()
    self.assertEqual(t_notnull._nullable, False)
class DataTypeVerificationTests(PyFlinkTestCase):
    """Tests for _create_type_verifier: error reporting, nullability handling,
    and a per-type accept/reject matrix."""

    def test_verify_type_exception_msg(self):
        """Verifier errors surface for named fields and for nested rows."""
        self.assertRaises(
            ValueError,
            lambda: _create_type_verifier(
                DataTypes.STRING(nullable=False), name="test_name")(None))

        schema = DataTypes.ROW(
            [DataTypes.FIELD('a', DataTypes.ROW([DataTypes.FIELD('b', DataTypes.INT())]))])
        self.assertRaises(
            TypeError,
            lambda: _create_type_verifier(schema)([["data"]]))

    def test_verify_type_ok_nullable(self):
        """None must pass verification for any nullable type."""
        obj = None
        types = [DataTypes.INT(), DataTypes.FLOAT(), DataTypes.STRING(), DataTypes.ROW([])]
        for data_type in types:
            try:
                _create_type_verifier(data_type)(obj)
            except (TypeError, ValueError):
                self.fail("verify_type(%s, %s, nullable=True)" % (obj, data_type))

    def test_verify_type_not_nullable(self):
        """Table-driven accept/reject cases for non-nullable verifiers."""
        import array
        import datetime
        import decimal

        schema = DataTypes.ROW([
            DataTypes.FIELD('s', DataTypes.STRING(nullable=False)),
            DataTypes.FIELD('i', DataTypes.INT(True))])

        class MyObj:
            # Simple attribute bag: rows may also be supplied as objects.
            def __init__(self, **kwargs):
                for k, v in kwargs.items():
                    setattr(self, k, v)

        # obj, data_type
        success_spec = [
            # String
            ("", DataTypes.STRING()),
            (u"", DataTypes.STRING()),
            # UDT
            (ExamplePoint(1.0, 2.0), ExamplePointUDT()),
            # Boolean
            (True, DataTypes.BOOLEAN()),
            # TinyInt
            (-(2 ** 7), DataTypes.TINYINT()),
            (2 ** 7 - 1, DataTypes.TINYINT()),
            # SmallInt
            (-(2 ** 15), DataTypes.SMALLINT()),
            (2 ** 15 - 1, DataTypes.SMALLINT()),
            # Int
            (-(2 ** 31), DataTypes.INT()),
            (2 ** 31 - 1, DataTypes.INT()),
            # BigInt
            (2 ** 64, DataTypes.BIGINT()),
            # Float & Double
            (1.0, DataTypes.FLOAT()),
            (1.0, DataTypes.DOUBLE()),
            # Decimal
            (decimal.Decimal("1.0"), DataTypes.DECIMAL(10, 0)),
            # Binary
            (bytearray([1]), DataTypes.BINARY(1)),
            # Date/Time/Timestamp
            (datetime.date(2000, 1, 2), DataTypes.DATE()),
            (datetime.datetime(2000, 1, 2, 3, 4), DataTypes.DATE()),
            (datetime.time(1, 1, 2), DataTypes.TIME()),
            (datetime.datetime(2000, 1, 2, 3, 4), DataTypes.TIMESTAMP()),
            # Array
            ([], DataTypes.ARRAY(DataTypes.INT())),
            (["1", None], DataTypes.ARRAY(DataTypes.STRING(nullable=True))),
            ([1, 2], DataTypes.ARRAY(DataTypes.INT())),
            ((1, 2), DataTypes.ARRAY(DataTypes.INT())),
            (array.array('h', [1, 2]), DataTypes.ARRAY(DataTypes.INT())),
            # Map
            ({}, DataTypes.MAP(DataTypes.STRING(), DataTypes.INT())),
            ({"a": 1}, DataTypes.MAP(DataTypes.STRING(), DataTypes.INT())),
            ({"a": None}, DataTypes.MAP(DataTypes.STRING(nullable=False), DataTypes.INT(True))),
            # Struct
            ({"s": "a", "i": 1}, schema),
            ({"s": "a", "i": None}, schema),
            ({"s": "a"}, schema),
            ({"s": "a", "f": 1.0}, schema),
            (Row(s="a", i=1), schema),
            (Row(s="a", i=None), schema),
            (Row(s="a", i=1, f=1.0), schema),
            (["a", 1], schema),
            (["a", None], schema),
            (("a", 1), schema),
            (MyObj(s="a", i=1), schema),
            (MyObj(s="a", i=None), schema),
            (MyObj(s="a"), schema),
        ]

        # obj, data_type, exception class
        failure_spec = [
            # Char/VarChar (match anything but None)
            (None, DataTypes.VARCHAR(1), ValueError),
            (None, DataTypes.CHAR(1), ValueError),
            # VarChar (length exceeds maximum length)
            ("abc", DataTypes.VARCHAR(1), ValueError),
            # Char (length exceeds length)
            ("abc", DataTypes.CHAR(1), ValueError),
            # UDT
            (ExamplePoint(1.0, 2.0), PythonOnlyUDT(), ValueError),
            # Boolean
            (1, DataTypes.BOOLEAN(), TypeError),
            ("True", DataTypes.BOOLEAN(), TypeError),
            ([1], DataTypes.BOOLEAN(), TypeError),
            # TinyInt
            (-(2 ** 7) - 1, DataTypes.TINYINT(), ValueError),
            (2 ** 7, DataTypes.TINYINT(), ValueError),
            ("1", DataTypes.TINYINT(), TypeError),
            (1.0, DataTypes.TINYINT(), TypeError),
            # SmallInt
            (-(2 ** 15) - 1, DataTypes.SMALLINT(), ValueError),
            (2 ** 15, DataTypes.SMALLINT(), ValueError),
            # Int
            (-(2 ** 31) - 1, DataTypes.INT(), ValueError),
            (2 ** 31, DataTypes.INT(), ValueError),
            # Float & Double
            (1, DataTypes.FLOAT(), TypeError),
            (1, DataTypes.DOUBLE(), TypeError),
            # Decimal
            (1.0, DataTypes.DECIMAL(10, 0), TypeError),
            (1, DataTypes.DECIMAL(10, 0), TypeError),
            ("1.0", DataTypes.DECIMAL(10, 0), TypeError),
            # Binary
            (1, DataTypes.BINARY(1), TypeError),
            # VarBinary (length exceeds maximum length)
            (bytearray([1, 2]), DataTypes.VARBINARY(1), ValueError),
            # Char (length exceeds length)
            (bytearray([1, 2]), DataTypes.BINARY(1), ValueError),
            # Date/Time/Timestamp
            ("2000-01-02", DataTypes.DATE(), TypeError),
            ("10:01:02", DataTypes.TIME(), TypeError),
            (946811040, DataTypes.TIMESTAMP(), TypeError),
            # Array
            (["1", None], DataTypes.ARRAY(DataTypes.VARCHAR(1, nullable=False)), ValueError),
            ([1, "2"], DataTypes.ARRAY(DataTypes.INT()), TypeError),
            # Map
            ({"a": 1}, DataTypes.MAP(DataTypes.INT(), DataTypes.INT()), TypeError),
            ({"a": "1"}, DataTypes.MAP(DataTypes.VARCHAR(1), DataTypes.INT()), TypeError),
            ({"a": None}, DataTypes.MAP(DataTypes.VARCHAR(1), DataTypes.INT(False)), ValueError),
            # Struct
            ({"s": "a", "i": "1"}, schema, TypeError),
            (Row(s="a"), schema, ValueError),  # Row can't have missing field
            (Row(s="a", i="1"), schema, TypeError),
            (["a"], schema, ValueError),
            (["a", "1"], schema, TypeError),
            (MyObj(s="a", i="1"), schema, TypeError),
            (MyObj(s=None, i="1"), schema, ValueError),
        ]

        # Check success cases
        for obj, data_type in success_spec:
            try:
                _create_type_verifier(data_type.not_null())(obj)
            except (TypeError, ValueError):
                self.fail("verify_type(%s, %s, nullable=False)" % (obj, data_type))

        # Check failure cases
        for obj, data_type, exp in failure_spec:
            msg = "verify_type(%s, %s, nullable=False) == %s" % (obj, data_type, exp)
            with self.assertRaises(exp, msg=msg):
                _create_type_verifier(data_type.not_null())(obj)
class DataTypeConvertTests(PyFlinkTestCase):
    """Round-trip tests between Python DataTypes and their Java counterparts
    via _to_java_type / _from_java_type."""

    def test_basic_type(self):
        """Atomic types survive a Python -> Java -> Python round trip."""
        test_types = [DataTypes.STRING(),
                      DataTypes.BOOLEAN(),
                      DataTypes.BYTES(),
                      DataTypes.TINYINT(),
                      DataTypes.SMALLINT(),
                      DataTypes.INT(),
                      DataTypes.BIGINT(),
                      DataTypes.FLOAT(),
                      DataTypes.DOUBLE(),
                      DataTypes.DATE(),
                      DataTypes.TIME(),
                      DataTypes.TIMESTAMP(3)]
        java_types = [_to_java_type(item) for item in test_types]
        converted_python_types = [_from_java_type(item) for item in java_types]
        self.assertEqual(test_types, converted_python_types)

    def test_atomic_type_with_data_type_with_parameters(self):
        """Parameterized / not-null Java types map to the expected Python types."""
        gateway = get_gateway()
        JDataTypes = gateway.jvm.DataTypes
        java_types = [JDataTypes.TIME(3).notNull(),
                      JDataTypes.TIMESTAMP(3).notNull(),
                      JDataTypes.VARBINARY(100).notNull(),
                      JDataTypes.BINARY(2).notNull(),
                      JDataTypes.VARCHAR(30).notNull(),
                      JDataTypes.CHAR(50).notNull(),
                      JDataTypes.DECIMAL(20, 10).notNull()]
        converted_python_types = [_from_java_type(item) for item in java_types]
        expected = [DataTypes.TIME(3, False),
                    DataTypes.TIMESTAMP(3).not_null(),
                    DataTypes.VARBINARY(100, False),
                    DataTypes.BINARY(2, False),
                    DataTypes.VARCHAR(30, False),
                    DataTypes.CHAR(50, False),
                    DataTypes.DECIMAL(20, 10, False)]
        self.assertEqual(converted_python_types, expected)

        # Legacy type tests
        Types = gateway.jvm.org.apache.flink.table.api.Types
        BlinkBigDecimalTypeInfo = \
            gateway.jvm.org.apache.flink.table.runtime.typeutils.BigDecimalTypeInfo
        java_types = [Types.STRING(),
                      Types.DECIMAL(),
                      BlinkBigDecimalTypeInfo(12, 5)]
        converted_python_types = [_from_java_type(item) for item in java_types]
        expected = [DataTypes.VARCHAR(2147483647),
                    DataTypes.DECIMAL(38, 18),
                    DataTypes.DECIMAL(12, 5)]
        self.assertEqual(converted_python_types, expected)

    def test_array_type(self):
        """ARRAY types, including nested arrays, round-trip."""
        # nullable/not_null flag will be lost during the conversion.
        test_types = [DataTypes.ARRAY(DataTypes.BIGINT()),
                      DataTypes.ARRAY(DataTypes.BIGINT()),
                      DataTypes.ARRAY(DataTypes.STRING()),
                      DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.BIGINT())),
                      DataTypes.ARRAY(DataTypes.ARRAY(DataTypes.STRING()))]
        java_types = [_to_java_type(item) for item in test_types]
        converted_python_types = [_from_java_type(item) for item in java_types]
        self.assertEqual(test_types, converted_python_types)

    def test_multiset_type(self):
        """MULTISET types, including nested multisets, round-trip."""
        test_types = [DataTypes.MULTISET(DataTypes.BIGINT()),
                      DataTypes.MULTISET(DataTypes.STRING()),
                      DataTypes.MULTISET(DataTypes.MULTISET(DataTypes.BIGINT())),
                      DataTypes.MULTISET(DataTypes.MULTISET(DataTypes.STRING()))]
        java_types = [_to_java_type(item) for item in test_types]
        converted_python_types = [_from_java_type(item) for item in java_types]
        self.assertEqual(test_types, converted_python_types)

    def test_map_type(self):
        """MAP types, including nested maps as values, round-trip."""
        test_types = [DataTypes.MAP(DataTypes.BIGINT(), DataTypes.BIGINT()),
                      DataTypes.MAP(DataTypes.STRING(), DataTypes.STRING()),
                      DataTypes.MAP(DataTypes.STRING(),
                                    DataTypes.MAP(DataTypes.STRING(), DataTypes.BIGINT())),
                      DataTypes.MAP(DataTypes.STRING(),
                                    DataTypes.MAP(DataTypes.STRING(), DataTypes.STRING()))]
        java_types = [_to_java_type(item) for item in test_types]
        converted_python_types = [_from_java_type(item) for item in java_types]
        self.assertEqual(test_types, converted_python_types)

    def test_row_type(self):
        """ROW types with nested rows round-trip."""
        test_types = [DataTypes.ROW([DataTypes.FIELD("a", DataTypes.INT()),
                                     DataTypes.FIELD("b",
                                                     DataTypes.ROW(
                                                         [DataTypes.FIELD("c",
                                                                          DataTypes.STRING())]))])]
        java_types = [_to_java_type(item) for item in test_types]
        converted_python_types = [_from_java_type(item) for item in java_types]
        self.assertEqual(test_types, converted_python_types)

    def test_list_view_type(self):
        """LIST_VIEW types round-trip."""
        test_types = [DataTypes.LIST_VIEW(DataTypes.BIGINT()),
                      DataTypes.LIST_VIEW(DataTypes.STRING())]
        java_types = [_to_java_type(item) for item in test_types]
        converted_python_types = [_from_java_type(item) for item in java_types]
        self.assertEqual(test_types, converted_python_types)
class DataSerializerTests(PyFlinkTestCase):
    """Python-side pickle output must be readable back on the JVM side."""

    def test_java_pickle_deserializer(self):
        """Plain PickleSerializer output is readable by PythonBridgeUtils (batched=False)."""
        temp_file = tempfile.NamedTemporaryFile(delete=False, dir=tempfile.mkdtemp())
        serializer = PickleSerializer()
        data = [(1, 2), (3, 4), (5, 6), (7, 8)]
        try:
            serializer.serialize(data, temp_file)
        finally:
            # Close even if serialization fails so the JVM can open the path.
            temp_file.close()
        gateway = get_gateway()
        result = [tuple(int_pair) for int_pair in
                  list(gateway.jvm.PythonBridgeUtils.readPythonObjects(temp_file.name, False))]
        self.assertEqual(result, [(1, 2), (3, 4), (5, 6), (7, 8)])

    def test_java_batch_deserializer(self):
        """BatchedSerializer output (batch size 2) is readable with batched=True."""
        temp_file = tempfile.NamedTemporaryFile(delete=False, dir=tempfile.mkdtemp())
        serializer = BatchedSerializer(PickleSerializer(), 2)
        data = [(1, 2), (3, 4), (5, 6), (7, 8)]
        try:
            serializer.serialize(data, temp_file)
        finally:
            temp_file.close()
        gateway = get_gateway()
        result = [tuple(int_pair) for int_pair in
                  list(gateway.jvm.PythonBridgeUtils.readPythonObjects(temp_file.name, True))]
        self.assertEqual(result, [(1, 2), (3, 4), (5, 6), (7, 8)])
if __name__ == "__main__":
    # Prefer xmlrunner (XML reports for CI) when installed; otherwise fall
    # back to the default text test runner.
    try:
        import xmlrunner
        testRunner = xmlrunner.XMLTestRunner(output='target/test-reports')
    except ImportError:
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
|
import os

from setuptools import find_packages, setup

# Load version number: chemprop/_version.py defines __version__.
__version__ = None
src_dir = os.path.abspath(os.path.dirname(__file__))
version_file = os.path.join(src_dir, 'chemprop', '_version.py')
with open(version_file, encoding='utf-8') as fd:
    exec(fd.read())

# Load README as the long description shown on PyPI.
with open('README.md', encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='chemprop',
    version=__version__,
    author='Kyle Swanson, Kevin Yang, Wengong Jin, Lior Hirschfeld, Allison Tam',
    author_email='chemprop@mit.edu',
    description='Molecular Property Prediction with Message Passing Neural Networks',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/chemprop/chemprop',
    # Fix: GitHub serves source tarballs under /archive/<tag>.tar.gz; the
    # previous URL (https://github.com/chemprop/chemprop/v_<version>.tar.gz)
    # pointed at a non-existent path and returned 404.
    download_url=f'https://github.com/chemprop/chemprop/archive/v_{__version__}.tar.gz',
    project_urls={
        'Documentation': 'https://chemprop.readthedocs.io/en/latest/',
        'Source': 'https://github.com/chemprop/chemprop',
        'PyPi': 'https://pypi.org/project/chemprop/',
        'Demo': 'http://chemprop.csail.mit.edu/',
    },
    license='MIT',
    packages=find_packages(),
    package_data={'chemprop': ['py.typed']},  # ship the PEP 561 marker
    entry_points={
        'console_scripts': [
            'chemprop_train=chemprop.train:chemprop_train',
            'chemprop_predict=chemprop.train:chemprop_predict',
            'chemprop_hyperopt=chemprop.hyperparameter_optimization:chemprop_hyperopt',
            'chemprop_interpret=chemprop.interpret:chemprop_interpret',
            'chemprop_web=chemprop.web.run:chemprop_web',
            'sklearn_train=chemprop.sklearn_train:sklearn_train',
            'sklearn_predict=chemprop.sklearn_predict:sklearn_predict',
        ]
    },
    install_requires=[
        'flask>=1.1.2',
        'hyperopt>=0.2.3',
        'matplotlib>=3.1.3',
        'numpy>=1.18.1',
        'pandas>=1.0.3',
        'pandas-flavor>=0.2.0',
        'scikit-learn>=0.22.2.post1',
        'scipy>=1.4.1',
        'sphinx>=3.1.2',
        'tensorboardX>=2.0',
        'torch>=1.5.1',
        'tqdm>=4.45.0',
        'typed-argument-parser>=1.6.1'
    ],
    extras_require={
        'test': [
            'pytest>=6.2.2',
            'parameterized>=0.8.1'
        ]
    },
    python_requires='>=3.6',
    classifiers=[
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent'
    ],
    keywords=[
        'chemistry',
        'machine learning',
        'property prediction',
        'message passing neural network',
        'graph neural network'
    ]
)
|
"""
백준 17298번 : 오큰수
"""
n = int(input())
array = list(map(int, input().split()))
answer = [-1] * n
# 스택에 인덱스 넣기
stack = [0]
for i in range(1, n):
while stack and array[stack[-1]] < array[i]:
# 스택의 오른쪽부터 빠져나감
answer[stack.pop()] = array[i]
stack.append(i)
print(*answer)
|
# Layout padding constants for each edge (units not specified here;
# presumably pixels — confirm at the call sites that consume these).
padding_left = 0
padding_right = 8
padding_top = 10
padding_bottom = 20
|
import requests
from bs4 import BeautifulSoup

# Scrape the Worldometer "countries where coronavirus has spread" table.
# Each data row holds 4 <td> cells: country, total cases, total deaths, region.
URL = "https://www.worldometers.info/coronavirus/countries-where-coronavirus-has-spread/"
r = requests.get(URL)
soup = BeautifulSoup(r.content, 'html5lib')

data = {}
for tag in soup.findAll("tr"):
    # Fix: the old version used one file-global cell counter with % 4 logic,
    # which silently misaligned all later rows if any row did not have
    # exactly four <td> cells (and could read `country` before assignment).
    # Parsing per row keeps each country paired with its own statistics.
    cells = [cell.getText() for cell in tag.findAll("td")]
    if not cells:
        continue  # header rows use <th>, not <td>
    # First cell is the country name; the remaining cells are its statistics.
    data[cells[0]] = cells[1:]

for keys, values in data.items():
    print(keys, "==> ", "Total cases:", values[0], "Total deaths:", values[1])

save = input("\n\nDo you want save this statistics to a file (yes/no): ")
if save == "yes":
    with open("covid19-statistics.txt", "w") as f:
        for keys, values in data.items():
            string_data = keys + " ==> " + "Total cases: " + values[0] + " Total deaths: " + values[1] + "\n"
            f.write(string_data)
|
"""
dj-stripe Webhook Tests.
"""
import json
import warnings
from collections import defaultdict
from copy import deepcopy
from importlib import reload
from unittest.mock import Mock, PropertyMock, call, patch
from django.test import TestCase, override_settings
from django.test.client import Client
from django.urls import reverse
from djstripe import settings as djstripe_settings
from djstripe import webhooks
from djstripe.models import Event, WebhookEventTrigger
from djstripe.webhooks import TEST_EVENT_ID, call_handlers, handler, handler_all
from . import (
FAKE_EVENT_TEST_CHARGE_SUCCEEDED,
FAKE_EVENT_TRANSFER_CREATED,
FAKE_TRANSFER,
IS_STATICMETHOD_AUTOSPEC_SUPPORTED,
)
def mock_webhook_handler(webhook_event_trigger):
    """Minimal stand-in handler for tests: delegate straight to ``process()``."""
    process = webhook_event_trigger.process
    process()
class TestWebhook(TestCase):
def tearDown(self):
    """Re-import djstripe settings so override_settings effects don't leak
    into other tests (the module caches values at import time)."""
    reload(djstripe_settings)

def _send_event(self, event_data):
    """POST *event_data* to the djstripe webhook endpoint as Stripe would,
    with a placeholder Stripe-Signature header."""
    return Client().post(
        reverse("djstripe:webhook"),
        json.dumps(event_data),
        content_type="application/json",
        HTTP_STRIPE_SIGNATURE="PLACEHOLDER",
    )
def test_webhook_test_event(self):
    """A Stripe test event is acknowledged (200) and recorded as a trigger,
    but no Event object is persisted for it."""
    self.assertEqual(WebhookEventTrigger.objects.count(), 0)
    resp = self._send_event(FAKE_EVENT_TEST_CHARGE_SUCCEEDED)
    self.assertEqual(resp.status_code, 200)
    self.assertFalse(Event.objects.filter(id=TEST_EVENT_ID).exists())
    self.assertEqual(WebhookEventTrigger.objects.count(), 1)
    event_trigger = WebhookEventTrigger.objects.first()
    self.assertTrue(event_trigger.is_test_event)
@override_settings(DJSTRIPE_WEBHOOK_VALIDATION="retrieve_event")
@patch(
    "stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch(
    "stripe.Event.retrieve",
    return_value=deepcopy(FAKE_EVENT_TRANSFER_CREATED),
    autospec=True,
)
def test_webhook_retrieve_event_fail(
    self, event_retrieve_mock, transfer_retrieve_mock
):
    """With "retrieve_event" validation, an event whose payload does not match
    what Stripe returns on re-retrieval is rejected (400) and not stored."""
    reload(djstripe_settings)  # pick up the overridden validation setting
    invalid_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
    invalid_event["id"] = "evt_invalid"
    invalid_event["data"]["valid"] = "not really"

    resp = self._send_event(invalid_event)
    self.assertEqual(resp.status_code, 400)
    self.assertFalse(Event.objects.filter(id="evt_invalid").exists())

@override_settings(DJSTRIPE_WEBHOOK_VALIDATION="retrieve_event")
@patch(
    "stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch(
    "stripe.Event.retrieve",
    return_value=deepcopy(FAKE_EVENT_TRANSFER_CREATED),
    autospec=True,
)
def test_webhook_retrieve_event_pass(
    self, event_retrieve_mock, transfer_retrieve_mock
):
    """A genuine event passes "retrieve_event" validation; the event is
    re-fetched from Stripe exactly once with the configured API key."""
    reload(djstripe_settings)
    resp = self._send_event(FAKE_EVENT_TRANSFER_CREATED)
    self.assertEqual(resp.status_code, 200)
    event_retrieve_mock.assert_called_once_with(
        api_key=djstripe_settings.STRIPE_SECRET_KEY,
        id=FAKE_EVENT_TRANSFER_CREATED["id"],
    )
@override_settings(
    DJSTRIPE_WEBHOOK_VALIDATION="verify_signature",
    DJSTRIPE_WEBHOOK_SECRET="whsec_XXXXX",
)
@patch(
    "stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch(
    "stripe.Event.retrieve",
    return_value=deepcopy(FAKE_EVENT_TRANSFER_CREATED),
    autospec=True,
)
def test_webhook_invalid_verify_signature_fail(
    self, event_retrieve_mock, transfer_retrieve_mock
):
    """With signature validation enabled, a request whose signature cannot be
    verified is rejected (400) and no Event is stored."""
    reload(djstripe_settings)  # pick up the overridden validation settings
    invalid_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
    invalid_event["id"] = "evt_invalid"
    invalid_event["data"]["valid"] = "not really"

    resp = self._send_event(invalid_event)
    self.assertEqual(resp.status_code, 400)
    self.assertFalse(Event.objects.filter(id="evt_invalid").exists())

@override_settings(
    DJSTRIPE_WEBHOOK_VALIDATION="verify_signature",
    DJSTRIPE_WEBHOOK_SECRET="whsec_XXXXX",
)
@patch(
    "stripe.WebhookSignature.verify_header",
    return_value=True,
    autospec=IS_STATICMETHOD_AUTOSPEC_SUPPORTED,
)
@patch(
    "stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch(
    "stripe.Event.retrieve",
    return_value=deepcopy(FAKE_EVENT_TRANSFER_CREATED),
    autospec=True,
)
def test_webhook_verify_signature_pass(
    self, event_retrieve_mock, transfer_retrieve_mock, verify_header_mock
):
    """A verifiable signature is accepted (200); verify_header receives the
    raw body, header value, secret, and tolerance, and the event is NOT
    re-retrieved from Stripe."""
    reload(djstripe_settings)
    resp = self._send_event(FAKE_EVENT_TRANSFER_CREATED)
    self.assertEqual(resp.status_code, 200)
    self.assertFalse(Event.objects.filter(id="evt_invalid").exists())
    verify_header_mock.assert_called_once_with(
        json.dumps(FAKE_EVENT_TRANSFER_CREATED),
        "PLACEHOLDER",
        djstripe_settings.WEBHOOK_SECRET,
        djstripe_settings.WEBHOOK_TOLERANCE,
    )
    event_retrieve_mock.assert_not_called()
@override_settings(DJSTRIPE_WEBHOOK_VALIDATION=None)
@patch("stripe.WebhookSignature.verify_header", autospec=True)
@patch(
    "stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch(
    "stripe.Event.retrieve",
    return_value=deepcopy(FAKE_EVENT_TRANSFER_CREATED),
    autospec=True,
)
def test_webhook_no_validation_pass(
    self, event_retrieve_mock, transfer_retrieve_mock, verify_header_mock
):
    """With validation disabled, even a tampered event is accepted and stored,
    and neither the signature check nor event re-retrieval is attempted."""
    reload(djstripe_settings)  # pick up DJSTRIPE_WEBHOOK_VALIDATION=None
    invalid_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
    invalid_event["id"] = "evt_invalid"
    invalid_event["data"]["valid"] = "not really"

    resp = self._send_event(invalid_event)
    self.assertEqual(resp.status_code, 200)
    self.assertTrue(Event.objects.filter(id="evt_invalid").exists())
    event_retrieve_mock.assert_not_called()
    verify_header_mock.assert_not_called()

def test_webhook_no_signature(self):
    """A POST without the Stripe-Signature header is rejected (400) and no
    trigger record is created."""
    self.assertEqual(WebhookEventTrigger.objects.count(), 0)
    resp = Client().post(
        reverse("djstripe:webhook"), "{}", content_type="application/json"
    )
    self.assertEqual(resp.status_code, 400)
    self.assertEqual(WebhookEventTrigger.objects.count(), 0)
def test_webhook_remote_addr_is_none(self):
self.assertEqual(WebhookEventTrigger.objects.count(), 0)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
Client().post(
reverse("djstripe:webhook"),
"{}",
content_type="application/json",
HTTP_STRIPE_SIGNATURE="PLACEHOLDER",
REMOTE_ADDR=None,
)
self.assertEqual(WebhookEventTrigger.objects.count(), 1)
event_trigger = WebhookEventTrigger.objects.first()
self.assertEqual(event_trigger.remote_ip, "0.0.0.0")
def test_webhook_remote_addr_is_empty_string(self):
self.assertEqual(WebhookEventTrigger.objects.count(), 0)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
Client().post(
reverse("djstripe:webhook"),
"{}",
content_type="application/json",
HTTP_STRIPE_SIGNATURE="PLACEHOLDER",
REMOTE_ADDR="",
)
self.assertEqual(WebhookEventTrigger.objects.count(), 1)
event_trigger = WebhookEventTrigger.objects.first()
self.assertEqual(event_trigger.remote_ip, "0.0.0.0")
@patch(
"djstripe.models.WebhookEventTrigger.validate", return_value=True, autospec=True
)
@patch("djstripe.models.WebhookEventTrigger.process", autospec=True)
def test_webhook_reraise_exception(
self, webhook_event_process_mock, webhook_event_validate_mock
):
class ProcessException(Exception):
pass
exception_message = "process fail"
webhook_event_process_mock.side_effect = ProcessException(exception_message)
self.assertEqual(WebhookEventTrigger.objects.count(), 0)
fake_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
with self.assertRaisesMessage(ProcessException, exception_message):
self._send_event(fake_event)
self.assertEqual(WebhookEventTrigger.objects.count(), 1)
event_trigger = WebhookEventTrigger.objects.first()
self.assertEqual(event_trigger.exception, exception_message)
@patch.object(
djstripe_settings, "WEBHOOK_EVENT_CALLBACK", return_value=mock_webhook_handler
)
@patch(
"stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch("stripe.Event.retrieve", autospec=True)
def test_webhook_with_custom_callback(
self, event_retrieve_mock, transfer_retrieve_mock, webhook_event_callback_mock
):
fake_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
event_retrieve_mock.return_value = fake_event
djstripe_settings.WEBHOOK_SECRET = ""
resp = self._send_event(fake_event)
self.assertEqual(resp.status_code, 200)
webhook_event_trigger = WebhookEventTrigger.objects.get()
webhook_event_callback_mock.called_once_with(webhook_event_trigger)
@patch(
"stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch("stripe.Event.retrieve", autospec=True)
def test_webhook_with_transfer_event_duplicate(
self, event_retrieve_mock, transfer_retrieve_mock
):
fake_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
event_retrieve_mock.return_value = fake_event
djstripe_settings.WEBHOOK_SECRET = ""
resp = self._send_event(fake_event)
self.assertEqual(resp.status_code, 200)
self.assertTrue(Event.objects.filter(type="transfer.created").exists())
self.assertEqual(1, Event.objects.filter(type="transfer.created").count())
# Duplication
resp = self._send_event(fake_event)
self.assertEqual(resp.status_code, 200)
self.assertEqual(1, Event.objects.filter(type="transfer.created").count())
@patch(
"stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
)
@patch("stripe.Event.retrieve", autospec=True)
def test_webhook_good(self, event_retrieve_mock, transfer_retrieve_mock):
djstripe_settings.WEBHOOK_SECRET = ""
fake_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
event_retrieve_mock.return_value = fake_event
resp = self._send_event(fake_event)
self.assertEqual(resp.status_code, 200)
self.assertEqual(Event.objects.count(), 1)
self.assertEqual(WebhookEventTrigger.objects.count(), 1)
event_trigger = WebhookEventTrigger.objects.first()
self.assertEqual(event_trigger.is_test_event, False)
    @patch.object(target=Event, attribute="invoke_webhook_handlers", autospec=True)
    @patch(
        "stripe.Transfer.retrieve", return_value=deepcopy(FAKE_TRANSFER), autospec=True
    )
    @patch("stripe.Event.retrieve", autospec=True)
    def test_webhook_error(
        self, event_retrieve_mock, transfer_retrieve_mock, mock_invoke_webhook_handlers
    ):
        """Test the case where webhook processing fails to ensure we rollback
        and do not commit the Event object to the database.
        """
        mock_invoke_webhook_handlers.side_effect = KeyError("Test error")

        djstripe_settings.WEBHOOK_SECRET = ""  # disable signature validation
        fake_event = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
        event_retrieve_mock.return_value = fake_event
        # The handler failure propagates out of the webhook view...
        with self.assertRaises(KeyError):
            self._send_event(fake_event)

        # ...the Event insert is rolled back, but the trigger row survives
        # (presumably saved outside the rolled-back transaction) and carries
        # the repr of the raised exception.
        self.assertEqual(Event.objects.count(), 0)
        self.assertEqual(WebhookEventTrigger.objects.count(), 1)
        event_trigger = WebhookEventTrigger.objects.first()
        self.assertEqual(event_trigger.is_test_event, False)
        self.assertEqual(event_trigger.exception, "'Test error'")
class TestWebhookHandlers(TestCase):
    """Tests for the webhook handler registration/dispatch machinery
    (``webhooks.handler`` / ``webhooks.handler_all`` and ``call_handlers``)."""

    def setUp(self):
        # Reset state of registrations per test
        patcher = patch.object(
            webhooks, "registrations", new_callable=(lambda: defaultdict(list))
        )
        self.addCleanup(patcher.stop)
        self.registrations = patcher.start()

        patcher = patch.object(webhooks, "registrations_global", new_callable=list)
        self.addCleanup(patcher.stop)
        self.registrations_global = patcher.start()

    def test_global_handler_registration(self):
        """A handler_all() handler fires for any event type."""
        func_mock = Mock()
        handler_all()(func_mock)
        event = self._call_handlers("wib.ble", {"data": "foo"})  # handled
        self.assertEqual(1, func_mock.call_count)
        func_mock.assert_called_with(event=event)

    def test_event_handler_registration(self):
        """A handler("foo") handler fires only for "foo.*" events; a global
        handler still fires for everything."""
        global_func_mock = Mock()
        handler_all()(global_func_mock)
        func_mock = Mock()
        handler("foo")(func_mock)
        event = self._call_handlers("foo.bar", {"data": "foo"})  # handled
        self._call_handlers("bar.foo", {"data": "foo"})  # not handled
        self.assertEqual(2, global_func_mock.call_count)  # called each time
        self.assertEqual(1, func_mock.call_count)
        func_mock.assert_called_with(event=event)

    def test_event_subtype_handler_registration(self):
        """A handler("foo.bar") handler matches "foo.bar" and deeper subtypes
        ("foo.bar.wib") but not siblings ("foo.baz")."""
        global_func_mock = Mock()
        handler_all()(global_func_mock)
        func_mock = Mock()
        handler("foo.bar")(func_mock)
        event1 = self._call_handlers("foo.bar", {"data": "foo"})  # handled
        event2 = self._call_handlers("foo.bar.wib", {"data": "foo"})  # handled
        self._call_handlers("foo.baz", {"data": "foo"})  # not handled
        self.assertEqual(3, global_func_mock.call_count)  # called each time
        self.assertEqual(2, func_mock.call_count)
        func_mock.assert_has_calls([call(event=event1), call(event=event2)])

    def test_global_handler_registration_with_function(self):
        """handler_all also accepts the function directly (not only as a
        decorator factory)."""
        func_mock = Mock()
        handler_all(func_mock)
        event = self._call_handlers("wib.ble", {"data": "foo"})  # handled
        self.assertEqual(1, func_mock.call_count)
        func_mock.assert_called_with(event=event)

    def test_event_handle_registation_with_string(self):
        """handler() accepts a single event-prefix string."""
        func_mock = Mock()
        handler("foo")(func_mock)
        event = self._call_handlers("foo.bar", {"data": "foo"})  # handled
        self.assertEqual(1, func_mock.call_count)
        func_mock.assert_called_with(event=event)

    def test_event_handle_registation_with_list_of_strings(self):
        """handler() accepts several event-prefix strings at once."""
        func_mock = Mock()
        handler("foo", "bar")(func_mock)
        event1 = self._call_handlers("foo.bar", {"data": "foo"})  # handled
        event2 = self._call_handlers("bar.foo", {"data": "bar"})  # handled
        self.assertEqual(2, func_mock.call_count)
        func_mock.assert_has_calls([call(event=event1), call(event=event2)])

    def test_webhook_event_trigger_invalid_body(self):
        """A trigger whose body is not valid JSON yields a falsy json_body."""
        trigger = WebhookEventTrigger(remote_ip="127.0.0.1", body="invalid json")
        assert not trigger.json_body

    #
    # Helpers
    #

    @staticmethod
    def _call_handlers(event_spec, data):
        """Build a mock Event whose parts/category/verb mirror ``event_spec``
        (e.g. "foo.bar" -> category "foo", verb "bar"), dispatch it through
        call_handlers, and return the mock for assertions.

        NOTE(review): ``data`` is currently unused by this helper.
        """
        event = Mock(spec=Event)
        parts = event_spec.split(".")
        category = parts[0]
        verb = ".".join(parts[1:])
        type(event).parts = PropertyMock(return_value=parts)
        type(event).category = PropertyMock(return_value=category)
        type(event).verb = PropertyMock(return_value=verb)
        call_handlers(event=event)
        return event
|
#!/usr/bin/env python
# ToDo: comment
# ToDo: make convergence measurements relative
import numpy
import matplotlib.pyplot as plt
import input.read as read
from numba import jitclass, int64, float64
import moc_transport.step_characteristic as moc
from scipy.sparse import csr_matrix
import datetime
import matplotlib.ticker as ticker
class QuasiDiffusionPrecursorConcentration:
"""Initialize the object from an input file. """
    def __init__(self, input_file_name):
        """Load problem data from a YAML input file and allocate solver state.

        Sets up Gauss-Legendre quadrature, nuclear data, geometry/time-step
        parameters, the initial flux/current/precursor fields, and the dense
        linear-system workspaces used by the solve_* methods.
        """
        self.input_file_name = input_file_name

        # Quadrature data: 10-point Gauss-Legendre abscissae and weights on [-1, 1].
        self.ab = numpy.array([-0.9739065285171717, -0.8650633666889845, -0.6794095682990244, -0.4333953941292472,
                               -0.1488743389816312, 0.1488743389816312, 0.4333953941292472, 0.6794095682990244,
                               0.8650633666889845, 0.9739065285171717], dtype=numpy.float64)
        self.weights = numpy.array([0.0666713443086881, 0.1494513491505806, 0.2190863625159820, 0.2692667193099963,
                                    0.2955242247147529, 0.2955242247147529, 0.2692667193099963, 0.2190863625159820,
                                    0.1494513491505806, 0.0666713443086881], dtype=numpy.float64)

        # Import from YAML input file
        input_data = read.Input(input_file_name)

        # Nuclear data
        self.sig_t = input_data.data.sig_t  # total cross section
        self.sig_s = input_data.data.sig_s  # scatter cross section
        self.sig_f = input_data.data.sig_f  # fission cross section
        self.nu = input_data.data.nu  # number of neutrons produced per fission
        self.material = input_data.data.material  # material map
        self.dx = input_data.data.dx  # cell width
        self.dt = input_data.data.dt  # time-step size

        # Problem geometry parameters
        self.groups = 1  # energy groups in problem
        self.core_mesh_length = input_data.data.cells  # number of intervals
        # NOTE(review): under Python 2 semantics (this file uses xrange) the
        # division below is integer division, giving 0 — confirm intended.
        self.dmu = 2 / len(self.ab)  # discretization in angle

        # Alpha approximation parameters
        self.alpha = input_data.data.alpha * numpy.ones(self.core_mesh_length, dtype=numpy.float64)  # describes change in scalar flux between time steps
        self.v = input_data.data.v  # neutron velocity
        self.beta = input_data.data.beta  # delayed neutron fraction
        self.lambda_eff = input_data.data.lambda_eff  # delayed neutron precursor decay constant
        # Precursor concentration: column 0 = new step, column 1 = old step.
        self.delayed_neutron_precursor_concentration = input_data.data.dnp_concentration*numpy.ones((self.core_mesh_length, 2), dtype=numpy.float64)
        self.delayed_neutron_precursor_concentration[:, 0] = numpy.ones(self.core_mesh_length)
        # NOTE(review): this constant velocity is immediately overwritten by
        # the linspace assignment two lines below (dead store).
        self.dnpc_velocity = 10 * numpy.ones(self.core_mesh_length, dtype=numpy.float64)
        # Precursor drift velocity on cell edges and at cell centers.
        self.dnpc_v_edge = numpy.linspace(input_data.data.dnp_velocity_lhs, input_data.data.dnp_velocity_rhs, self.core_mesh_length + 1)
        self.dnpc_velocity = numpy.linspace(input_data.data.dnp_velocity_lhs*(1.0-1.0/(2.0*self.core_mesh_length)), input_data.data.dnp_velocity_rhs*(1.0+1.0/(2.0*self.core_mesh_length)), self.core_mesh_length)

        # Set initial values
        self.flux = numpy.ones((self.core_mesh_length, 2), dtype=numpy.float64)  # initialize flux. (position, 0:new, 1:old)
        self.current = numpy.zeros((self.core_mesh_length + 1, 2), dtype=numpy.float64)
        #self.current[:,0] = numpy.ones(self.core_mesh_length + 1)
        self.eddington_factors = 1*numpy.array(numpy.ones(self.core_mesh_length, dtype=numpy.float64))

        # Workspaces for the per-cell and whole-domain linear solves.
        self.coefficient_matrix = numpy.empty([2, 2])
        self.coefficient_matrix_implicit = numpy.empty([3, 3])
        self.coefficient_matrix_stationary_implicit = numpy.empty([2, 2])
        self.rhs = numpy.empty(2)
        self.rhs_implicit = numpy.empty(3)
        self.rhs_stationary_implicit = numpy.empty(2)
        # Stationary system: unknowns [flux(0..n-1), current(0..n)].
        self.stationary_linear_system = numpy.zeros([2*self.core_mesh_length + 1, 2*self.core_mesh_length + 1])
        self.stationary_linear_system_solution = numpy.zeros([2*self.core_mesh_length + 1, 2])
        # Drift system: unknowns [flux(0..n-1), current(0..n), precursor(0..n-1)].
        self.linear_system = numpy.zeros([3 * self.core_mesh_length + 1, 3 * self.core_mesh_length + 1])
        self.linear_system_solution = numpy.zeros([3 * self.core_mesh_length + 1, 2])

        # Method of manufactured solutions parameters
        self.psi_0_mms = 1.0  # constant flux coefficient
        self.C_0_mms = 1.0  # constant precursor coefficient
        self.q_z_mms = numpy.zeros((self.core_mesh_length, 1), dtype=numpy.float64)
        self.q_q_mms = numpy.zeros((self.core_mesh_length + 1, 1), dtype=numpy.float64)
        self.q_p_mms = numpy.zeros((self.core_mesh_length, 1), dtype=numpy.float64)
        # NOTE(review): self.a is assigned twice; the second value wins.
        self.a = 1240.59  # where a*pi is the velocity on the LHS
        self.a = 1273.239544  # where a*pi is the average velocity in the first cell
""" Update neutron flux, neutron current, Eddington factors, and delayed neutron precursor concentration variables."""
def update_variables(self, _flux, _current, _eddington_factors, _delayed_neutron_precursor_concentration):
self.flux[:, 1] = _flux
self.current[:, 1] = _current
self.eddington_factors = _eddington_factors
self.delayed_neutron_precursor_concentration[:, 1] = _delayed_neutron_precursor_concentration
""" Updates the Eddington factors of the grey group. """
def update_eddington(self, _eddington_factors):
self.eddington_factors = _eddington_factors
# Diffusion
#self.eddington_factors = 1/3*numpy.ones(self.core_mesh_length, dtype=numpy.float64)
"""Deprecated method."""
def explicit_time_solve(self):
for position in xrange(self.core_mesh_length):
ave_sig_t = (self.sig_t[self.material[position - 1]] + self.sig_t[self.material[position]])/2
self.current[position + 1, 0] = (self.current[position + 1, 1] + self.v * self.dt \
* (self.eddington_factors[position - 1]*self.flux[position - 1, 1]
- self.eddington_factors[position]*self.flux[position, 1])/self.dx) \
/ (1 + self.v * self.dt * ave_sig_t)
self.current[position + 1, 0] = 10 * (-1)**position
sig_a = self.sig_t[self.material[position]] - self.sig_s[self.material[position]]
self.coefficient_matrix[0, 0] = 1 + self.v * self.dt * (sig_a - (1 - self.beta) * self.nu[self.material[position]] *
self.sig_f[self.material[position]])
self.coefficient_matrix[0, 1] = -self.v * self.dt * self.lambda_eff
self.coefficient_matrix[1, 0] = -self.dt * self.beta * self.nu[self.material[position]] * self.sig_f[self.material[position]]
self.coefficient_matrix[1, 1] = 1 + self.dt * self.lambda_eff
self.rhs[0] = self.flux[position, 1] + self.v * self.dt * \
(self.current[position, 1] - self.current[position + 1, 1]) / self.dx
self.rhs[1] = self.dt * (self.dnpc_velocity[position - 1] *
self.delayed_neutron_precursor_concentration[position - 1, 1] - self.dnpc_velocity[position] *
self.delayed_neutron_precursor_concentration[position, 1]) / self.dx + \
self.delayed_neutron_precursor_concentration[position, 1]
solutions = numpy.linalg.solve(self.coefficient_matrix, self.rhs)
self.flux[position, 0] = solutions[0]
self.delayed_neutron_precursor_concentration[position, 0] = solutions[1]
"""Solve the linear system of the zero moment neutron transport equation, quasi diffusion equation, and precursor
concentration equation (with no precursor drift)."""
def solve_stationary_linear_system(self):
# BUILD THE LINEAR SYSTEM AND THE SOLUTION WILL COME
# LHS
n = self.core_mesh_length
for position in range(n):
sig_a = self.sig_t[self.material[position]] - self.sig_s[self.material[position]]
ave_sig_t = (self.sig_t[self.material[position - 1]] + self.sig_t[self.material[position]]) / 2
zeta = 1 + self.v * self.dt * (sig_a - (1 - self.beta) * self.nu[self.material[position]] *
self.sig_f[self.material[position]] \
+ self.dt * self.beta * self.nu[self.material[position]] \
* self.sig_f[self.material[position]] * self.lambda_eff \
/ (1 + self.dt * self.lambda_eff))
self.stationary_linear_system[position, position] = zeta
courant = self.dt*self.v/self.dx
column = 0
for row in range(n + 1, 2*n):
self.stationary_linear_system[row, column] = -courant*self.eddington_factors[column]
self.stationary_linear_system[row, column + 1] = courant*self.eddington_factors[column]
column += 1
row = 0
for column in range(n, 2*n):
self.stationary_linear_system[row, column] = -courant
self.stationary_linear_system[row, column + 1] = courant
row += 1
position = 0
for index in xrange(n+1, 2*n):
ave_sig_t = (self.sig_t[self.material[position - 1]] + self.sig_t[self.material[position]]) / 2
self.stationary_linear_system[index, index] = 1 + self.dt*self.v*ave_sig_t
position += 1
self.stationary_linear_system[n , n] = 1
self.stationary_linear_system[2*n, 2*n] = 1
# RHS
self.stationary_linear_system_solution[0:n, 1] = numpy.array(self.flux[:, 1] \
+ self.dt*self.v*self.lambda_eff/ (1+ self.dt*self.v*self.lambda_eff)\
* self.delayed_neutron_precursor_concentration[:,1])
self.stationary_linear_system_solution[n:, 1] = numpy.array(self.current[:, 1])
#solve!
self.stationary_linear_system_solution[:, 0] = numpy.linalg.solve(self.stationary_linear_system, self.stationary_linear_system_solution[:, 1])
self.flux[:, 0] = self.stationary_linear_system_solution[0:n, 0]
self.current[:, 0] = self.stationary_linear_system_solution[n:, 0]
#obtain delayed neutron precursor concentration
for i in range(n):
self.delayed_neutron_precursor_concentration[i, 0] = (self.delayed_neutron_precursor_concentration[i, 1] +\
self.flux[i, 0]*self.dt*self.beta*\
self.nu[self.material[i]] * \
self.sig_f[self.material[i]]) / (1 + self.dt*self.lambda_eff)
"""Solve the linear system of the zero moment neutron transport equation, quasi diffusion equation, and precursor
concentration equation (with a drift term)."""
def solve_linear_system(self):
# BUILD THE LINEAR SYSTEM AND THE SOLUTION WILL COME
# LHS
n = self.core_mesh_length
courant = self.dt*self.v/self.dx
# zeroth moment flux terms
for position in range(n):
sig_a = self.sig_t[self.material[position]] - self.sig_s[self.material[position]]
zeta = 1 + self.v * self.dt * (sig_a - (1 - self.beta) * self.nu[self.material[position]] *
self.sig_f[self.material[position]])
self.linear_system[position, position] = zeta
# qd flux terms
column = 0
for row in range(n + 1, 2*n):
self.linear_system[row, column] = -courant*self.eddington_factors[column]
self.linear_system[row, column + 1] = courant*self.eddington_factors[column + 1]
column += 1
# precursor equation flux terms
column = 1
for row in range(2*n + 2, 3*n+1): # skip first entry, aka BC.
self.linear_system[row, column] = -self.dt*self.beta*self.nu[self.material[column]]\
*self.sig_f[self.material[column]]
column += 1
# zeroth moment current terms
row = 0
for column in range(n, 2*n):
self.linear_system[row, column] = -courant
self.linear_system[row, column + 1] = courant
row += 1
# qd current terms
position = 0
for index in xrange(n+1, 2*n):
ave_sig_t = (self.sig_t[self.material[position - 1]] + self.sig_t[self.material[position]]) / 2
self.linear_system[index, index] = 1 + self.dt*self.v*ave_sig_t
position += 1
self.linear_system[n , n] = 1 # LHS current boundary condition
self.linear_system[2*n, 2*n] = 1 # RHS current boundary condition
# zeroth moment precursor terms
row = 0
for column in range(2 * n + 1, 3 * n + 1):
self.linear_system[row, column] = -self.dt * self.v * self.lambda_eff
# note: removing self.dt in line above makes effects of precursors more obvious, but it is not correct
row += 1
# precursor equation precursor terms
position = 1
self.linear_system[2*n + 1, 2*n + 1] = 1
for index in range(2*n + 2, 3 * n + 1):
self.linear_system[index, index - 1] = -self.dnpc_v_edge[position] * self.dt / self.dx
self.linear_system[index, index] = 1 + self.dnpc_v_edge[position + 1] * self.dt / self.dx + self.dt \
* self.lambda_eff
position += 1
# RHS
self.linear_system_solution[0:n, 1] = numpy.array(self.flux[:, 1])
self.linear_system_solution[n:2*n+1, 1] = numpy.array(self.current[:, 1])
self.linear_system_solution[2*n+1:, 1] = numpy.array(self.delayed_neutron_precursor_concentration[:, 1])
# periodic boundary condition on precursor concentration
self.linear_system_solution[2*n+1, 1] = numpy.array(self.delayed_neutron_precursor_concentration[-1, 1])
# Solve!
self.linear_system_solution[:, 0] = numpy.linalg.solve(self.linear_system, self.linear_system_solution[:, 1])
# Assign solutions!
self.flux[:, 0] = self.linear_system_solution[0:n, 0]
self.current[:, 0] = self.linear_system_solution[n:2*n +1, 0]
self.delayed_neutron_precursor_concentration[:, 0] = self.linear_system_solution[2*n + 1:, 0]
"""Solve the transient problem by taking the Eddington factors from a StepCharacteristic solve and putting them
into a linear system of equations."""
def solve_transient(self, steps):
# Initialize arrays to store transient solutions
flux_t = numpy.zeros([self.core_mesh_length, steps + 1])
precursor_t = numpy.zeros([self.core_mesh_length, steps + 1])
# Initialize a StepCharacteristic object
test_moc = moc.StepCharacteristic(self.input_file_name)
# Record initial conditions
flux_t[:, 0] = test_moc.flux[:, 1]
precursor_t[:, 0] = self.delayed_neutron_precursor_concentration[:, 1]
self.update_variables(test_moc.flux[:, 1], test_moc.current, test_moc.eddington_factors,
test_moc.delayed_neutron_precursor_concentration)
for iteration in xrange(steps):
converged = False
while not converged:
self.update_eddington(test_moc.eddington_factors)
# Store previous solutions to evaluate convergence
last_flux = numpy.array(self.flux[:, 0])
last_current = numpy.array(self.current[:, 0])
last_dnpc = numpy.array(self.delayed_neutron_precursor_concentration[:, 0])
self.solve_linear_system()
# Calculate difference between previous and present solutions
flux_diff = abs(last_flux - self.flux[:, 0])
current_diff = abs(last_current[1:-1] - self.current[1:-1, 0])
dnpc_diff = abs(last_dnpc - self.delayed_neutron_precursor_concentration[:, 0])
eddington_diff = abs(test_moc.eddington_factors - test_moc.eddington_factors_old)
if numpy.max(flux_diff / abs(self.flux[:, 0])) < 1E-6 \
and numpy.max(current_diff) < 1E-10 \
and numpy.max(dnpc_diff) < 1E-10\
and numpy.max(eddington_diff / test_moc.eddington_factors) < 1E-6:
test_moc.iterate_alpha()
# Calculate difference between previous and present alpha
alpha_diff = abs(test_moc.alpha - test_moc.alpha_old)/abs(test_moc.alpha_old)
if numpy.max(alpha_diff) < 1E-4:
converged = True
test_moc.flux_t = numpy.array(self.flux[:, 0])
else:
test_moc.update_variables(self.flux[:, 0],
self.delayed_neutron_precursor_concentration[:, 0])
#test_moc.iterate_alpha()
test_moc.solve(False, True)
self.flux[:, 1] = numpy.array(self.flux[:, 0])
self.current[:, 1] = numpy.array(self.current[:, 0])
self.delayed_neutron_precursor_concentration[:, 1] = numpy.array(self.delayed_neutron_precursor_concentration[
:, 0])
flux_t[:, iteration + 1] = numpy.array(self.flux[:, 1])
precursor_t[:, iteration + 1] = numpy.array(self.delayed_neutron_precursor_concentration[:, 0])
# plot flux at each time step
x = numpy.arange(0, self.core_mesh_length)
ax = plt.subplot(111)
for iteration in xrange(steps + 1):
ax.plot(x, flux_t[:, iteration], label= "t = " + "{:.1E}".format(self.dt * iteration))
ax.grid(True)
plt.xlabel('Position [cm]')
plt.ylabel('Flux' + r'$\left[\frac{1}{s cm^{2}}\right]$')
#plt.title('Neutron Flux')
plt.tight_layout()
# Shrink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.75, box.height])
# Put a legend to the right of the current axis
ax.legend(loc='center left', bbox_to_anchor=(1, -0.1))
ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.0e'))
plt.show()
# plot precursor concentration at each time step
ax = plt.subplot(111)
for iteration in xrange(steps + 1):
ax.plot(x, precursor_t[:, iteration], label="t = " + "{:.1E}".format(self.dt * iteration))
ax.grid(True)
plt.xlabel('Position'+r'[cm]')
plt.ylabel('DNPC' + r'$\left[\frac{1}{cm^3}\right]$')
plt.title('Precursor Concentration')
# Shrink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.75, box.height])
# Put a legend to the right of the current axis
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.show()
dnpc_filename = "output/precursor_concentration_"+ str(self.input_file_name) +"_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
flux_filename = "output/flux_" + str(self.input_file_name) +"_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
numpy.savetxt(dnpc_filename, precursor_t, delimiter=",")
numpy.savetxt(flux_filename, flux_t, delimiter=",")
"""Solver for method of manufactured solutions."""
def solve_linear_system_mms(self, t):
# BUILD THE LINEAR SYSTEM AND THE SOLUTION WILL COME
# LHS
n = self.core_mesh_length
courant = self.dt*self.v/self.dx
# zeroth moment flux terms
for position in range(n):
sig_a = self.sig_t[self.material[position]] - self.sig_s[self.material[position]]
zeta = 1 + self.v * self.dt * (sig_a - (1 - self.beta) * self.nu[self.material[position]] *
self.sig_f[self.material[position]])
self.linear_system[position, position] = zeta
# qd flux terms
column = 0
for row in range(n + 1, 2*n):
self.linear_system[row, column] = -courant*self.eddington_factors[column]
self.linear_system[row, column + 1] = courant*self.eddington_factors[column+1]
column += 1
# precursor equation flux terms
column = 0
for row in range(2*n + 1, 3*n+1):
self.linear_system[row, column] = -self.dt*self.beta*self.nu[self.material[column]]\
*self.sig_f[self.material[column]]
column += 1
# zeroth moment current terms
row = 0
for column in range(n, 2*n):
self.linear_system[row, column] = -courant
self.linear_system[row, column + 1] = courant
row += 1
# qd current terms
position = 0
for index in xrange(n+1, 2*n):
ave_sig_t = (self.sig_t[self.material[position - 1]] + self.sig_t[self.material[position]]) / 2
self.linear_system[index, index] = 1 + self.dt*self.v*ave_sig_t
#self.linear_system[index, index] = 1 # debugging
position += 1
self.linear_system[n , n] = 1 # boundary condition
self.linear_system[2*n, 2*n] = 1 # boundary condition
# zeroth moment precursor terms
row = 0
for column in range(2 * n + 1, 3 * n + 1):
self.linear_system[row, column] = -self.dt * self.v * self.lambda_eff
row += 1
# precursor equation precursor terms
position = 1
self.linear_system[2*n + 1, 2*n + 1] = 1 + self.dnpc_v_edge[0] * self.dt / self.dx + self.dt \
* self.lambda_eff # boundary condition
for index in range(2*n + 2, 3 * n + 1):
self.linear_system[index, index - 1] = -self.dnpc_velocity[position - 1] * self.dt / self.dx
self.linear_system[index, index] = 1 + self.dnpc_velocity[position] * self.dt / self.dx + self.dt \
* self.lambda_eff
# might need to evaluate fluxes on cell boundaries, not at cell centers
self.linear_system[index, index - 1] = -self.dnpc_v_edge[position] * self.dt / self.dx
self.linear_system[index, index] = 1 + self.dnpc_v_edge[position+1] * self.dt / self.dx + self.dt \
* self.lambda_eff
position += 1
# RHS
self.calc_q_z_mms(t)
self.calc_q_q_mms(t)
self.calc_q_p_mms(t)
self.linear_system_solution[0:n, 1] = numpy.array(self.flux[:, 1] + self.v * self.dt * self.q_z_mms[:, 0])
self.linear_system_solution[n:2*n+1, 1] = numpy.array(self.current[:, 1] + self.v * self.dt * self.q_q_mms[:, 0])
self.linear_system_solution[2*n+1:, 1] = numpy.array(self.delayed_neutron_precursor_concentration[:, 1] \
+ self.dt * self.q_p_mms[:, 0])
#solve! Uses LAPACK routine_gesv that employs an LU decomposition with partial pivoting.
self.linear_system_solution[:, 0] = numpy.linalg.solve(self.linear_system, self.linear_system_solution[:, 1])
#Assign solutions!
self.flux[:, 0] = self.linear_system_solution[0:n, 0]
self.current[:, 0] = self.linear_system_solution[n:2*n + 1, 0]
self.delayed_neutron_precursor_concentration[:, 0] = self.linear_system_solution[2*n + 1:, 0]
"""Solve the modified transient problem by taking known Eddington factors and putting them
into a linear system of equations. Includes MMS source terms."""
def solve_transient_mms(self, steps):
# Initialize arrays to store transient solutions
flux_t = numpy.zeros([self.core_mesh_length, steps + 1])
precursor_t = numpy.zeros([self.core_mesh_length, steps + 1])
# Initialize a StepCharacteristic object
test_moc = moc.StepCharacteristic(self.input_file_name)
# Initial flux and precursor concentration
for position in xrange(self.core_mesh_length):
self.flux[position, 1] = 2.0*self.psi_0_mms * numpy.sin(position*self.dx + self.dx/2.0)
self.delayed_neutron_precursor_concentration[position, 1] = self.C_0_mms * numpy.sin(position * self.dx + self.dx/2.0)
self.current[:, 1] = numpy.zeros(self.core_mesh_length+1)
# Record initial conditions
flux_t[:, 0] = self.flux[:, 1]
precursor_t[:, 0] = self.delayed_neutron_precursor_concentration[:, 1]
# set Eddington factors to MMS values
self.eddington_factors = (1.0 / 3.0) * numpy.array(numpy.ones(self.core_mesh_length, dtype=numpy.float64))
t = self.dt
for iteration in xrange(steps):
converged = False
while not converged:
# Store previous solutions to evaluate convergence
last_flux = numpy.array(self.flux[:, 0])
last_current = numpy.array(self.current[:, 0])
last_dnpc = numpy.array(self.delayed_neutron_precursor_concentration[:, 0])
self.solve_linear_system_mms(t)
# Calculate difference between previous and present solutions
flux_diff = abs(last_flux - self.flux[:, 0])
current_diff = abs(last_current[1:-1] - self.current[1:-1, 0])
dnpc_diff = abs(last_dnpc - self.delayed_neutron_precursor_concentration[:, 0])
eddington_diff = abs(test_moc.eddington_factors - test_moc.eddington_factors_old)
if numpy.max(flux_diff / abs(self.flux[:, 0])) < 1E-6 \
and numpy.max(dnpc_diff) < 1E-10:
#test_moc.iterate_alpha()
# Calculate difference between previous and present alpha
#alpha_diff = abs(test_moc.alpha - test_moc.alpha_old)
#if numpy.max(alpha_diff/abs(test_moc.alpha)) < 1E-4:
converged = True
test_moc.flux_t = numpy.array(self.flux[:, 0])
t = t + self.dt
else:
test_moc.update_variables(self.flux[:, 0],
self.delayed_neutron_precursor_concentration[:, 0])
#test_moc.iterate_alpha()
#test_moc.solve(False, True)
self.flux[:, 1] = self.flux[:, 0]
self.current[:, 1] = self.current[:, 0]
self.delayed_neutron_precursor_concentration[:, 1] = self.delayed_neutron_precursor_concentration[
:, 0]
flux_t[:, iteration + 1] = self.flux[:, 0]
precursor_t[:, iteration + 1] = self.delayed_neutron_precursor_concentration[:, 0]
# plot flux at each time step
x = numpy.arange(0, self.core_mesh_length)
ax = plt.subplot(111)
for iteration in xrange(steps + 1):
ax.plot(x, flux_t[:, iteration], label= "t = " + str(self.dt * iteration))
ax.grid(True)
plt.xlabel('Position [cm]')
plt.ylabel('Flux [s^-1 cm^-2]')
plt.title('Grey Group: Neutron Flux')
# Shrink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
# Put a legend to the right of the current axis
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.show()
# plot precursor concentration at each time step
ax = plt.subplot(111)
for iteration in xrange(steps + 1):
ax.plot(x, precursor_t[:, iteration], label="t = " + str(self.dt * iteration))
ax.grid(True)
plt.xlabel('Position [cm]')
plt.ylabel('Concentration [cm^-3]')
plt.title('Grey Group: Precursor Concentration')
# Shrink current axis by 20%
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
# Put a legend to the right of the current axis
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.show()
#save final curves to csv files
dnpc_filename = "output/precursor_concentration_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
flux_filename = "output/flux_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
current_filename = "output/current_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
numpy.savetxt(dnpc_filename, self.delayed_neutron_precursor_concentration[:, 0], delimiter=",")
numpy.savetxt(flux_filename, self.flux[:, 0], delimiter=",")
numpy.savetxt(current_filename, self.current[:, 0], delimiter=",")
    """Solve the modified transient problem by taking the Eddington factors from a StepCharacteristic solve and putting them
    into a linear system of equations. Includes MMS source terms."""
    def solve_transient_mms_coupled(self, steps):
        # Coupled quasi-diffusion / MOC transient MMS driver. Each time step
        # alternates a QD linear solve with a MOC transport solve until flux,
        # current, precursor concentration, Eddington factors and alpha are all
        # converged, then advances t by self.dt.
        # Initialize arrays to store transient solutions
        flux_t = numpy.zeros([self.core_mesh_length, steps + 1])
        precursor_t = numpy.zeros([self.core_mesh_length, steps + 1])
        # Initialize a StepCharacteristic object
        test_moc = moc.StepCharacteristic(self.input_file_name)
        # Initial flux and precursor concentration: manufactured sine profiles
        # scaled by psi_0_mms / C_0_mms, evaluated at cell centers.
        for position in xrange(self.core_mesh_length):
            self.flux[position, 1] = 2.0*self.psi_0_mms * numpy.sin(position*self.dx + self.dx/2.0)
            test_moc.flux_t[position] = 2.0 * self.psi_0_mms * numpy.sin(position * self.dx + self.dx / 2.0)
            self.delayed_neutron_precursor_concentration[position, 1] = self.C_0_mms * numpy.sin(position * self.dx + self.dx/2.0)
        self.current[:, 1] = numpy.zeros(self.core_mesh_length+1)
        # Record initial conditions
        flux_t[:, 0] = numpy.array(self.flux[:, 1])
        precursor_t[:, 0] = numpy.array(self.delayed_neutron_precursor_concentration[:, 1])
        t = self.dt
        for iteration in xrange(steps):
            converged = False
            while not converged:
                self.update_eddington(test_moc.eddington_factors)
                # Store previous solutions to evaluate convergence
                last_flux = numpy.array(self.flux[:, 0])
                last_current = numpy.array(self.current[:, 0])
                last_dnpc = numpy.array(self.delayed_neutron_precursor_concentration[:, 0])
                self.solve_linear_system_mms(t)
                # Calculate difference between previous and present solutions
                flux_diff = abs(last_flux - self.flux[:, 0])
                # Interior current cells only; boundary currents are fixed.
                current_diff = abs(last_current[1:-1] - self.current[1:-1, 0])
                dnpc_diff = abs(last_dnpc - self.delayed_neutron_precursor_concentration[:, 0])
                eddington_diff = abs(test_moc.eddington_factors - test_moc.eddington_factors_old)
                # Relative tolerance on flux/Eddington, absolute on current/precursors.
                if numpy.max(flux_diff / abs(self.flux[:, 0])) < 1E-6 \
                        and numpy.max(current_diff) < 1E-6 \
                        and numpy.max(dnpc_diff) < 1E-6\
                        and numpy.max(eddington_diff / test_moc.eddington_factors) < 1E-6:
                    test_moc.update_variables(self.flux[:, 0],
                                              self.delayed_neutron_precursor_concentration[:, 0])
                    test_moc.iterate_alpha()
                    # Calculate difference between previous and present alpha
                    alpha_diff = abs(test_moc.alpha - test_moc.alpha_old)
                    if numpy.max(alpha_diff) < 1E-6:
                        # Fully converged: accept the step and advance time.
                        converged = True
                        test_moc.flux_t = numpy.array(self.flux[:, 0])
                        t = t + self.dt
                        print "time step t=" + str(t) + " completed. " + str(datetime.datetime.now().time())
                    else:
                        test_moc.solve_mms(t, False, False)
                else:
                    # Not yet converged: refresh MOC with the latest QD solution
                    # and perform another transport sweep.
                    test_moc.update_variables(self.flux[:, 0],
                                              self.delayed_neutron_precursor_concentration[:, 0])
                    test_moc.solve_mms(t, False, False)
            # Shift the accepted solution into the previous-time-step column.
            self.flux[:, 1] = numpy.array(self.flux[:, 0])
            self.current[:, 1] = numpy.array(self.current[:, 0])
            self.delayed_neutron_precursor_concentration[:, 1] = numpy.array(self.delayed_neutron_precursor_concentration[
                                                                             :, 0])
            flux_t[:, iteration + 1] = numpy.array(self.flux[:, 0])
            precursor_t[:, iteration + 1] = numpy.array(self.delayed_neutron_precursor_concentration[:, 0])
        # NOTE: the per-time-step matplotlib plotting block that used to live
        # here (mirroring the one in the uncoupled transient solve) was already
        # commented out; removed for clarity.
        # save final curves to csv files
        dnpc_filename = "output/precursor_concentration_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
        flux_filename = "output/flux_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
        current_filename = "output/current_N=" + str(self.core_mesh_length) + "_dt=" + str(self.dt) + ".csv"
        numpy.savetxt(dnpc_filename, self.delayed_neutron_precursor_concentration[:, 0], delimiter=",")
        numpy.savetxt(flux_filename, self.flux[:, 0], delimiter=",")
        numpy.savetxt(current_filename, self.current[:, 0], delimiter=",")
        completion_message = "Done! Saved N=" + str(self.core_mesh_length) + " dt=" + str(self.dt) + ".csv final time step solutions. "
        print(completion_message)
    def calc_q_z_mms(self, t):
        # Manufactured (MMS) source for the zeroth-moment (balance) equation,
        # evaluated at cell centers and scaled by exp(t).
        for position in xrange(self.core_mesh_length):
            # Absorption cross section for this cell's material.
            sig_a = self.sig_t[self.material[position]] - self.sig_s[self.material[position]]
            # A: time derivative + absorption - prompt fission contributions.
            A = 2.0*self.psi_0_mms*((1.0/self.v) + sig_a - (1-self.beta)*self.nu[self.material[position]]\
                *self.sig_f[self.material[position]])
            # B: delayed-neutron production from precursor decay.
            B = self.C_0_mms*self.lambda_eff
            self.q_z_mms[position, 0] = numpy.sin(self.dx * position + self.dx/2.0) * numpy.exp(t) * (A - B)
    def calc_q_q_mms(self, t):
        # Manufactured (MMS) source for the first-moment (current) equation,
        # evaluated on cell edges (core_mesh_length + 1 points).
        for position in xrange(self.core_mesh_length+1):
            self.q_q_mms[position, 0] = (2.0 / 3.0) * self.psi_0_mms * numpy.cos(self.dx * position) * numpy.exp(t)
        # accomodate initial conditions: zero source on both boundaries.
        self.q_q_mms[0, 0] = 0.0
        self.q_q_mms[-1,0] = 0.0
    def calc_q_p_mms(self, t):
        # Manufactured (MMS) source for the precursor-concentration equation.
        for position in xrange(self.core_mesh_length):
            # A: time derivative + decay - advection-coefficient - delayed fission production.
            A = self.C_0_mms*(1+self.lambda_eff-self.a) - 2.0 * self.beta * self.nu[self.material[position]]\
                * self.sig_f[self.material[position]] * self.psi_0_mms
            # NOTE(review): the first assignment to B below is immediately
            # overwritten and therefore dead code — presumably an older variant
            # of the advection term kept during development; confirm which form
            # is intended before deleting.
            B = self.a*(2.0*numpy.pi - self.dx*position - self.dx/2.0)*self.C_0_mms
            B = self.dnpc_velocity[position] * self.C_0_mms
            self.q_p_mms[position, 0] = A * numpy.sin(self.dx * position + self.dx/2.0) * numpy.exp(t)\
                + B * numpy.cos(self.dx * position + self.dx/2.0) * numpy.exp(t)
if __name__ == "__main__":
    # Driver: run a short illustrative transient. The surrounding commented-out
    # calls are MMS convergence-study runs (solve_transient_mms /
    # solve_transient_mms_coupled at N=50..1000, dt=0.01..0.0005), kept for
    # reference:
    #   mms1 = QuasiDiffusionPrecursorConcentration("mms_inputs/n50dt01.yaml"); mms1.solve_transient_mms(100)
    #   mms2 = QuasiDiffusionPrecursorConcentration("mms_inputs/n100dt005.yaml"); mms2.solve_transient_mms(200)
    #   mms3 = QuasiDiffusionPrecursorConcentration("mms_inputs/n200dt0025.yaml"); mms3.solve_transient_mms(400)
    #   mms4 = QuasiDiffusionPrecursorConcentration("mms_inputs/n500dt001.yaml"); mms4.solve_transient_mms(1000)
    #   mms5 = QuasiDiffusionPrecursorConcentration("mms_inputs/n1000dt0005.yaml"); mms5.solve_transient_mms(2000)
    #   ...and the same series against solve_transient_mms_coupled(100..2000).
    # illustrative result
    test = QuasiDiffusionPrecursorConcentration("test_input.yaml")  # test for initialization
    test.solve_transient(5)
|
# The tools in this script positionally calculate operations on matrices,
# create blank and identity matrices, and convert short matrices to and from long matrices.
#
# Matrices use the following example notation:
# [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
#
# Which can be visually expressed as:
# [[0, [3, [6,
# 1, 4, 7,
# 2], 5], 8]]
#
# That is, a short matrix is composed of a list of sublists, where the sublists are lists of values in a column
# Creates a matrix of zeroes with a given number of rows and columns
def blank_matrix(sublists, items):
    """Return a short matrix of zeros: `sublists` columns of `items` entries each."""
    return [[0] * items for _ in range(sublists)]
# Create a short identity matrix
# e.g. 3 sublists and 3 items returns [[1,0,0],[0,1,0],[0,0,1]]
def identity_matrix(sublists, items):
    """Return a short identity matrix: zeros everywhere except ones on the diagonal."""
    rows = [[0 for _ in range(items)] for _ in range(sublists)]
    for diag in range(sublists):
        rows[diag][diag] = 1
    return rows
# Turn a short matrix (i.e. list of sublists of values) into a long matrix:
# [flat list of values, number of sublists, items per sublist]
def short_to_long(shortMatrix):
    """Flatten a short matrix into its long form [flat_values, n_sublists, n_items]."""
    flat = [value for column in shortMatrix for value in column]
    return [flat, len(shortMatrix), len(shortMatrix[0])]
# Turn a long matrix ([flat values, n_sublists, n_items]) back into a
# short matrix (i.e. list of sublists of values)
def long_to_short(longMatrix):
    """Rebuild the list-of-sublists form from a long-matrix triple."""
    items_per_sublist = longMatrix[2]
    chunks = zip(*[iter(longMatrix[0])] * items_per_sublist)
    return [list(chunk) for chunk in chunks]
# Test if two short matrices have the same number of rows and columns
def short_matrices_compatible(MatrixA, MatrixB):
    """Compare the dimensions of two short matrices.

    Returns a list of three 1/0 flags: same sublist count, same item count
    per sublist, same flattened length.
    """
    longA = short_to_long(MatrixA)
    longB = short_to_long(MatrixB)
    return [
        1 if longA[1] == longB[1] else 0,
        1 if longA[2] == longB[2] else 0,
        1 if len(longA[0]) == len(longB[0]) else 0,
    ]
# Turn a list into a list of sublists where sublist contents are repeated list values
# e.g. list_to_sublists([0, 1, 2], 3) returns [[0, 0, 0], [1, 1, 1], [2, 2, 2]]
def list_to_sublists(List, Repeat):
    """Expand each value of `List` into a sublist of `Repeat` copies."""
    return [[value] * Repeat for value in List]
# Turns a list of sublists into a list of sums
# e.g. turns [[0, 0, 0], [1, 1, 1], [2, 2, 2]] into [0, 3, 6]
def list_of_sublists_to_list_of_sums(ListOfSublists):
    """Sum each sublist and return the list of sums."""
    return list(map(sum, ListOfSublists))
# Positionally add the values in one matrix to the values in a second matrix
def add_short(shortA, shortB):
    """Element-wise sum of two dimension-compatible short matrices."""
    if short_matrices_compatible(shortA, shortB) != [1, 1, 1]:
        raise Exception('Tried positionally adding two matrices of different dimensions')
    longA = short_to_long(shortA)
    longB = short_to_long(shortB)
    summed = [a + b for a, b in zip(longA[0], longB[0])]
    return long_to_short([summed, longA[1], longA[2]])
# Positionally subtract the values in one matrix for the values in a second matrix
def subtract_short(shortA, shortB):
    """Element-wise difference of two dimension-compatible short matrices."""
    if short_matrices_compatible(shortA, shortB) != [1, 1, 1]:
        raise Exception('Tried positionally subtracting two matrices of different dimensions')
    longA = short_to_long(shortA)
    longB = short_to_long(shortB)
    diffs = [a - b for a, b in zip(longA[0], longB[0])]
    return long_to_short([diffs, longA[1], longA[2]])
# Positionally multiply the values in one matrix by the values in a second matrix
def multiply_short(shortA, shortB):
    """Element-wise product of two dimension-compatible short matrices."""
    if short_matrices_compatible(shortA, shortB) != [1, 1, 1]:
        raise Exception('Tried positionally multiplying two matrices of different dimensions')
    longA = short_to_long(shortA)
    longB = short_to_long(shortB)
    products = [a * b for a, b in zip(longA[0], longB[0])]
    return long_to_short([products, longA[1], longA[2]])
# Positionally divide the values in one matrix by the values in a second matrix
def divide_short(shortA, shortB):
    """Element-wise (true) division of two dimension-compatible short matrices."""
    if short_matrices_compatible(shortA, shortB) != [1, 1, 1]:
        raise Exception('Tried positionally dividing two matrices of different dimensions')
    longA = short_to_long(shortA)
    longB = short_to_long(shortB)
    quotients = [a / b for a, b in zip(longA[0], longB[0])]
    return long_to_short([quotients, longA[1], longA[2]])
# Turn a list of columns into a list of rows
def transpose(shortA):
    """Return the transpose of a short matrix (columns become rows).

    Fix: removed a leftover debug ``print`` of the result, which polluted
    stdout on every call.
    """
    return [list(row) for row in zip(*shortA)]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Request body validating middleware for OpenStack Identity resources."""
from keystone.common.validation import validators
def lazy_validate(request_body_schema, resource_to_validate):
    """A non-decorator way to validate a request, to be used inline.

    :param request_body_schema: a schema to validate the resource reference
    :param resource_to_validate: dictionary to validate
    :raises keystone.exception.ValidationError: if `resource_to_validate` is
            None. (see wrapper method below).
    :raises TypeError: at decoration time when the expected resource to
            validate isn't found in the decorated method's signature
    """
    validators.SchemaValidator(request_body_schema).validate(resource_to_validate)
def nullable(property_schema):
    """Clone a property schema into one that also accepts null.

    :param dict property_schema: schema to clone into a nullable schema
    :returns: a new dict schema
    """
    # TODO(dstanek): deal with the case where type is already a list; we don't
    # do that yet so I'm not wasting time on it
    nullable_schema = dict(property_schema)
    nullable_schema['type'] = [property_schema['type'], 'null']
    # When the schema constrains values with an enum (e.g. booleans), null must
    # be added to the enum too, otherwise enum validation still rejects None.
    # JSON Schema requires enum entries to be unique, hence the membership check.
    if 'enum' in nullable_schema and None not in nullable_schema['enum']:
        # In the enum the 'null' is NoneType
        nullable_schema['enum'].append(None)
    return nullable_schema
def add_array_type(property_schema):
    """Convert the parameter schema to be of type list.

    :param dict property_schema: schema to add array type to
    :returns: a new dict schema
    """
    array_schema = dict(property_schema)
    array_schema['type'] = [property_schema['type'], 'array']
    return array_schema
|
import mock
import re
from testtools import TestCase, ExpectedException # noqa
from padre import channel as c
from padre.tests import common
from padre.wsgi_servers import sensu
from webob import exc
class SensuHookApplicationTest(TestCase):
    """Unit tests for padre's Sensu webhook WSGI application (HookApplication)."""

    def setUp(self):
        """Create a fresh fake bot and hook application for each test."""
        super(SensuHookApplicationTest, self).setUp()
        self.bot = common.make_bot()
        self.hook = sensu.HookApplication(self.bot)

    def test_creation(self):
        # The app should register exactly one route (GET/POST on sensu-webhook)
        # and keep a reference to the bot it was constructed with.
        self.assertEqual(
            [(re.compile('^sensu-webhook[/]?(.*)$'),
              ['GET', 'POST'], self.hook.hook)],
            self.hook.urls
        )
        self.assertEqual(self.bot, self.hook.bot)

    def test_raised_with_wrong_env(self):
        # A non-dict WSGI environ must raise TypeError.
        with ExpectedException(TypeError, 'WSGI environ must be a dict;'):
            self.hook.__call__('env', 'resp')

    def test_raised_with_wrong_resp(self):
        # A non-callable start_response must raise TypeError.
        env = dict()
        env['PATH_INFO'] = '.'
        with ExpectedException(TypeError, "'str' object is not callable"):
            self.hook.__call__(env, 'resp')

    @mock.patch('padre.wsgi_servers.sensu.Request')
    def test_magic_call(self, request):
        # Route matching should dispatch the request to the registered handler.
        env = dict()
        env['PATH_INFO'] = '.'
        resp = mock.MagicMock()
        resp.path.lstrip.return_value = 'sensu-webhook'
        resp.method = 'GET'
        request.return_value = resp
        handler = mock.MagicMock()
        handler.return_value = lambda x, y: 'success, lol'
        self.hook.hook = handler
        # NOTE(review): `re` below shadows the re module for the rest of this
        # method; harmless here but worth renaming.
        re, meth, hook = self.hook.urls[0]
        self.hook.urls = [(re, meth, handler)]
        self.assertEqual('success, lol', self.hook.__call__(env, 'resp'))

    def test_hook_raises_with_bad_kind(self):
        # Missing kind header -> HTTP 400.
        req = mock.MagicMock()
        req.headers.get.return_value = None
        with ExpectedException(
                exc.HTTPBadRequest,
                'The server could not comply with the request'):
            self.hook.hook(req)

    def test_hook_raises_with_bad_request_type(self):
        # GET is not acceptable for delivering a payload -> HTTP 400.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10']
        req.method = 'GET'
        with ExpectedException(
                exc.HTTPBadRequest,
                'The server could not comply with the request'):
            self.hook.hook(req)

    def test_hook_raises_with_wrong_content_type(self):
        # Non-JSON content type -> HTTP 400.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10']
        req.method = 'POST'
        req.content_type = 'text/html'
        with ExpectedException(
                exc.HTTPBadRequest,
                'The server could not comply with the request'):
            self.hook.hook(req)

    def test_hook_raises_with_zero_length(self):
        # Declared zero content length -> HTTP 400.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10']
        req.method = 'POST'
        req.content_type = 'application/json'
        req.content_length = 0
        with ExpectedException(
                exc.HTTPBadRequest,
                'The server could not comply with the request'):
            self.hook.hook(req)

    def test_hook_raises_with_failed_network(self):
        # IOError while reading the body -> HTTP 400.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10']
        req.method = 'POST'
        req.content_type = 'application/json'
        req.content_length = 100
        req.body_file.read.side_effect = IOError('IO Error')
        with ExpectedException(
                exc.HTTPBadRequest,
                'The server could not comply with the request'):
            self.hook.hook(req)

    def test_hook_raises_with_empty_body(self):
        # Body read returns a MagicMock (no real JSON) -> HTTP 400.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10']
        req.method = 'POST'
        req.content_type = 'application/json'
        req.content_length = 100
        with ExpectedException(
                exc.HTTPBadRequest,
                'The server could not comply with the request'):
            self.hook.hook(req)

    def test_hook_successful_with_proper_data(self):
        # Well-formed JSON payload -> 202 and both targeted and broadcast
        # messages submitted to the bot.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10']
        req.method = 'POST'
        req.content_type = 'application/json'
        req.content_length = 100
        req.body_file.read.return_value = ('{ "action": "create",'
                                           '"first": "data" }')
        self.assertEqual('202 Accepted', self.hook.hook(req).status)
        self.bot.submit_message.assert_any_call(mock.ANY, c.TARGETED)
        self.bot.submit_message.assert_any_call(mock.ANY, c.BROADCAST)

    def test_hook_raises_with_wrong_credentials(self):
        # A configured secret plus a bad signature header -> HTTP 401.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10', 'signature']
        req.method = 'POST'
        req.content_type = 'application/json'
        req.content_length = 100
        self.bot.config.sensu.hook.secret = 'secret'
        with ExpectedException(
                exc.HTTPUnauthorized,
                'This server could not verify that you are authorized'):
            self.hook.hook(req)

    def test_hook_raises_with_wrong_body(self):
        # Valid JSON that is not an object (a list) -> HTTP 400.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10', 'signature']
        req.method = 'POST'
        req.content_type = 'application/json'
        req.content_length = 100
        req.body_file.read.return_value = b'[ "test" ]'
        with ExpectedException(exc.HTTPBadRequest,
                               'The server could not comply with the request'):
            self.hook.hook(req)

    def test_hook_successful_if_message_not_sent(self):
        # Delivery failures inside the bot must not bubble up; the webhook
        # still answers 202.
        req = mock.MagicMock()
        req.headers.get.side_effect = ['kind', '10']
        req.method = 'POST'
        req.content_type = 'application/json'
        req.content_length = 100
        req.body_file.read.return_value = ('{ "action": "create",'
                                           '"first": "data" }')
        self.hook.bot.submit_message.side_effect = RuntimeError(
            'Error')
        self.assertEqual('202 Accepted', self.hook.hook(req).status)
        self.bot.submit_message.assert_called()
|
from logging import getLogger
class AbstractETAEncoder(object):
    """ETA Encoder

    ETA Encoder is used to encode the spatiotemporal information in trajectory.
    We abstract the encoding operation from the Dataset Module to facilitate developers
    to achieve more flexible and diverse trajectory representation extraction. It is worth
    noting that the representation extraction involved here is not learnable and fixed.
    Any learnable representation extraction, e.g. embedding, should be implemented in
    Model Module.

    Attributes:
        config (libcity.ConfigParser): The configuration of the encoder.
        pad_item (dict): The key is a feature's name and the value should be the corresponding
            padding value. If a feature does not need to be padded, don't insert it into
            this dict. In other words, libcity.dataset.Batch will pad all features in pad_item.keys().
        feature_dict (dict): The key is a feature's name and the value should be the data type of
            the corresponding feature. When libcity.dataset.Batch converts the encoded trajectory tuple
            to tensor, it will refer to this attribute to know the feature name and data type corresponding
            to each element in the tuple.
        data_feature (dict): The data_feature contains the statistics features of the encoded dataset, which is
            used to init the model. For example, if the model uses torch.nn.Embedding to embed location id and time id,
            the data_feature should contain loc_size and time_size to tell the model how to init the embedding layer.
    """

    def __init__(self, config):
        """Init Encoder with its config

        Args:
            config (libcity.ConfigParser): Dict-like Object. Can access any config by config[key].
        """
        self.config = config
        self._logger = getLogger()
        # Feature name -> padding value (only features that need padding).
        self.pad_item = {}
        # Feature name -> tensor data type, consumed by libcity.dataset.Batch.
        self.feature_dict = {}
        # Dataset statistics handed to the model at init time.
        self.data_feature = {}
        # Path of the cache file for the encoded dataset; set by subclasses.
        self.cache_file_name = ''

    def encode(self, uid, trajectories, dyna_feature_column):
        """Encode trajectories of user uid.

        Args:
            uid (int): The uid of user. If there is no need to encode uid, just keep it.
            trajectories (list of trajectory): The trajectories of user. Each trajectory is
                a sequence of spatiotemporal points, where each point is a list
                carrying all useful fields of one record of the raw data
                (refer to the corresponding .dyna file for details):
                [dyna_id, type, time, entity_id, traj_id, coordinates/location, properties]
                So ``trajectories`` is a list of lists of such point lists.
            dyna_feature_column (dict): The key is a feature's name and the value is the
                corresponding column id in the .dyna file.

        Returns:
            list: The list of encoded trajectories. Each encoded trajectory should be a
            tuple containing all features extracted from the input trajectory; it will
            subsequently be converted to a torch.tensor and fed directly to the model
            (see more in libcity.Batch). For example, DeeptteEncoder produces:
            [current_longi, current_lati, current_tim, current_dis, current_state,
            uid, weekid, timeid, dist, time]
            Make sure the order of features matches the order in self.feature_dict.
        """
        # Abstract method: subclasses implement the actual encoding. This stub
        # intentionally returns None.

    def gen_data_feature(self):
        """After encoding all trajectories, this method is called to tell the encoder
        that it can generate ``data_feature`` and ``pad_item``.
        """
        # Abstract hook: subclasses populate self.data_feature / self.pad_item here.
|
from util.conf import JIRA_SETTINGS, CONFLUENCE_SETTINGS, BITBUCKET_SETTINGS, JSM_SETTINGS
from util.api.jira_clients import JiraRestClient
from util.api.confluence_clients import ConfluenceRestClient
from util.api.bitbucket_clients import BitbucketRestClient
from lxml import etree
import json
JIRA = 'jira'
CONFLUENCE = 'confluence'
BITBUCKET = 'bitbucket'
JSM = 'jsm'
DEFAULT_ACTIONS = 'util/default_test_actions.json'
def read_json_file(file_path):
    """Load a JSON file and return its parsed contents."""
    with open(file_path) as json_file:
        return json.load(json_file)
class BaseApplication:
    """Base wrapper pairing a product REST client with its analytics metadata."""

    type = None
    version = None
    nodes_count = None
    dataset_information = None

    def __init__(self, api_client, config_yml):
        """Instantiate the REST client from the YAML-backed settings object."""
        self.client = api_client(host=config_yml.server_url,
                                 user=config_yml.admin_login, password=config_yml.admin_password)
        self.config = config_yml

    def get_default_actions(self):
        """Return the default test actions configured for this application type."""
        return read_json_file(DEFAULT_ACTIONS)[self.type]

    @property
    def jmeter_default_actions(self):
        return self.get_default_actions()['jmeter']

    @property
    def selenium_default_actions(self):
        return self.get_default_actions()['selenium']

    @property
    def locust_default_actions(self):
        return self.get_default_actions()['locust']
class Jira(BaseApplication):
    """Jira application wrapper exposing version, node count and dataset size."""

    type = JIRA

    @property
    def version(self):
        # Empty string when the server info carries no version field.
        return self.client.get_server_info().get('version', '')

    @property
    def nodes_count(self):
        return self.client.get_cluster_nodes_count(jira_version=self.version)

    def __issues_count(self):
        return self.client.get_total_issues_count()

    @property
    def dataset_information(self):
        return f"{self.__issues_count()} issues"
class Confluence(BaseApplication):
    """Confluence application wrapper exposing version, node count and dataset size."""

    type = CONFLUENCE

    @property
    def version(self):
        return self.client.get_confluence_version()

    @property
    def nodes_count(self):
        return self.client.get_confluence_nodes_count()

    @property
    def dataset_information(self):
        pages_count = self.client.get_total_pages_count()
        return f"{pages_count} pages"
class Bitbucket(BaseApplication):
    """Bitbucket application wrapper; repo count is scraped from the system page."""

    type = BITBUCKET
    bitbucket_repos_selector = "#content-bitbucket\.atst\.repositories-0>.field-group>.field-value" # noqa W605

    @property
    def version(self):
        return self.client.get_bitbucket_version()

    @property
    def nodes_count(self):
        return self.client.get_bitbucket_nodes_count()

    @property
    def dataset_information(self):
        system_page_html = self.client.get_bitbucket_system_page()
        if 'Repositories' not in system_page_html:
            return 'Could not parse number of Bitbucket repositories'
        dom = etree.HTML(system_page_html)
        repos_count = dom.cssselect(self.bitbucket_repos_selector)[0].text
        return f'{repos_count} repositories'
class Jsm(BaseApplication):
    """Jira Service Management wrapper; node count comes from the underlying Jira."""

    type = JSM

    @property
    def version(self):
        return self.client.get_service_desk_info().get('version', '')

    @property
    def nodes_count(self):
        server_version = self.client.get_server_info().get('version', '')
        return self.client.get_cluster_nodes_count(jira_version=server_version)

    def __issues_count(self):
        return self.client.get_total_issues_count()

    @property
    def dataset_information(self):
        return f"{self.__issues_count()} issues"
class ApplicationSelector:
    """Resolve a CLI application-name argument into a configured application object."""

    APP_TYPE_MSG = ('ERROR: Please run util/analytics.py with application type as argument. '
                    'E.g. python util/analytics.py jira/confluence/bitbucket/jsm')

    def __init__(self, app_name):
        self.application_type = self.__get_application_type(app_name)

    def __get_application_type(self, app_name):
        # Normalize and validate; exit with usage message on unknown types.
        normalized = app_name.lower()
        if normalized not in [JIRA, CONFLUENCE, BITBUCKET, JSM]:
            raise SystemExit(self.APP_TYPE_MSG)
        return normalized

    @property
    def application(self):
        factories = {
            JIRA: lambda: Jira(api_client=JiraRestClient, config_yml=JIRA_SETTINGS),
            CONFLUENCE: lambda: Confluence(api_client=ConfluenceRestClient, config_yml=CONFLUENCE_SETTINGS),
            BITBUCKET: lambda: Bitbucket(api_client=BitbucketRestClient, config_yml=BITBUCKET_SETTINGS),
            JSM: lambda: Jsm(api_client=JiraRestClient, config_yml=JSM_SETTINGS),
        }
        factory = factories.get(self.application_type)
        # Unknown types cannot occur after __init__ validation; mirror the
        # original implicit-None behavior just in case.
        return factory() if factory is not None else None
|
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for classifier."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from six.moves import xrange
import paddle.fluid as fluid
from model.ernie import ErnieModel
import csv
def create_model(args, pyreader_name, ernie_config, is_prediction=False):
    """Build the ERNIE sequence-classification graph (legacy fluid API).

    Args:
        args: run config; uses max_seq_len, use_fp16, num_labels, loss_scaling.
        pyreader_name: unique name for the py_reader feeding this program.
        ernie_config: ErnieModel configuration.
        is_prediction: when True, return only the softmax probabilities and
            feed target names (no loss/accuracy graph).

    Returns:
        (pyreader, probs, feed_targets_name) in prediction mode, otherwise
        (pyreader, graph_vars) where graph_vars holds loss/probs/accuracy/
        labels/num_seqs/qids variables.
    """
    # Reader yields: token ids, sentence ids, position ids, attention mask,
    # labels, and query ids — all padded to max_seq_len.
    pyreader = fluid.layers.py_reader(
        capacity=50,
        shapes=[[-1, args.max_seq_len, 1], [-1, args.max_seq_len, 1],
                [-1, args.max_seq_len, 1], [-1, args.max_seq_len, 1], [-1, 1],
                [-1, 1]],
        dtypes=['int64', 'int64', 'int64', 'float32', 'int64', 'int64'],
        lod_levels=[0, 0, 0, 0, 0, 0],
        name=pyreader_name,
        use_double_buffer=True)
    (src_ids, sent_ids, pos_ids, input_mask, labels,
     qids) = fluid.layers.read_file(pyreader)
    ernie = ErnieModel(
        src_ids=src_ids,
        position_ids=pos_ids,
        sentence_ids=sent_ids,
        input_mask=input_mask,
        config=ernie_config,
        use_fp16=args.use_fp16)
    # Pooled [CLS] representation, regularized with dropout before the classifier.
    cls_feats = ernie.get_pooled_output()
    cls_feats = fluid.layers.dropout(
        x=cls_feats,
        dropout_prob=0.1,
        dropout_implementation="upscale_in_train")
    logits = fluid.layers.fc(
        input=cls_feats,
        size=args.num_labels,
        param_attr=fluid.ParamAttr(
            name="cls_out_w",
            initializer=fluid.initializer.TruncatedNormal(scale=0.02)),
        bias_attr=fluid.ParamAttr(
            name="cls_out_b", initializer=fluid.initializer.Constant(0.)))
    if is_prediction:
        probs = fluid.layers.softmax(logits)
        feed_targets_name = [
            src_ids.name, pos_ids.name, sent_ids.name, input_mask.name
        ]
        return pyreader, probs, feed_targets_name
    # Training/eval path: softmax cross-entropy loss plus accuracy metric.
    ce_loss, probs = fluid.layers.softmax_with_cross_entropy(
        logits=logits, label=labels, return_softmax=True)
    loss = fluid.layers.mean(x=ce_loss)
    # Loss scaling compensates for reduced fp16 gradient precision.
    if args.use_fp16 and args.loss_scaling > 1.0:
        loss *= args.loss_scaling
    num_seqs = fluid.layers.create_tensor(dtype='int64')
    accuracy = fluid.layers.accuracy(input=probs, label=labels, total=num_seqs)
    graph_vars = {
        "loss": loss,
        "probs": probs,
        "accuracy": accuracy,
        "labels": labels,
        "num_seqs": num_seqs,
        "qids": qids
    }
    # Keep the variables alive across executor runs so they can be fetched.
    for k, v in graph_vars.items():
        v.persistable = True
    return pyreader, graph_vars
def evaluate_mrr(preds):
    """Mean reciprocal rank over (qid, score, label) triples.

    Expects ``preds`` sorted by qid and, within a qid, by descending score.
    Each query contributes 1/rank of its first positive-label entry (0 if none).
    """
    total_mrr = 0.0
    query_count = 0.0
    rank = 0.0
    hit_found = False
    previous_qid = None
    for qid, score, label in preds:
        if qid != previous_qid:
            # New query: reset the per-query rank and hit state.
            rank = 0.0
            query_count += 1
            hit_found = False
            previous_qid = qid
        rank += 1
        if not hit_found and label != 0:
            total_mrr += 1.0 / rank
            hit_found = True
    return total_mrr / query_count
def evaluate_map(preds):
    """Mean average precision over (qid, score, label) triples.

    Expects ``preds`` sorted by qid and, within a qid, by descending score.

    Fix: uses the builtin ``range`` instead of ``six.moves.xrange``, removing
    this function's dependency on the py2 compatibility shim (iteration
    semantics are identical).
    """
    def single_map(st, en):
        # Average precision over preds[st:en]; 0.0 when the query has no positives.
        total_p = 0.0
        correct_num = 0.0
        for index in range(st, en):
            if int(preds[index][2]) != 0:
                correct_num += 1
                total_p += correct_num / (index - st + 1)
        if int(correct_num) == 0:
            return 0.0
        return total_p / correct_num

    last_qid = None
    total_map = 0.0
    qnum = 0.0
    st = 0
    for i in range(len(preds)):
        qid = preds[i][0]
        if qid != last_qid:
            qnum += 1
            if last_qid is not None:
                # Close out the previous query's slice.
                total_map += single_map(st, i)
            st = i
            last_qid = qid
    total_map += single_map(st, len(preds))
    return total_map / qnum
def evaluate(exe, test_program, test_pyreader, graph_vars, eval_phase):
    """Run evaluation (or fetch one batch of train metrics) for the classifier.

    Args:
        exe: fluid Executor.
        test_program: program to run during eval phases.
        test_pyreader: py_reader feeding the eval dataset.
        graph_vars: dict of graph variables from ``create_model``.
        eval_phase: "train" to fetch current train metrics, otherwise a tag
            used in the printed report.

    Returns:
        dict with loss/accuracy (and learning_rate when present) in the train
        phase; eval phases print their metrics and return None.

    Fix: the learning rate is the 4th fetched value (index 3) — the original
    ``outputs[4]`` indexed past the end of the 4-element fetch list.
    """
    train_fetch_list = [
        graph_vars["loss"].name, graph_vars["accuracy"].name,
        graph_vars["num_seqs"].name
    ]
    if eval_phase == "train":
        if "learning_rate" in graph_vars:
            train_fetch_list.append(graph_vars["learning_rate"].name)
        outputs = exe.run(fetch_list=train_fetch_list)
        ret = {"loss": np.mean(outputs[0]), "accuracy": np.mean(outputs[1])}
        if "learning_rate" in graph_vars:
            # learning_rate was appended after loss/accuracy/num_seqs -> index 3.
            ret["learning_rate"] = float(outputs[3][0])
        return ret

    test_pyreader.start()
    total_cost, total_acc, total_num_seqs, total_label_pos_num, total_pred_pos_num, total_correct_num = 0.0, 0.0, 0.0, 0.0, 0.0, 0.0
    qids, labels, scores = [], [], []
    time_begin = time.time()
    fetch_list = [
        graph_vars["loss"].name, graph_vars["accuracy"].name,
        graph_vars["probs"].name, graph_vars["labels"].name,
        graph_vars["num_seqs"].name, graph_vars["qids"].name
    ]
    while True:
        try:
            np_loss, np_acc, np_probs, np_labels, np_num_seqs, np_qids = exe.run(
                program=test_program, fetch_list=fetch_list)
            # Accumulate sequence-weighted loss/accuracy and collect per-example
            # qid/label/positive-class-score for ranking metrics.
            total_cost += np.sum(np_loss * np_num_seqs)
            total_acc += np.sum(np_acc * np_num_seqs)
            total_num_seqs += np.sum(np_num_seqs)
            labels.extend(np_labels.reshape((-1)).tolist())
            qids.extend(np_qids.reshape(-1).tolist())
            scores.extend(np_probs[:, 1].reshape(-1).tolist())
            np_preds = np.argmax(np_probs, axis=1).astype(np.float32)
            # Counts for precision/recall/F1 (binary, positive class = 1).
            total_label_pos_num += np.sum(np_labels)
            total_pred_pos_num += np.sum(np_preds)
            total_correct_num += np.sum(np.dot(np_preds, np_labels))
        except fluid.core.EOFException:
            test_pyreader.reset()
            break
    time_end = time.time()
    if len(qids) == 0:
        # No query ids in the dataset: report plain loss/accuracy only.
        print(
            "[%s evaluation] ave loss: %f, ave acc: %f, data_num: %d, elapsed time: %f s"
            % (eval_phase, total_cost / total_num_seqs, total_acc /
               total_num_seqs, total_num_seqs, time_end - time_begin))
    else:
        r = total_correct_num / total_label_pos_num
        p = total_correct_num / total_pred_pos_num
        f = 2 * p * r / (p + r)
        assert len(qids) == len(labels) == len(scores)
        # Sort by qid, then by descending score, as required by MRR/MAP.
        preds = sorted(
            zip(qids, scores, labels), key=lambda elem: (elem[0], -elem[1]))
        mrr = evaluate_mrr(preds)
        map = evaluate_map(preds)
        print(
            "[%s evaluation] ave loss: %f, ave_acc: %f, mrr: %f, map: %f, p: %f, r: %f, f1: %f, data_num: %d, elapsed time: %f s"
            % (eval_phase, total_cost / total_num_seqs,
               total_acc / total_num_seqs, mrr, map, p, r, f, total_num_seqs,
               time_end - time_begin))
|
import os
from unittest.mock import Mock
import boto3
from boto3.dynamodb.table import TableResource
from boto3.resources.base import ServiceResource
from botocore.session import Session
from mock.mock import MagicMock
# Active AWS profile name taken from the environment, or None when unset.
profile = os.environ['AWS_PROFILE'] if 'AWS_PROFILE' in os.environ else None
def resource(service_name, region_name=None, api_version=None,
             use_ssl=True, verify=None, endpoint_url=None,
             aws_access_key_id=None, aws_secret_access_key=None,
             aws_session_token=None, config=None):
    """
    Stand-in for ``boto3.session.Session.resource``, wired in below as the
    ``side_effect`` of the mocked session's ``resource`` method.

    Only ``service_name`` is meaningful here; the remaining parameters mirror
    the real boto3 signature so callers can pass them without error, and they
    are intentionally ignored.

    :param service_name: name of the AWS service (e.g. ``'dynamodb'``).
    :param region_name: accepted for signature compatibility; ignored.
    :param api_version: accepted for signature compatibility; ignored.
    :param use_ssl: accepted for signature compatibility; ignored.
    :param verify: accepted for signature compatibility; ignored.
    :param endpoint_url: accepted for signature compatibility; ignored.
    :param aws_access_key_id: accepted for signature compatibility; ignored.
    :param aws_secret_access_key: accepted for signature compatibility; ignored.
    :param aws_session_token: accepted for signature compatibility; ignored.
    :param config: accepted for signature compatibility; ignored.
    :return: a ``Mock`` standing in for
        ``boto3.resources.base.ServiceResource`` whose ``Table(...)`` call
        always returns the module-level ``table_mock``.
    """
    # NOTE(review): Mock(ServiceResource) would enforce the real spec, but a
    # bare Mock is used so arbitrary attributes can be stubbed freely.
    # resource_mock = Mock(ServiceResource)
    resource_mock = Mock()
    # Every Table(...) lookup resolves to the shared module-level table mock.
    resource_mock.Table.return_value = table_mock
    return resource_mock
# Pre-built iterator mock.
# NOTE(review): `iterable` appears unused in this module — confirm whether a
# consumer patches it in before removing.
iterable = MagicMock(return_value=iter([MagicMock(return_value=1), MagicMock(return_value=2)]))
# Shared DynamoDB table mock returned by resource(...).Table(...).
table_mock = Mock()
# table_mock.scan = Mock()
# scan() returns an empty result set.
# NOTE(review): the zero-argument lambda raises TypeError if scan() is ever
# called with arguments (e.g. FilterExpression=...) — confirm call sites.
table_mock.scan.side_effect = lambda: {
    'Items': []
}
table_mock.item_count = 0
# put_item() reports success without storing anything (same zero-arg caveat).
table_mock.put_item.side_effect = lambda: {"success": "true"}
# Mocked botocore session carrying the profile resolved from AWS_PROFILE.
botocore_session = Mock(Session)
botocore_session.profile = profile
# Mocked boto3 session; resource() above builds its service resources.
session_mock = Mock(spec=boto3.session.Session)
session_mock._session = botocore_session
session_mock.resource.side_effect = resource
connection_mock = session_mock.resource('dynamodb', region_name="sa-east-1")
|
import typing
import logging
import urllib.parse
import requests
import presalytics.lib.tools.ooxml_tools
import presalytics.lib.exceptions
if typing.TYPE_CHECKING:
from presalytics.client.api import Client
from io import BytesIO
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
def story_post_file_bytes(client: 'Client',
                          binary_obj: 'BytesIO',
                          filename: str,
                          mime_type: str = None):
    """
    Create a Presalytics API Story object from a file-like `io.BytesIO` object.  Helpful for server-side
    interaction with the Presalytics Story API

    Parameters
    ----------
    client : presalytics.client.api.Client
        A client object for making api calls

    binary_obj : io.BytesIO
        A file-like object for storing file-data in memory.  Often found in multipart messages
        uploaded from browsers.

    filename : str
        The filename of the object to be uploaded

    mime_type : str, optional
        If known, please add the mimetype of the file.  Otherwise, this method will execute an
        additional API call to ascertain the file's mimetype

    Returns
    ----------
    A `presalytics.client.presalytics_story.models.story.Story` containing information about the Story object in the Presalytics API

    Raises
    ----------
    presalytics.lib.exceptions.ApiError
        When the POST request fails or the API responds with an error status.
    """
    if not mime_type:
        # Extra round-trip: ask the API to infer the mimetype from the filename.
        mime_type = presalytics.lib.tools.ooxml_tools.get_mime_type_from_filename(client, filename)
    _file = {'file': (filename, binary_obj, mime_type,)}
    headers = client.get_auth_header()
    headers.update(client.get_request_id_header())
    headers.update({
        'User-Agent': client.story.api_client.user_agent,
        'Accept': 'application/json'
    })
    endpoint = urllib.parse.urljoin(client.story.api_client.configuration.host, 'story/file')
    # Explicit sentinel instead of probing locals(): if requests.post() raises,
    # `resp` is still None and we report a generic 500.
    resp = None
    try:
        resp = requests.post(endpoint, headers=headers, files=_file)
    except Exception as ex:
        message = "An error occured in the presalytics API client"
        code = resp.status_code if resp is not None else 500
        # Chain the original exception so the root cause is not lost.
        raise presalytics.lib.exceptions.ApiError(message=message, status_code=code) from ex
    # NOTE(review): assumes the API always returns a JSON body, even on error
    # statuses — a non-JSON body would raise here; confirm against the API.
    data = resp.json()
    if resp.status_code > 299:
        logger.error(data['detail'])
        raise presalytics.lib.exceptions.ApiError(message=data["detail"], status_code=resp.status_code)
    try:
        # Use the generated client's private deserializer to build the model.
        story = client.story.api_client._ApiClient__deserialize(data, 'Story')
        return story
    except Exception as ex:
        # Best-effort fallback: log and hand back the raw payload.
        logger.error("Story object could not be deserialized.")
        logger.exception(ex)
        return data
|
# subprocess - Subprocesses with accessible I/O streams
#
# For more information about this module, see PEP 324.
#
# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
#
# Licensed to PSF under a Contributor Agreement.
# See http://www.python.org/2.4/license for licensing details.
r"""Subprocesses with accessible I/O streams
This module allows you to spawn processes, connect to their
input/output/error pipes, and obtain their return codes.
For a complete description of this module see the Python documentation.
Main API
========
run(...): Runs a command, waits for it to complete, then returns a
CompletedProcess instance.
Popen(...): A class for flexibly executing a command in a new process
Constants
---------
DEVNULL: Special value that indicates that os.devnull should be used
PIPE: Special value that indicates a pipe should be created
STDOUT: Special value that indicates that stderr should go to stdout
Older API
=========
call(...): Runs a command, waits for it to complete, then returns
the return code.
check_call(...): Same as call() but raises CalledProcessError()
if return code is not 0
check_output(...): Same as check_call() but returns the contents of
stdout instead of a return code
getoutput(...): Runs a command in the shell, waits for it to complete,
then returns the output
getstatusoutput(...): Runs a command in the shell, waits for it to complete,
then returns a (exitcode, output) tuple
"""
import builtins
import errno
import io
import os
import time
import signal
import sys
import threading
import warnings
import contextlib
from time import monotonic as _time
import types
try:
import pwd
except ImportError:
pwd = None
try:
import grp
except ImportError:
grp = None
__all__ = ["Popen", "PIPE", "STDOUT", "call", "check_call", "getstatusoutput",
"getoutput", "check_output", "run", "CalledProcessError", "DEVNULL",
"SubprocessError", "TimeoutExpired", "CompletedProcess"]
# NOTE: We intentionally exclude list2cmdline as it is
# considered an internal implementation detail. issue10838.
try:
import msvcrt
import _winapi
_mswindows = True
except ModuleNotFoundError:
_mswindows = False
import _posixsubprocess
import select
import selectors
else:
from _winapi import (CREATE_NEW_CONSOLE, CREATE_NEW_PROCESS_GROUP,
STD_INPUT_HANDLE, STD_OUTPUT_HANDLE,
STD_ERROR_HANDLE, SW_HIDE,
STARTF_USESTDHANDLES, STARTF_USESHOWWINDOW,
ABOVE_NORMAL_PRIORITY_CLASS, BELOW_NORMAL_PRIORITY_CLASS,
HIGH_PRIORITY_CLASS, IDLE_PRIORITY_CLASS,
NORMAL_PRIORITY_CLASS, REALTIME_PRIORITY_CLASS,
CREATE_NO_WINDOW, DETACHED_PROCESS,
CREATE_DEFAULT_ERROR_MODE, CREATE_BREAKAWAY_FROM_JOB)
__all__.extend(["CREATE_NEW_CONSOLE", "CREATE_NEW_PROCESS_GROUP",
"STD_INPUT_HANDLE", "STD_OUTPUT_HANDLE",
"STD_ERROR_HANDLE", "SW_HIDE",
"STARTF_USESTDHANDLES", "STARTF_USESHOWWINDOW",
"STARTUPINFO",
"ABOVE_NORMAL_PRIORITY_CLASS", "BELOW_NORMAL_PRIORITY_CLASS",
"HIGH_PRIORITY_CLASS", "IDLE_PRIORITY_CLASS",
"NORMAL_PRIORITY_CLASS", "REALTIME_PRIORITY_CLASS",
"CREATE_NO_WINDOW", "DETACHED_PROCESS",
"CREATE_DEFAULT_ERROR_MODE", "CREATE_BREAKAWAY_FROM_JOB"])
# Exception classes used by this module.
class SubprocessError(Exception):
    """Base class for exceptions raised by this module."""
class CalledProcessError(SubprocessError):
    """Raised when run() is called with check=True and the child exits
    with a non-zero status.

    Attributes:
      cmd, returncode, stdout, stderr, output
    """
    def __init__(self, returncode, cmd, output=None, stderr=None):
        self.returncode = returncode
        self.cmd = cmd
        self.output = output
        self.stderr = stderr

    def __str__(self):
        rc = self.returncode
        # Non-negative (or falsy) return codes are plain exit statuses.
        if not rc or rc >= 0:
            return "Command '%s' returned non-zero exit status %d." % (
                self.cmd, rc)
        # Negative return codes encode the terminating signal.
        try:
            return "Command '%s' died with %r." % (
                self.cmd, signal.Signals(-rc))
        except ValueError:
            return "Command '%s' died with unknown signal %d." % (
                self.cmd, -rc)

    @property
    def stdout(self):
        """Alias for output attribute, to match stderr"""
        return self.output

    @stdout.setter
    def stdout(self, value):
        # Keep .stdout a transparent alias for .output.
        self.output = value
class TimeoutExpired(SubprocessError):
    """Raised when a timeout expires while waiting for a child process.

    Attributes:
      cmd, output, stdout, stderr, timeout
    """
    def __init__(self, cmd, timeout, output=None, stderr=None):
        self.cmd = cmd
        self.timeout = timeout
        self.output = output
        self.stderr = stderr

    def __str__(self):
        return ("Command '%s' timed out after %s seconds" %
                (self.cmd, self.timeout))

    @property
    def stdout(self):
        """Alias for output attribute, to match stderr."""
        return self.output

    @stdout.setter
    def stdout(self, value):
        # Keep .stdout a transparent alias for .output.
        self.output = value
if _mswindows:
    # Windows-only helpers: a mutable STARTUPINFO stand-in and a HANDLE
    # wrapper with explicit Close/Detach semantics.
    class STARTUPINFO:
        def __init__(self, *, dwFlags=0, hStdInput=None, hStdOutput=None,
                     hStdError=None, wShowWindow=0, lpAttributeList=None):
            self.dwFlags = dwFlags
            self.hStdInput = hStdInput
            self.hStdOutput = hStdOutput
            self.hStdError = hStdError
            self.wShowWindow = wShowWindow
            self.lpAttributeList = lpAttributeList or {"handle_list": []}

        def copy(self):
            # Shallow-copy the attribute list, but give the copy its own
            # handle_list so mutating one instance cannot affect the other.
            attr_list = self.lpAttributeList.copy()
            if 'handle_list' in attr_list:
                attr_list['handle_list'] = list(attr_list['handle_list'])
            return STARTUPINFO(dwFlags=self.dwFlags,
                               hStdInput=self.hStdInput,
                               hStdOutput=self.hStdOutput,
                               hStdError=self.hStdError,
                               wShowWindow=self.wShowWindow,
                               lpAttributeList=attr_list)

    class Handle(int):
        # A Windows HANDLE that closes itself at most once.
        closed = False

        def Close(self, CloseHandle=_winapi.CloseHandle):
            # CloseHandle bound as a default so Close still works during
            # interpreter shutdown.
            if not self.closed:
                self.closed = True
                CloseHandle(self)

        def Detach(self):
            # Give up ownership: mark closed without closing the OS handle.
            if not self.closed:
                self.closed = True
                return int(self)
            raise ValueError("already closed")

        def __repr__(self):
            return "%s(%d)" % (self.__class__.__name__, int(self))

        __del__ = Close
else:
    # When select or poll has indicated that the file is writable,
    # we can write up to _PIPE_BUF bytes without risk of blocking.
    # POSIX defines PIPE_BUF as >= 512.
    _PIPE_BUF = getattr(select, 'PIPE_BUF', 512)

    # poll/select have the advantage of not requiring any extra file
    # descriptor, contrarily to epoll/kqueue (also, they require a single
    # syscall).
    if hasattr(selectors, 'PollSelector'):
        _PopenSelector = selectors.PollSelector
    else:
        _PopenSelector = selectors.SelectSelector
if _mswindows:
    # On Windows we just need to close `Popen._handle` when we no longer need
    # it, so that the kernel can free it. `Popen._handle` gets closed
    # implicitly when the `Popen` instance is finalized (see `Handle.__del__`,
    # which is calling `CloseHandle` as requested in [1]), so there is nothing
    # for `_cleanup` to do.
    #
    # [1] https://docs.microsoft.com/en-us/windows/desktop/ProcThread/
    # creating-processes
    _active = None

    def _cleanup():
        # No-op on Windows; see the note above.
        pass
else:
    # This lists holds Popen instances for which the underlying process had not
    # exited at the time its __del__ method got called: those processes are
    # wait()ed for synchronously from _cleanup() when a new Popen object is
    # created, to avoid zombie processes.
    _active = []

    def _cleanup():
        # Best-effort reaping of leftover children; called from Popen.__init__.
        if _active is None:
            return
        for inst in _active[:]:
            res = inst._internal_poll(_deadstate=sys.maxsize)
            if res is not None:
                try:
                    _active.remove(inst)
                except ValueError:
                    # This can happen if two threads create a new Popen instance.
                    # It's harmless that it was already removed, so ignore.
                    pass

# Sentinel values for the stdin/stdout/stderr arguments:
PIPE = -1      # create a new pipe for the stream
STDOUT = -2    # redirect stderr into stdout
DEVNULL = -3   # attach the stream to os.devnull
# XXX This function is only used by multiprocessing and the test suite,
# but it's here so that it can be imported when Python is compiled without
# threads.
def _optim_args_from_interpreter_flags():
"""Return a list of command-line arguments reproducing the current
optimization settings in sys.flags."""
args = []
value = sys.flags.optimize
if value > 0:
args.append('-' + 'O' * value)
return args
def _args_from_interpreter_flags():
    """Return a list of command-line arguments reproducing the current
    settings in sys.flags, sys.warnoptions and sys._xoptions."""
    # Mapping of sys.flags attribute -> single-letter interpreter option.
    flag_opt_map = {
        'debug': 'd',
        # 'inspect': 'i',
        # 'interactive': 'i',
        'dont_write_bytecode': 'B',
        'no_site': 'S',
        'verbose': 'v',
        'bytes_warning': 'b',
        'quiet': 'q',
        # -O is handled in _optim_args_from_interpreter_flags()
    }
    args = _optim_args_from_interpreter_flags()
    for flag, opt in flag_opt_map.items():
        v = getattr(sys.flags, flag)
        if v > 0:
            # Repeat the letter to encode the flag's level (e.g. -vv).
            args.append('-' + opt * v)
    if sys.flags.isolated:
        # -I implies both -E and -s, so it replaces them.
        args.append('-I')
    else:
        if sys.flags.ignore_environment:
            args.append('-E')
        if sys.flags.no_user_site:
            args.append('-s')
    # -W options
    warnopts = sys.warnoptions[:]
    bytes_warning = sys.flags.bytes_warning
    xoptions = getattr(sys, '_xoptions', {})
    dev_mode = ('dev' in xoptions)
    # Filters already implied by -b/-bb or -X dev are re-added via those
    # options below, so drop them from the explicit -W list.
    if bytes_warning > 1:
        warnopts.remove("error::BytesWarning")
    elif bytes_warning:
        warnopts.remove("default::BytesWarning")
    if dev_mode:
        warnopts.remove('default')
    for opt in warnopts:
        args.append('-W' + opt)
    # -X options
    if dev_mode:
        args.extend(('-X', 'dev'))
    for opt in ('faulthandler', 'tracemalloc', 'importtime',
                'showrefcount', 'utf8', 'oldparser'):
        if opt in xoptions:
            value = xoptions[opt]
            if value is True:
                arg = opt
            else:
                arg = '%s=%s' % (opt, value)
            args.extend(('-X', arg))
    return args
def call(*popenargs, timeout=None, **kwargs):
    """Run a command, wait for it to complete (or time out), and return
    its returncode attribute.

    The arguments are the same as for the Popen constructor.  Example:

    retcode = call(["ls", "-l"])
    """
    with Popen(*popenargs, **kwargs) as process:
        try:
            return process.wait(timeout=timeout)
        except:  # Including KeyboardInterrupt; wait() handled that.
            process.kill()
            # No explicit wait() here: Popen.__exit__ performs it for us.
            raise
def check_call(*popenargs, **kwargs):
    """Run a command and wait for it to complete.  Return 0 on success,
    otherwise raise CalledProcessError carrying the return code in its
    returncode attribute.

    The arguments are the same as for the call function.  Example:

    check_call(["ls", "-l"])
    """
    rc = call(*popenargs, **kwargs)
    if not rc:
        return 0
    # Prefer an explicit args= kwarg; fall back to the first positional.
    cmd = kwargs.get("args")
    if cmd is None:
        cmd = popenargs[0]
    raise CalledProcessError(rc, cmd)
def check_output(*popenargs, timeout=None, **kwargs):
    r"""Run a command and return its output.

    Raises CalledProcessError (with returncode and output attributes set)
    if the exit code was non-zero.  The arguments are the same as for the
    Popen constructor.  Example:

    >>> check_output(["ls", "-l", "/dev/null"])
    b'crw-rw-rw- 1 root root 1, 3 Oct 18  2007 /dev/null\n'

    The stdout argument is not allowed as it is used internally; to capture
    standard error in the result, pass stderr=STDOUT.

    The optional "input" argument feeds bytes (or, in text mode, a string)
    to the child's stdin; it is mutually exclusive with Popen's "stdin"
    argument, which is used internally.  Example:

    >>> check_output(["sed", "-e", "s/foo/bar/"],
    ...              input=b"when in the course of fooman events\n")
    b'when in the course of barman events\n'

    By default all communication is in bytes.  Text mode (triggered by any
    of text, encoding, errors or universal_newlines) makes "input" and the
    return value strings, decoded per the locale or "encoding".
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')

    if 'input' in kwargs and kwargs['input'] is None:
        # Explicitly passing input=None was previously equivalent to passing an
        # empty string. That is maintained here for backwards compatibility.
        text_mode = kwargs.get('universal_newlines') or kwargs.get('text')
        kwargs['input'] = '' if text_mode else b''

    return run(*popenargs, stdout=PIPE, timeout=timeout, check=True,
               **kwargs).stdout
class CompletedProcess(object):
    """The result of run(): a process that has finished running.

    Attributes:
      args: The list or str args passed to run().
      returncode: The exit code of the process, negative for signals.
      stdout: The standard output (None if not captured).
      stderr: The standard error (None if not captured).
    """
    def __init__(self, args, returncode, stdout=None, stderr=None):
        self.args = args
        self.returncode = returncode
        self.stdout = stdout
        self.stderr = stderr

    def __repr__(self):
        # Only show the stream attributes that were actually captured.
        parts = [f"args={self.args!r}", f"returncode={self.returncode!r}"]
        if self.stdout is not None:
            parts.append(f"stdout={self.stdout!r}")
        if self.stderr is not None:
            parts.append(f"stderr={self.stderr!r}")
        return f"{type(self).__name__}({', '.join(parts)})"

    __class_getitem__ = classmethod(types.GenericAlias)

    def check_returncode(self):
        """Raise CalledProcessError if the exit code is non-zero."""
        if self.returncode:
            raise CalledProcessError(self.returncode, self.args, self.stdout,
                                     self.stderr)
def run(*popenargs,
        input=None, capture_output=False, timeout=None, check=False, **kwargs):
    """Run a command and return a CompletedProcess instance.

    The returned instance has args, returncode, stdout and stderr attributes.
    stdout/stderr are None unless captured: pass stdout=PIPE and/or
    stderr=PIPE (or capture_output=True) to capture them.

    If check is True and the exit code was non-zero, CalledProcessError is
    raised, carrying the return code and any captured output/stderr.

    If timeout is given and the process takes too long, TimeoutExpired is
    raised.

    The optional "input" argument feeds bytes (or, in text mode, a string)
    to the child's stdin; it is mutually exclusive with Popen's "stdin"
    argument, which is used internally.

    By default all communication is in bytes.  Text mode (triggered by any
    of text, encoding, errors or universal_newlines) makes "input", stdout
    and stderr strings, decoded per the locale or "encoding".

    The other arguments are the same as for the Popen constructor.
    """
    if input is not None:
        if kwargs.get('stdin') is not None:
            raise ValueError('stdin and input arguments may not both be used.')
        kwargs['stdin'] = PIPE

    if capture_output:
        if kwargs.get('stdout') is not None or kwargs.get('stderr') is not None:
            raise ValueError('stdout and stderr arguments may not be used '
                             'with capture_output.')
        kwargs['stdout'] = PIPE
        kwargs['stderr'] = PIPE

    with Popen(*popenargs, **kwargs) as proc:
        try:
            stdout, stderr = proc.communicate(input, timeout=timeout)
        except TimeoutExpired as exc:
            proc.kill()
            if _mswindows:
                # Windows accumulates the output in blocking read() calls on
                # child threads, joined with the timeout; a communicate()
                # _after_ kill() is required to collect that output and
                # attach it to the exception.
                exc.stdout, exc.stderr = proc.communicate()
            else:
                # POSIX _communicate already stashed the partial output in
                # the TimeoutExpired exception.
                proc.wait()
            raise
        except:  # Including KeyboardInterrupt, communicate handled that.
            proc.kill()
            # No proc.wait() here: __exit__ performs it for us.
            raise
        retcode = proc.poll()
        if check and retcode:
            raise CalledProcessError(retcode, proc.args,
                                     output=stdout, stderr=stderr)
    return CompletedProcess(proc.args, retcode, stdout, stderr)
def list2cmdline(seq):
    """
    Translate a sequence of arguments into a command line string using the
    MS C runtime rules:

    1) Arguments are delimited by white space (space or tab).

    2) A string surrounded by double quotation marks is interpreted as a
       single argument, regardless of the white space it contains.  A
       quoted string can be embedded in an argument.

    3) A double quotation mark preceded by a backslash is a literal
       double quotation mark.

    4) Backslashes are literal unless they immediately precede a double
       quotation mark.

    5) When backslashes immediately precede a double quotation mark, each
       pair becomes a literal backslash; an odd trailing backslash escapes
       the quotation mark per rule 3.

    See "Parsing C++ Command-Line Arguments" on MSDN, e.g.
    http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
    """
    pieces = []
    quote_needed = False
    for arg in map(os.fsdecode, seq):
        # Backslashes are buffered until we know whether a '"' follows.
        pending_backslashes = []

        # Separate this argument from the previous one.
        if pieces:
            pieces.append(' ')

        quote_needed = (" " in arg) or ("\t" in arg) or not arg
        if quote_needed:
            pieces.append('"')

        for ch in arg:
            if ch == '\\':
                # Defer: meaning depends on what comes next.
                pending_backslashes.append(ch)
            elif ch == '"':
                # Double every buffered backslash, then escape the quote.
                pieces.append('\\' * len(pending_backslashes) * 2)
                pending_backslashes = []
                pieces.append('\\"')
            else:
                # Ordinary character: flush buffered backslashes literally.
                if pending_backslashes:
                    pieces.extend(pending_backslashes)
                    pending_backslashes = []
                pieces.append(ch)

        # Trailing backslashes are literal...
        if pending_backslashes:
            pieces.extend(pending_backslashes)
        # ...but must be doubled when they precede the closing quote.
        if quote_needed:
            pieces.extend(pending_backslashes)
            pieces.append('"')

    return ''.join(pieces)
# Various tools for executing commands and looking at their output and status.
#
def getstatusoutput(cmd):
    """Return (exitcode, output) of executing cmd in a shell.

    Executes 'cmd' in a shell via 'check_output' and returns a 2-tuple
    (status, output).  The locale encoding is used to decode the output and
    process newlines; one trailing newline is stripped from the output.

    The exit status can be interpreted according to the rules for the
    function 'wait'.  Example:

    >>> import subprocess
    >>> subprocess.getstatusoutput('ls /bin/ls')
    (0, '/bin/ls')
    >>> subprocess.getstatusoutput('cat /bin/junk')
    (1, 'cat: /bin/junk: No such file or directory')
    >>> subprocess.getstatusoutput('/bin/junk')
    (127, 'sh: /bin/junk: not found')
    >>> subprocess.getstatusoutput('/bin/kill $$')
    (-15, '')
    """
    try:
        output = check_output(cmd, shell=True, text=True, stderr=STDOUT)
        exitcode = 0
    except CalledProcessError as ex:
        # Failure still yields the captured output plus the real exit code.
        output = ex.output
        exitcode = ex.returncode
    if output[-1:] == '\n':
        output = output[:-1]
    return exitcode, output
def getoutput(cmd):
    """Return output (stdout or stderr) of executing cmd in a shell.

    Like getstatusoutput(), except the exit status is ignored and only the
    command's output string is returned.  Example:

    >>> import subprocess
    >>> subprocess.getoutput('ls /bin/ls')
    '/bin/ls'
    """
    _, output = getstatusoutput(cmd)
    return output
def _use_posix_spawn():
    """Check if posix_spawn() can be used for subprocess.

    subprocess requires a posix_spawn() implementation that properly
    reports errors to the parent process and sets errno when process
    attribute actions, file actions, or exec() fail.  An implementation
    that can use vfork() is preferred for performance.
    """
    if _mswindows or not hasattr(os, 'posix_spawn'):
        # os.posix_spawn() is not available
        return False

    if sys.platform == 'darwin':
        # posix_spawn() is a syscall on macOS and properly reports errors
        return True

    # Check libc name and runtime libc version.
    try:
        ver = os.confstr('CS_GNU_LIBC_VERSION')
        # parse 'glibc 2.28' as ('glibc', (2, 28))
        parts = ver.split(maxsplit=1)
        if len(parts) != 2:
            # reject unknown format
            raise ValueError
        libc, version_text = parts
        version = tuple(map(int, version_text.split('.')))
    except (AttributeError, ValueError, OSError):
        # os.confstr() or CS_GNU_LIBC_VERSION value not available: by
        # default, assume posix_spawn() does not properly report errors.
        return False

    # glibc 2.24 has a new Linux posix_spawn implementation using vfork
    # which properly reports errors to the parent process.  Earlier glibc
    # versions don't use vfork (even though 2.26 added an error-reporting
    # pipe), so they are rejected.
    return (sys.platform == 'linux'
            and libc == 'glibc'
            and version >= (2, 24))
# Decided once at import time; consulted when spawning children (POSIX).
_USE_POSIX_SPAWN = _use_posix_spawn()
class Popen(object):
""" Execute a child program in a new process.
For a complete description of the arguments see the Python documentation.
Arguments:
args: A string, or a sequence of program arguments.
bufsize: supplied as the buffering argument to the open() function when
creating the stdin/stdout/stderr pipe file objects
executable: A replacement program to execute.
stdin, stdout and stderr: These specify the executed programs' standard
input, standard output and standard error file handles, respectively.
preexec_fn: (POSIX only) An object to be called in the child process
just before the child is executed.
close_fds: Controls closing or inheriting of file descriptors.
shell: If true, the command will be executed through the shell.
cwd: Sets the current directory before the child is executed.
env: Defines the environment variables for the new process.
text: If true, decode stdin, stdout and stderr using the given encoding
(if set) or the system default otherwise.
universal_newlines: Alias of text, provided for backwards compatibility.
startupinfo and creationflags (Windows only)
restore_signals (POSIX only)
start_new_session (POSIX only)
group (POSIX only)
extra_groups (POSIX only)
user (POSIX only)
umask (POSIX only)
pass_fds (POSIX only)
encoding and errors: Text mode encoding and error handling to use for
file objects stdin, stdout and stderr.
Attributes:
stdin, stdout, stderr, pid, returncode
"""
_child_created = False # Set here since __del__ checks it
    def __init__(self, args, bufsize=-1, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=True,
                 shell=False, cwd=None, env=None, universal_newlines=None,
                 startupinfo=None, creationflags=0,
                 restore_signals=True, start_new_session=False,
                 pass_fds=(), *, user=None, group=None, extra_groups=None,
                 encoding=None, errors=None, text=None, umask=-1):
        """Create new Popen instance."""
        # Reap any leftover dead children first (POSIX) to avoid zombies.
        _cleanup()
        # Held while anything is calling waitpid before returncode has been
        # updated to prevent clobbering returncode if wait() or poll() are
        # called from multiple threads at once. After acquiring the lock,
        # code must re-check self.returncode to see if another thread just
        # finished a waitpid() call.
        self._waitpid_lock = threading.Lock()
        self._input = None
        self._communication_started = False
        if bufsize is None:
            bufsize = -1  # Restore default
        if not isinstance(bufsize, int):
            raise TypeError("bufsize must be an integer")
        # Reject options that only make sense on the other platform.
        if _mswindows:
            if preexec_fn is not None:
                raise ValueError("preexec_fn is not supported on Windows "
                                 "platforms")
        else:
            # POSIX
            if pass_fds and not close_fds:
                warnings.warn("pass_fds overriding close_fds.", RuntimeWarning)
                close_fds = True
            if startupinfo is not None:
                raise ValueError("startupinfo is only supported on Windows "
                                 "platforms")
            if creationflags != 0:
                raise ValueError("creationflags is only supported on Windows "
                                 "platforms")
        # Public attributes; pid/returncode are filled in once the child runs.
        self.args = args
        self.stdin = None
        self.stdout = None
        self.stderr = None
        self.pid = None
        self.returncode = None
        self.encoding = encoding
        self.errors = errors
        # Validate the combinations of text and universal_newlines
        if (text is not None and universal_newlines is not None
            and bool(universal_newlines) != bool(text)):
            raise SubprocessError('Cannot disambiguate when both text '
                                  'and universal_newlines are supplied but '
                                  'different. Pass one or the other.')
        # Input and output objects. The general principle is like
        # this:
        #
        # Parent                   Child
        # ------                   -----
        # p2cwrite   ---stdin--->  p2cread
        # c2pread    <--stdout---  c2pwrite
        # errread    <--stderr---  errwrite
        #
        # On POSIX, the child objects are file descriptors.  On
        # Windows, these are Windows file handles.  The parent objects
        # are file descriptors on both platforms.  The parent objects
        # are -1 when not using PIPEs. The child objects are -1
        # when not redirecting.
        (p2cread, p2cwrite,
         c2pread, c2pwrite,
         errread, errwrite) = self._get_handles(stdin, stdout, stderr)
        # We wrap OS handles *before* launching the child, otherwise a
        # quickly terminating child could make our fds unwrappable
        # (see #8458).
        if _mswindows:
            if p2cwrite != -1:
                p2cwrite = msvcrt.open_osfhandle(p2cwrite.Detach(), 0)
            if c2pread != -1:
                c2pread = msvcrt.open_osfhandle(c2pread.Detach(), 0)
            if errread != -1:
                errread = msvcrt.open_osfhandle(errread.Detach(), 0)
        # Any of encoding/errors/text/universal_newlines switches to text mode.
        self.text_mode = encoding or errors or text or universal_newlines
        # How long to resume waiting on a child after the first ^C.
        # There is no right value for this.  The purpose is to be polite
        # yet remain good for interactive users trying to exit a tool.
        self._sigint_wait_secs = 0.25  # 1/xkcd221.getRandomNumber()
        self._closed_child_pipe_fds = False
        if self.text_mode:
            if bufsize == 1:
                line_buffering = True
                # Use the default buffer size for the underlying binary streams
                # since they don't support line buffering.
                bufsize = -1
            else:
                line_buffering = False
        # Resolve group/extra_groups/user to numeric ids up front so errors
        # surface in the parent before any child is spawned.
        gid = None
        if group is not None:
            if not hasattr(os, 'setregid'):
                raise ValueError("The 'group' parameter is not supported on the "
                                 "current platform")
            elif isinstance(group, str):
                if grp is None:
                    raise ValueError("The group parameter cannot be a string "
                                     "on systems without the grp module")
                gid = grp.getgrnam(group).gr_gid
            elif isinstance(group, int):
                gid = group
            else:
                raise TypeError("Group must be a string or an integer, not {}"
                                .format(type(group)))
            if gid < 0:
                raise ValueError(f"Group ID cannot be negative, got {gid}")
        gids = None
        if extra_groups is not None:
            if not hasattr(os, 'setgroups'):
                raise ValueError("The 'extra_groups' parameter is not "
                                 "supported on the current platform")
            elif isinstance(extra_groups, str):
                raise ValueError("Groups must be a list, not a string")
            gids = []
            for extra_group in extra_groups:
                if isinstance(extra_group, str):
                    if grp is None:
                        raise ValueError("Items in extra_groups cannot be "
                                         "strings on systems without the "
                                         "grp module")
                    gids.append(grp.getgrnam(extra_group).gr_gid)
                elif isinstance(extra_group, int):
                    gids.append(extra_group)
                else:
                    raise TypeError("Items in extra_groups must be a string "
                                    "or integer, not {}"
                                    .format(type(extra_group)))
            # make sure that the gids are all positive here so we can do less
            # checking in the C code
            for gid_check in gids:
                if gid_check < 0:
                    raise ValueError(f"Group ID cannot be negative, got {gid_check}")
        uid = None
        if user is not None:
            if not hasattr(os, 'setreuid'):
                raise ValueError("The 'user' parameter is not supported on "
                                 "the current platform")
            elif isinstance(user, str):
                if pwd is None:
                    raise ValueError("The user parameter cannot be a string "
                                     "on systems without the pwd module")
                uid = pwd.getpwnam(user).pw_uid
            elif isinstance(user, int):
                uid = user
            else:
                raise TypeError("User must be a string or an integer")
            if uid < 0:
                raise ValueError(f"User ID cannot be negative, got {uid}")
        try:
            # Wrap the parent ends of the pipes in file objects (text-mode
            # wrappers when requested), then actually spawn the child.
            if p2cwrite != -1:
                self.stdin = io.open(p2cwrite, 'wb', bufsize)
                if self.text_mode:
                    self.stdin = io.TextIOWrapper(self.stdin, write_through=True,
                                                  line_buffering=line_buffering,
                                                  encoding=encoding, errors=errors)
            if c2pread != -1:
                self.stdout = io.open(c2pread, 'rb', bufsize)
                if self.text_mode:
                    self.stdout = io.TextIOWrapper(self.stdout,
                                                   encoding=encoding, errors=errors)
            if errread != -1:
                self.stderr = io.open(errread, 'rb', bufsize)
                if self.text_mode:
                    self.stderr = io.TextIOWrapper(self.stderr,
                                                   encoding=encoding, errors=errors)
            self._execute_child(args, executable, preexec_fn, close_fds,
                                pass_fds, cwd, env,
                                startupinfo, creationflags, shell,
                                p2cread, p2cwrite,
                                c2pread, c2pwrite,
                                errread, errwrite,
                                restore_signals,
                                gid, gids, uid, umask,
                                start_new_session)
        except:
            # Cleanup if the child failed starting.
            for f in filter(None, (self.stdin, self.stdout, self.stderr)):
                try:
                    f.close()
                except OSError:
                    pass  # Ignore EBADF or other errors.
            if not self._closed_child_pipe_fds:
                to_close = []
                if stdin == PIPE:
                    to_close.append(p2cread)
                if stdout == PIPE:
                    to_close.append(c2pwrite)
                if stderr == PIPE:
                    to_close.append(errwrite)
                if hasattr(self, '_devnull'):
                    to_close.append(self._devnull)
                for fd in to_close:
                    try:
                        if _mswindows and isinstance(fd, Handle):
                            fd.Close()
                        else:
                            os.close(fd)
                    except OSError:
                        pass
            raise
def __repr__(self):
obj_repr = (
f"<{self.__class__.__name__}: "
f"returncode: {self.returncode} args: {list(self.args)!r}>"
)
if len(obj_repr) > 80:
obj_repr = obj_repr[:76] + "...>"
return obj_repr
__class_getitem__ = classmethod(types.GenericAlias)
    @property
    def universal_newlines(self):
        """Legacy alias for text_mode (kept for API compatibility, bpo-31756)."""
        # universal_newlines as retained as an alias of text_mode for API
        # compatibility. bpo-31756
        return self.text_mode
    @universal_newlines.setter
    def universal_newlines(self, universal_newlines):
        # Setting the alias just toggles text mode; value is coerced to bool.
        self.text_mode = bool(universal_newlines)
def _translate_newlines(self, data, encoding, errors):
data = data.decode(encoding, errors)
return data.replace("\r\n", "\n").replace("\r", "\n")
    def __enter__(self):
        """Context-manager entry: the Popen object itself is the target."""
        return self
    def __exit__(self, exc_type, value, traceback):
        """Context-manager exit: close our pipe ends, then reap the child.

        On KeyboardInterrupt only a brief wait (self._sigint_wait_secs)
        is performed so Ctrl-C stays responsive; in every other case
        wait() is called so the child does not linger as a zombie.
        """
        if self.stdout:
            self.stdout.close()
        if self.stderr:
            self.stderr.close()
        try:  # Flushing a BufferedWriter may raise an error
            if self.stdin:
                self.stdin.close()
        finally:
            if exc_type == KeyboardInterrupt:
                # https://bugs.python.org/issue25942
                # In the case of a KeyboardInterrupt we assume the SIGINT
                # was also already sent to our child processes.  We can't
                # block indefinitely as that is not user friendly.
                # If we have not already waited a brief amount of time in
                # an interrupted .wait() or .communicate() call, do so here
                # for consistency.
                if self._sigint_wait_secs > 0:
                    try:
                        self._wait(timeout=self._sigint_wait_secs)
                    except TimeoutExpired:
                        pass
                self._sigint_wait_secs = 0  # Note that this has been done.
                return  # resume the KeyboardInterrupt
            # Wait for the process to terminate, to avoid zombies.
            self.wait()
    def __del__(self, _maxsize=sys.maxsize, _warn=warnings.warn):
        """Finalizer: warn if the child is still running and poll it once.

        sys.maxsize and warnings.warn are bound as default arguments
        because during interpreter shutdown module globals may already
        have been cleared when __del__ runs.
        """
        if not self._child_created:
            # We didn't get to successfully create a child process.
            return
        if self.returncode is None:
            # Not reading subprocess exit status creates a zombie process which
            # is only destroyed at the parent python process exit
            _warn("subprocess %s is still running" % self.pid,
                  ResourceWarning, source=self)
        # In case the child hasn't been waited on, check if it's done.
        self._internal_poll(_deadstate=_maxsize)
        if self.returncode is None and _active is not None:
            # Child is still running, keep us alive until we can wait on it.
            _active.append(self)
def _get_devnull(self):
if not hasattr(self, '_devnull'):
self._devnull = os.open(os.devnull, os.O_RDWR)
return self._devnull
    def _stdin_write(self, input):
        """Write *input* (if any) to the child's stdin, then close it.

        BrokenPipeError and EINVAL OSErrors are deliberately swallowed:
        communicate() must tolerate a child that already exited or
        closed the pipe.  Any other OSError propagates.
        """
        if input:
            try:
                self.stdin.write(input)
            except BrokenPipeError:
                pass  # communicate() must ignore broken pipe errors.
            except OSError as exc:
                if exc.errno == errno.EINVAL:
                    # bpo-19612, bpo-30418: On Windows, stdin.write() fails
                    # with EINVAL if the child process exited or if the child
                    # process is still running but closed the pipe.
                    pass
                else:
                    raise
        try:
            self.stdin.close()
        except BrokenPipeError:
            pass  # communicate() must ignore broken pipe errors.
        except OSError as exc:
            if exc.errno == errno.EINVAL:
                # Same EINVAL tolerance as above, for the close().
                pass
            else:
                raise
    def communicate(self, input=None, timeout=None):
        """Interact with process: Send data to stdin and close it.
        Read data from stdout and stderr, until end-of-file is
        reached.  Wait for process to terminate.

        The optional "input" argument should be data to be sent to the
        child process, or None, if no data should be sent to the child.
        communicate() returns a tuple (stdout, stderr).

        By default, all communication is in bytes, and therefore any
        "input" should be bytes, and the (stdout, stderr) will be bytes.
        If in text mode (indicated by self.text_mode), any "input" should
        be a string, and (stdout, stderr) will be strings decoded
        according to locale encoding, or by "encoding" if set.  Text mode
        is triggered by setting any of text, encoding, errors or
        universal_newlines.
        """
        if self._communication_started and input:
            raise ValueError("Cannot send input after starting communication")
        # Optimization: If we are not worried about timeouts, we haven't
        # started communicating, and we have one or zero pipes, using select()
        # or threads is unnecessary.
        if (timeout is None and not self._communication_started and
            [self.stdin, self.stdout, self.stderr].count(None) >= 2):
            stdout = None
            stderr = None
            if self.stdin:
                self._stdin_write(input)
            elif self.stdout:
                stdout = self.stdout.read()
                self.stdout.close()
            elif self.stderr:
                stderr = self.stderr.read()
                self.stderr.close()
            self.wait()
        else:
            if timeout is not None:
                endtime = _time() + timeout
            else:
                endtime = None
            try:
                stdout, stderr = self._communicate(input, endtime, timeout)
            except KeyboardInterrupt:
                # https://bugs.python.org/issue25942
                # See the detailed comment in .wait().
                if timeout is not None:
                    sigint_timeout = min(self._sigint_wait_secs,
                                         self._remaining_time(endtime))
                else:
                    sigint_timeout = self._sigint_wait_secs
                self._sigint_wait_secs = 0  # nothing else should wait.
                try:
                    self._wait(timeout=sigint_timeout)
                except TimeoutExpired:
                    pass
                raise  # resume the KeyboardInterrupt
            finally:
                self._communication_started = True
            # Reap the child (may raise TimeoutExpired if it outlives endtime).
            sts = self.wait(timeout=self._remaining_time(endtime))
        return (stdout, stderr)
def poll(self):
"""Check if child process has terminated. Set and return returncode
attribute."""
return self._internal_poll()
def _remaining_time(self, endtime):
"""Convenience for _communicate when computing timeouts."""
if endtime is None:
return None
else:
return endtime - _time()
def _check_timeout(self, endtime, orig_timeout, stdout_seq, stderr_seq,
skip_check_and_raise=False):
"""Convenience for checking if a timeout has expired."""
if endtime is None:
return
if skip_check_and_raise or _time() > endtime:
raise TimeoutExpired(
self.args, orig_timeout,
output=b''.join(stdout_seq) if stdout_seq else None,
stderr=b''.join(stderr_seq) if stderr_seq else None)
    def wait(self, timeout=None):
        """Wait for child process to terminate; returns self.returncode."""
        if timeout is not None:
            # endtime is only needed to recompute the remaining budget
            # inside the KeyboardInterrupt handler below.
            endtime = _time() + timeout
        try:
            return self._wait(timeout=timeout)
        except KeyboardInterrupt:
            # https://bugs.python.org/issue25942
            # The first keyboard interrupt waits briefly for the child to
            # exit under the common assumption that it also received the ^C
            # generated SIGINT and will exit rapidly.
            if timeout is not None:
                sigint_timeout = min(self._sigint_wait_secs,
                                     self._remaining_time(endtime))
            else:
                sigint_timeout = self._sigint_wait_secs
            self._sigint_wait_secs = 0  # nothing else should wait.
            try:
                self._wait(timeout=sigint_timeout)
            except TimeoutExpired:
                pass
            raise  # resume the KeyboardInterrupt
    def _close_pipe_fds(self,
                        p2cread, p2cwrite,
                        c2pread, c2pwrite,
                        errread, errwrite):
        """Close the parent's copies of the child's pipe ends.

        An ExitStack is used so every close is attempted even if one of
        them raises.  On Windows the Handle objects are Close()d; on
        POSIX raw fds are os.close()d, with the devnull fd excluded from
        the per-pipe closes and closed exactly once at the end.
        """
        # self._devnull is not always defined.
        devnull_fd = getattr(self, '_devnull', None)
        with contextlib.ExitStack() as stack:
            if _mswindows:
                if p2cread != -1:
                    stack.callback(p2cread.Close)
                if c2pwrite != -1:
                    stack.callback(c2pwrite.Close)
                if errwrite != -1:
                    stack.callback(errwrite.Close)
            else:
                if p2cread != -1 and p2cwrite != -1 and p2cread != devnull_fd:
                    stack.callback(os.close, p2cread)
                if c2pwrite != -1 and c2pread != -1 and c2pwrite != devnull_fd:
                    stack.callback(os.close, c2pwrite)
                if errwrite != -1 and errread != -1 and errwrite != devnull_fd:
                    stack.callback(os.close, errwrite)
            if devnull_fd is not None:
                stack.callback(os.close, devnull_fd)
        # Prevent a double close of these handles/fds from __init__ on error.
        self._closed_child_pipe_fds = True
    if _mswindows:
        #
        # Windows methods
        #
        def _get_handles(self, stdin, stdout, stderr):
            """Construct and return tuple with IO objects:
            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite

            Each position is -1 when that stream is inherited from the
            parent; otherwise it is an OS handle made inheritable.
            """
            if stdin is None and stdout is None and stderr is None:
                return (-1, -1, -1, -1, -1, -1)
            p2cread, p2cwrite = -1, -1
            c2pread, c2pwrite = -1, -1
            errread, errwrite = -1, -1
            if stdin is None:
                p2cread = _winapi.GetStdHandle(_winapi.STD_INPUT_HANDLE)
                if p2cread is None:
                    # No parent stdin handle (e.g. GUI app): give the child
                    # a dummy pipe read end and drop the write end.
                    p2cread, _ = _winapi.CreatePipe(None, 0)
                    p2cread = Handle(p2cread)
                    _winapi.CloseHandle(_)
            elif stdin == PIPE:
                p2cread, p2cwrite = _winapi.CreatePipe(None, 0)
                p2cread, p2cwrite = Handle(p2cread), Handle(p2cwrite)
            elif stdin == DEVNULL:
                p2cread = msvcrt.get_osfhandle(self._get_devnull())
            elif isinstance(stdin, int):
                p2cread = msvcrt.get_osfhandle(stdin)
            else:
                # Assuming file-like object
                p2cread = msvcrt.get_osfhandle(stdin.fileno())
            p2cread = self._make_inheritable(p2cread)
            if stdout is None:
                c2pwrite = _winapi.GetStdHandle(_winapi.STD_OUTPUT_HANDLE)
                if c2pwrite is None:
                    # Same dummy-pipe fallback as for stdin above.
                    _, c2pwrite = _winapi.CreatePipe(None, 0)
                    c2pwrite = Handle(c2pwrite)
                    _winapi.CloseHandle(_)
            elif stdout == PIPE:
                c2pread, c2pwrite = _winapi.CreatePipe(None, 0)
                c2pread, c2pwrite = Handle(c2pread), Handle(c2pwrite)
            elif stdout == DEVNULL:
                c2pwrite = msvcrt.get_osfhandle(self._get_devnull())
            elif isinstance(stdout, int):
                c2pwrite = msvcrt.get_osfhandle(stdout)
            else:
                # Assuming file-like object
                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
            c2pwrite = self._make_inheritable(c2pwrite)
            if stderr is None:
                errwrite = _winapi.GetStdHandle(_winapi.STD_ERROR_HANDLE)
                if errwrite is None:
                    # Same dummy-pipe fallback as for stdin above.
                    _, errwrite = _winapi.CreatePipe(None, 0)
                    errwrite = Handle(errwrite)
                    _winapi.CloseHandle(_)
            elif stderr == PIPE:
                errread, errwrite = _winapi.CreatePipe(None, 0)
                errread, errwrite = Handle(errread), Handle(errwrite)
            elif stderr == STDOUT:
                errwrite = c2pwrite
            elif stderr == DEVNULL:
                errwrite = msvcrt.get_osfhandle(self._get_devnull())
            elif isinstance(stderr, int):
                errwrite = msvcrt.get_osfhandle(stderr)
            else:
                # Assuming file-like object
                errwrite = msvcrt.get_osfhandle(stderr.fileno())
            errwrite = self._make_inheritable(errwrite)
            return (p2cread, p2cwrite,
                    c2pread, c2pwrite,
                    errread, errwrite)
        def _make_inheritable(self, handle):
            """Return a duplicate of handle, which is inheritable"""
            # DuplicateHandle(..., bInheritHandle=1, DUPLICATE_SAME_ACCESS):
            # same access rights, but the child can inherit the copy.
            h = _winapi.DuplicateHandle(
                _winapi.GetCurrentProcess(), handle,
                _winapi.GetCurrentProcess(), 0, 1,
                _winapi.DUPLICATE_SAME_ACCESS)
            return Handle(h)
def _filter_handle_list(self, handle_list):
"""Filter out console handles that can't be used
in lpAttributeList["handle_list"] and make sure the list
isn't empty. This also removes duplicate handles."""
# An handle with it's lowest two bits set might be a special console
# handle that if passed in lpAttributeList["handle_list"], will
# cause it to fail.
return list({handle for handle in handle_list
if handle & 0x3 != 0x3
or _winapi.GetFileType(handle) !=
_winapi.FILE_TYPE_CHAR})
        def _execute_child(self, args, executable, preexec_fn, close_fds,
                           pass_fds, cwd, env,
                           startupinfo, creationflags, shell,
                           p2cread, p2cwrite,
                           c2pread, c2pwrite,
                           errread, errwrite,
                           unused_restore_signals,
                           unused_gid, unused_gids, unused_uid,
                           unused_umask,
                           unused_start_new_session):
            """Execute program (MS Windows version).

            Converts *args* to a command-line string, prepares the
            STARTUPINFO (std handles and handle_list), launches the child
            via _winapi.CreateProcess, and closes the parent's copies of
            the child's pipe handles whether or not the launch succeeds.
            """
            assert not pass_fds, "pass_fds not supported on Windows."
            if isinstance(args, str):
                pass
            elif isinstance(args, bytes):
                if shell:
                    raise TypeError('bytes args is not allowed on Windows')
                args = list2cmdline([args])
            elif isinstance(args, os.PathLike):
                if shell:
                    raise TypeError('path-like args is not allowed when '
                                    'shell is true')
                args = list2cmdline([args])
            else:
                args = list2cmdline(args)
            if executable is not None:
                executable = os.fsdecode(executable)
            # Process startup details
            if startupinfo is None:
                startupinfo = STARTUPINFO()
            else:
                # bpo-34044: Copy STARTUPINFO since it is modified above,
                # so the caller can reuse it multiple times.
                startupinfo = startupinfo.copy()
            use_std_handles = -1 not in (p2cread, c2pwrite, errwrite)
            if use_std_handles:
                startupinfo.dwFlags |= _winapi.STARTF_USESTDHANDLES
                startupinfo.hStdInput = p2cread
                startupinfo.hStdOutput = c2pwrite
                startupinfo.hStdError = errwrite
            attribute_list = startupinfo.lpAttributeList
            have_handle_list = bool(attribute_list and
                                    "handle_list" in attribute_list and
                                    attribute_list["handle_list"])
            # If we were given an handle_list or need to create one
            if have_handle_list or (use_std_handles and close_fds):
                if attribute_list is None:
                    attribute_list = startupinfo.lpAttributeList = {}
                handle_list = attribute_list["handle_list"] = \
                    list(attribute_list.get("handle_list", []))
                if use_std_handles:
                    handle_list += [int(p2cread), int(c2pwrite), int(errwrite)]
                handle_list[:] = self._filter_handle_list(handle_list)
                if handle_list:
                    if not close_fds:
                        warnings.warn("startupinfo.lpAttributeList['handle_list'] "
                                      "overriding close_fds", RuntimeWarning)
                    # When using the handle_list we always request to inherit
                    # handles but the only handles that will be inherited are
                    # the ones in the handle_list
                    close_fds = False
            if shell:
                startupinfo.dwFlags |= _winapi.STARTF_USESHOWWINDOW
                startupinfo.wShowWindow = _winapi.SW_HIDE
                comspec = os.environ.get("COMSPEC", "cmd.exe")
                args = '{} /c "{}"'.format (comspec, args)
            if cwd is not None:
                cwd = os.fsdecode(cwd)
            sys.audit("subprocess.Popen", executable, args, cwd, env)
            # Start the process
            try:
                hp, ht, pid, tid = _winapi.CreateProcess(executable, args,
                                         # no special security
                                         None, None,
                                         int(not close_fds),
                                         creationflags,
                                         env,
                                         cwd,
                                         startupinfo)
            finally:
                # Child is launched. Close the parent's copy of those pipe
                # handles that only the child should have open.  You need
                # to make sure that no handles to the write end of the
                # output pipe are maintained in this process or else the
                # pipe will not close when the child process exits and the
                # ReadFile will hang.
                self._close_pipe_fds(p2cread, p2cwrite,
                                     c2pread, c2pwrite,
                                     errread, errwrite)
            # Retain the process handle, but close the thread handle
            self._child_created = True
            self._handle = Handle(hp)
            self.pid = pid
            _winapi.CloseHandle(ht)
        def _internal_poll(self, _deadstate=None,
                _WaitForSingleObject=_winapi.WaitForSingleObject,
                _WAIT_OBJECT_0=_winapi.WAIT_OBJECT_0,
                _GetExitCodeProcess=_winapi.GetExitCodeProcess):
            """Check if child process has terminated.  Returns returncode
            attribute.

            This method is called by __del__, so it can only refer to objects
            in its local scope — hence the _winapi functions bound as
            default arguments.  _deadstate is accepted for signature
            parity with the POSIX version but is unused here.
            """
            if self.returncode is None:
                # Zero timeout: a pure poll, never blocks.
                if _WaitForSingleObject(self._handle, 0) == _WAIT_OBJECT_0:
                    self.returncode = _GetExitCodeProcess(self._handle)
            return self.returncode
        def _wait(self, timeout):
            """Internal implementation of wait() on Windows.

            Blocks on the process handle for up to *timeout* seconds
            (forever when None) and raises TimeoutExpired on expiry.
            """
            if timeout is None:
                timeout_millis = _winapi.INFINITE
            else:
                timeout_millis = int(timeout * 1000)
            if self.returncode is None:
                # API note: Returns immediately if timeout_millis == 0.
                result = _winapi.WaitForSingleObject(self._handle,
                                                     timeout_millis)
                if result == _winapi.WAIT_TIMEOUT:
                    raise TimeoutExpired(self.args, timeout)
                self.returncode = _winapi.GetExitCodeProcess(self._handle)
            return self.returncode
def _readerthread(self, fh, buffer):
buffer.append(fh.read())
fh.close()
        def _communicate(self, input, endtime, orig_timeout):
            """Thread-based communicate() core (Windows).

            One daemon reader thread per output pipe; raises
            TimeoutExpired (leaving the threads running and the fds open
            so communicate() can be retried) if a deadline passes.
            """
            # Start reader threads feeding into a list hanging off of this
            # object, unless they've already been started.
            if self.stdout and not hasattr(self, "_stdout_buff"):
                self._stdout_buff = []
                self.stdout_thread = \
                    threading.Thread(target=self._readerthread,
                                     args=(self.stdout, self._stdout_buff))
                self.stdout_thread.daemon = True
                self.stdout_thread.start()
            if self.stderr and not hasattr(self, "_stderr_buff"):
                self._stderr_buff = []
                self.stderr_thread = \
                    threading.Thread(target=self._readerthread,
                                     args=(self.stderr, self._stderr_buff))
                self.stderr_thread.daemon = True
                self.stderr_thread.start()
            if self.stdin:
                self._stdin_write(input)
            # Wait for the reader threads, or time out.  If we time out, the
            # threads remain reading and the fds left open in case the user
            # calls communicate again.
            if self.stdout is not None:
                self.stdout_thread.join(self._remaining_time(endtime))
                if self.stdout_thread.is_alive():
                    raise TimeoutExpired(self.args, orig_timeout)
            if self.stderr is not None:
                self.stderr_thread.join(self._remaining_time(endtime))
                if self.stderr_thread.is_alive():
                    raise TimeoutExpired(self.args, orig_timeout)
            # Collect the output from and close both pipes, now that we know
            # both have been read successfully.
            stdout = None
            stderr = None
            if self.stdout:
                stdout = self._stdout_buff
                self.stdout.close()
            if self.stderr:
                stderr = self._stderr_buff
                self.stderr.close()
            # All data exchanged.  Translate lists into strings.
            stdout = stdout[0] if stdout else None
            stderr = stderr[0] if stderr else None
            return (stdout, stderr)
        def send_signal(self, sig):
            """Send a signal to the process.

            Only SIGTERM (mapped to terminate()), CTRL_C_EVENT and
            CTRL_BREAK_EVENT are supported on Windows; anything else
            raises ValueError.
            """
            # Don't signal a process that we know has already died.
            if self.returncode is not None:
                return
            if sig == signal.SIGTERM:
                self.terminate()
            elif sig == signal.CTRL_C_EVENT:
                os.kill(self.pid, signal.CTRL_C_EVENT)
            elif sig == signal.CTRL_BREAK_EVENT:
                os.kill(self.pid, signal.CTRL_BREAK_EVENT)
            else:
                raise ValueError("Unsupported signal: {}".format(sig))
        def terminate(self):
            """Terminates the process."""
            # Don't terminate a process that we know has already died.
            if self.returncode is not None:
                return
            try:
                _winapi.TerminateProcess(self._handle, 1)
            except PermissionError:
                # ERROR_ACCESS_DENIED (winerror 5) is received when the
                # process already died.
                rc = _winapi.GetExitCodeProcess(self._handle)
                if rc == _winapi.STILL_ACTIVE:
                    raise
                self.returncode = rc
        # On Windows, kill() is the same operation as terminate().
        kill = terminate
    else:
        #
        # POSIX methods
        #
        def _get_handles(self, stdin, stdout, stderr):
            """Construct and return tuple with IO objects:
            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite

            Each position is -1 when that stream is inherited from the
            parent; otherwise it is a raw file descriptor.
            """
            p2cread, p2cwrite = -1, -1
            c2pread, c2pwrite = -1, -1
            errread, errwrite = -1, -1
            if stdin is None:
                pass
            elif stdin == PIPE:
                p2cread, p2cwrite = os.pipe()
            elif stdin == DEVNULL:
                p2cread = self._get_devnull()
            elif isinstance(stdin, int):
                p2cread = stdin
            else:
                # Assuming file-like object
                p2cread = stdin.fileno()
            if stdout is None:
                pass
            elif stdout == PIPE:
                c2pread, c2pwrite = os.pipe()
            elif stdout == DEVNULL:
                c2pwrite = self._get_devnull()
            elif isinstance(stdout, int):
                c2pwrite = stdout
            else:
                # Assuming file-like object
                c2pwrite = stdout.fileno()
            if stderr is None:
                pass
            elif stderr == PIPE:
                errread, errwrite = os.pipe()
            elif stderr == STDOUT:
                if c2pwrite != -1:
                    errwrite = c2pwrite
                else: # child's stdout is not set, use parent's stdout
                    errwrite = sys.__stdout__.fileno()
            elif stderr == DEVNULL:
                errwrite = self._get_devnull()
            elif isinstance(stderr, int):
                errwrite = stderr
            else:
                # Assuming file-like object
                errwrite = stderr.fileno()
            return (p2cread, p2cwrite,
                    c2pread, c2pwrite,
                    errread, errwrite)
        def _posix_spawn(self, args, executable, env, restore_signals,
                         p2cread, p2cwrite,
                         c2pread, c2pwrite,
                         errread, errwrite):
            """Execute program using os.posix_spawn().

            The parent's pipe ends are closed in the child via
            POSIX_SPAWN_CLOSE file actions, and the child's ends are
            dup2'd onto fds 0/1/2.
            """
            if env is None:
                env = os.environ
            kwargs = {}
            if restore_signals:
                # See _Py_RestoreSignals() in Python/pylifecycle.c
                sigset = []
                for signame in ('SIGPIPE', 'SIGXFZ', 'SIGXFSZ'):
                    signum = getattr(signal, signame, None)
                    if signum is not None:
                        sigset.append(signum)
                kwargs['setsigdef'] = sigset
            file_actions = []
            for fd in (p2cwrite, c2pread, errread):
                if fd != -1:
                    file_actions.append((os.POSIX_SPAWN_CLOSE, fd))
            for fd, fd2 in (
                (p2cread, 0),
                (c2pwrite, 1),
                (errwrite, 2),
            ):
                if fd != -1:
                    file_actions.append((os.POSIX_SPAWN_DUP2, fd, fd2))
            if file_actions:
                kwargs['file_actions'] = file_actions
            self.pid = os.posix_spawn(executable, args, env, **kwargs)
            self._child_created = True
            # Close our copies of the child's pipe ends.
            self._close_pipe_fds(p2cread, p2cwrite,
                                 c2pread, c2pwrite,
                                 errread, errwrite)
        def _execute_child(self, args, executable, preexec_fn, close_fds,
                           pass_fds, cwd, env,
                           startupinfo, creationflags, shell,
                           p2cread, p2cwrite,
                           c2pread, c2pwrite,
                           errread, errwrite,
                           restore_signals,
                           gid, gids, uid, umask,
                           start_new_session):
            """Execute program (POSIX version).

            Fast path: os.posix_spawn() when no feature that requires
            fork+exec is in use.  Otherwise fork_exec() is used and any
            exec failure in the child is reported back through a
            dedicated error pipe and re-raised here in the parent.
            """
            if isinstance(args, (str, bytes)):
                args = [args]
            elif isinstance(args, os.PathLike):
                if shell:
                    raise TypeError('path-like args is not allowed when '
                                    'shell is true')
                args = [args]
            else:
                args = list(args)
            if shell:
                # On Android the default shell is at '/system/bin/sh'.
                unix_shell = ('/system/bin/sh' if
                          hasattr(sys, 'getandroidapilevel') else '/bin/sh')
                args = [unix_shell, "-c"] + args
                if executable:
                    args[0] = executable
            if executable is None:
                executable = args[0]
            sys.audit("subprocess.Popen", executable, args, cwd, env)
            if (_USE_POSIX_SPAWN
                    and os.path.dirname(executable)
                    and preexec_fn is None
                    and not close_fds
                    and not pass_fds
                    and cwd is None
                    and (p2cread == -1 or p2cread > 2)
                    and (c2pwrite == -1 or c2pwrite > 2)
                    and (errwrite == -1 or errwrite > 2)
                    and not start_new_session
                    and gid is None
                    and gids is None
                    and uid is None
                    and umask < 0):
                self._posix_spawn(args, executable, env, restore_signals,
                                  p2cread, p2cwrite,
                                  c2pread, c2pwrite,
                                  errread, errwrite)
                return
            orig_executable = executable
            # For transferring possible exec failure from child to parent.
            # Data format: "exception name:hex errno:description"
            # Pickle is not used; it is complex and involves memory allocation.
            errpipe_read, errpipe_write = os.pipe()
            # errpipe_write must not be in the standard io 0, 1, or 2 fd range.
            low_fds_to_close = []
            while errpipe_write < 3:
                low_fds_to_close.append(errpipe_write)
                errpipe_write = os.dup(errpipe_write)
            for low_fd in low_fds_to_close:
                os.close(low_fd)
            try:
                try:
                    # We must avoid complex work that could involve
                    # malloc or free in the child process to avoid
                    # potential deadlocks, thus we do all this here.
                    # and pass it to fork_exec()
                    if env is not None:
                        env_list = []
                        for k, v in env.items():
                            k = os.fsencode(k)
                            if b'=' in k:
                                raise ValueError("illegal environment variable name")
                            env_list.append(k + b'=' + os.fsencode(v))
                    else:
                        env_list = None  # Use execv instead of execve.
                    executable = os.fsencode(executable)
                    if os.path.dirname(executable):
                        executable_list = (executable,)
                    else:
                        # This matches the behavior of os._execvpe().
                        executable_list = tuple(
                            os.path.join(os.fsencode(dir), executable)
                            for dir in os.get_exec_path(env))
                    fds_to_keep = set(pass_fds)
                    fds_to_keep.add(errpipe_write)
                    self.pid = _posixsubprocess.fork_exec(
                            args, executable_list,
                            close_fds, tuple(sorted(map(int, fds_to_keep))),
                            cwd, env_list,
                            p2cread, p2cwrite, c2pread, c2pwrite,
                            errread, errwrite,
                            errpipe_read, errpipe_write,
                            restore_signals, start_new_session,
                            gid, gids, uid, umask,
                            preexec_fn)
                    self._child_created = True
                finally:
                    # be sure the FD is closed no matter what
                    os.close(errpipe_write)
                self._close_pipe_fds(p2cread, p2cwrite,
                                     c2pread, c2pwrite,
                                     errread, errwrite)
                # Wait for exec to fail or succeed; possibly raising an
                # exception (limited in size)
                errpipe_data = bytearray()
                while True:
                    part = os.read(errpipe_read, 50000)
                    errpipe_data += part
                    if not part or len(errpipe_data) > 50000:
                        break
            finally:
                # be sure the FD is closed no matter what
                os.close(errpipe_read)
            if errpipe_data:
                # exec failed in the child: reap it, then reconstruct and
                # re-raise the child's exception here in the parent.
                try:
                    pid, sts = os.waitpid(self.pid, 0)
                    if pid == self.pid:
                        self._handle_exitstatus(sts)
                    else:
                        self.returncode = sys.maxsize
                except ChildProcessError:
                    pass
                try:
                    exception_name, hex_errno, err_msg = (
                            errpipe_data.split(b':', 2))
                    # The encoding here should match the encoding
                    # written in by the subprocess implementations
                    # like _posixsubprocess
                    err_msg = err_msg.decode()
                except ValueError:
                    exception_name = b'SubprocessError'
                    hex_errno = b'0'
                    err_msg = 'Bad exception data from child: {!r}'.format(
                                  bytes(errpipe_data))
                child_exception_type = getattr(
                        builtins, exception_name.decode('ascii'),
                        SubprocessError)
                if issubclass(child_exception_type, OSError) and hex_errno:
                    errno_num = int(hex_errno, 16)
                    child_exec_never_called = (err_msg == "noexec")
                    if child_exec_never_called:
                        err_msg = ""
                        # The error must be from chdir(cwd).
                        err_filename = cwd
                    else:
                        err_filename = orig_executable
                    if errno_num != 0:
                        err_msg = os.strerror(errno_num)
                    raise child_exception_type(errno_num, err_msg, err_filename)
                raise child_exception_type(err_msg)
def _handle_exitstatus(self, sts,
waitstatus_to_exitcode=os.waitstatus_to_exitcode,
_WIFSTOPPED=os.WIFSTOPPED,
_WSTOPSIG=os.WSTOPSIG):
"""All callers to this function MUST hold self._waitpid_lock."""
# This method is called (indirectly) by __del__, so it cannot
# refer to anything outside of its local scope.
if _WIFSTOPPED(sts):
self.returncode = -_WSTOPSIG(sts)
else:
self.returncode = waitstatus_to_exitcode(sts)
        def _internal_poll(self, _deadstate=None, _waitpid=os.waitpid,
                _WNOHANG=os.WNOHANG, _ECHILD=errno.ECHILD):
            """Check if child process has terminated.  Returns returncode
            attribute.

            This method is called by __del__, so it cannot reference anything
            outside of the local scope (nor can any methods it calls) — the
            os/errno values are therefore bound as default arguments.
            """
            if self.returncode is None:
                if not self._waitpid_lock.acquire(False):
                    # Something else is busy calling waitpid.  Don't allow two
                    # at once.  We know nothing yet.
                    return None
                try:
                    if self.returncode is not None:
                        return self.returncode  # Another thread waited.
                    pid, sts = _waitpid(self.pid, _WNOHANG)
                    if pid == self.pid:
                        self._handle_exitstatus(sts)
                except OSError as e:
                    if _deadstate is not None:
                        self.returncode = _deadstate
                    elif e.errno == _ECHILD:
                        # This happens if SIGCLD is set to be ignored or
                        # waiting for child processes has otherwise been
                        # disabled for our process.  This child is dead, we
                        # can't get the status.
                        # http://bugs.python.org/issue15756
                        self.returncode = 0
                finally:
                    self._waitpid_lock.release()
            return self.returncode
def _try_wait(self, wait_flags):
"""All callers to this function MUST hold self._waitpid_lock."""
try:
(pid, sts) = os.waitpid(self.pid, wait_flags)
except ChildProcessError:
# This happens if SIGCLD is set to be ignored or waiting
# for child processes has otherwise been disabled for our
# process. This child is dead, we can't get the status.
pid = self.pid
sts = 0
return (pid, sts)
        def _wait(self, timeout):
            """Internal implementation of wait() on POSIX.

            With a timeout, polls waitpid(WNOHANG) with exponential
            backoff (capped at 50 ms); without one, blocks in waitpid.
            """
            if self.returncode is not None:
                return self.returncode
            if timeout is not None:
                endtime = _time() + timeout
                # Enter a busy loop if we have a timeout.  This busy loop was
                # cribbed from Lib/threading.py in Thread.wait() at r71065.
                delay = 0.0005 # 500 us -> initial delay of 1 ms
                while True:
                    if self._waitpid_lock.acquire(False):
                        try:
                            if self.returncode is not None:
                                break  # Another thread waited.
                            (pid, sts) = self._try_wait(os.WNOHANG)
                            assert pid == self.pid or pid == 0
                            if pid == self.pid:
                                self._handle_exitstatus(sts)
                                break
                        finally:
                            self._waitpid_lock.release()
                    remaining = self._remaining_time(endtime)
                    if remaining <= 0:
                        raise TimeoutExpired(self.args, timeout)
                    delay = min(delay * 2, remaining, .05)
                    time.sleep(delay)
            else:
                while self.returncode is None:
                    with self._waitpid_lock:
                        if self.returncode is not None:
                            break  # Another thread waited.
                        (pid, sts) = self._try_wait(0)
                        # Check the pid and loop as waitpid has been known to
                        # return 0 even without WNOHANG in odd situations.
                        # http://bugs.python.org/issue14396.
                        if pid == self.pid:
                            self._handle_exitstatus(sts)
            return self.returncode
        def _communicate(self, input, endtime, orig_timeout):
            """Selector-based communicate() core (POSIX).

            Multiplexes writes to stdin with reads from stdout/stderr
            until all registered pipes are done or the deadline passes
            (TimeoutExpired leaves state intact so communicate() can be
            retried).
            """
            if self.stdin and not self._communication_started:
                # Flush stdio buffer.  This might block, if the user has
                # been writing to .stdin in an uncontrolled fashion.
                try:
                    self.stdin.flush()
                except BrokenPipeError:
                    pass  # communicate() must ignore BrokenPipeError.
                if not input:
                    try:
                        self.stdin.close()
                    except BrokenPipeError:
                        pass  # communicate() must ignore BrokenPipeError.
            stdout = None
            stderr = None
            # Only create this mapping if we haven't already.
            if not self._communication_started:
                self._fileobj2output = {}
                if self.stdout:
                    self._fileobj2output[self.stdout] = []
                if self.stderr:
                    self._fileobj2output[self.stderr] = []
            if self.stdout:
                stdout = self._fileobj2output[self.stdout]
            if self.stderr:
                stderr = self._fileobj2output[self.stderr]
            self._save_input(input)
            if self._input:
                input_view = memoryview(self._input)
            with _PopenSelector() as selector:
                if self.stdin and input:
                    selector.register(self.stdin, selectors.EVENT_WRITE)
                if self.stdout and not self.stdout.closed:
                    selector.register(self.stdout, selectors.EVENT_READ)
                if self.stderr and not self.stderr.closed:
                    selector.register(self.stderr, selectors.EVENT_READ)
                while selector.get_map():
                    timeout = self._remaining_time(endtime)
                    if timeout is not None and timeout < 0:
                        self._check_timeout(endtime, orig_timeout,
                                            stdout, stderr,
                                            skip_check_and_raise=True)
                        raise RuntimeError(  # Impossible :)
                            '_check_timeout(..., skip_check_and_raise=True) '
                            'failed to raise TimeoutExpired.')
                    ready = selector.select(timeout)
                    self._check_timeout(endtime, orig_timeout, stdout, stderr)
                    # XXX Rewrite these to use non-blocking I/O on the file
                    # objects; they are no longer using C stdio!
                    for key, events in ready:
                        if key.fileobj is self.stdin:
                            # Write at most _PIPE_BUF bytes so the write
                            # cannot block.
                            chunk = input_view[self._input_offset :
                                               self._input_offset + _PIPE_BUF]
                            try:
                                self._input_offset += os.write(key.fd, chunk)
                            except BrokenPipeError:
                                selector.unregister(key.fileobj)
                                key.fileobj.close()
                            else:
                                if self._input_offset >= len(self._input):
                                    selector.unregister(key.fileobj)
                                    key.fileobj.close()
                        elif key.fileobj in (self.stdout, self.stderr):
                            data = os.read(key.fd, 32768)
                            if not data:
                                # EOF on this pipe.
                                selector.unregister(key.fileobj)
                                key.fileobj.close()
                            self._fileobj2output[key.fileobj].append(data)
            self.wait(timeout=self._remaining_time(endtime))
            # All data exchanged.  Translate lists into strings.
            if stdout is not None:
                stdout = b''.join(stdout)
            if stderr is not None:
                stderr = b''.join(stderr)
            # Translate newlines, if requested.
            # This also turns bytes into strings.
            if self.text_mode:
                if stdout is not None:
                    stdout = self._translate_newlines(stdout,
                                                      self.stdout.encoding,
                                                      self.stdout.errors)
                if stderr is not None:
                    stderr = self._translate_newlines(stderr,
                                                      self.stderr.encoding,
                                                      self.stderr.errors)
            return (stdout, stderr)
def _save_input(self, input):
# This method is called from the _communicate_with_*() methods
# so that if we time out while communicating, we can continue
# sending input if we retry.
if self.stdin and self._input is None:
self._input_offset = 0
self._input = input
if input is not None and self.text_mode:
self._input = self._input.encode(self.stdin.encoding,
self.stdin.errors)
        def send_signal(self, sig):
            """Send a signal to the process."""
            # bpo-38630: Polling reduces the risk of sending a signal to the
            # wrong process if the process completed, the Popen.returncode
            # attribute is still None, and the pid has been reassigned
            # (recycled) to a new different process. This race condition can
            # happens in two cases.
            #
            # Case 1. Thread A calls Popen.poll(), thread B calls
            # Popen.send_signal(). In thread A, waitpid() succeed and returns
            # the exit status. Thread B calls kill() because poll() in thread A
            # did not set returncode yet. Calling poll() in thread B prevents
            # the race condition thanks to Popen._waitpid_lock.
            #
            # Case 2. waitpid(pid, 0) has been called directly, without
            # using Popen methods: returncode is still None is this case.
            # Calling Popen.poll() will set returncode to a default value,
            # since waitpid() fails with ProcessLookupError.
            self.poll()
            if self.returncode is not None:
                # Skip signalling a process that we know has already died.
                return
            # The race condition can still happen if the race condition
            # described above happens between the returncode test
            # and the kill() call.
            try:
                os.kill(self.pid, sig)
            except ProcessLookupError:
                # Suppress the race condition error; bpo-40550.
                pass
def terminate(self):
"""Terminate the process with SIGTERM
"""
self.send_signal(signal.SIGTERM)
def kill(self):
"""Kill the process with SIGKILL
"""
self.send_signal(signal.SIGKILL)
|
#!/usr/bin/env python3
"""
Rules for building C/API module with f2py2e.
Here is a skeleton of a new wrapper function (13Dec2001):
wrapper_function(args)
declarations
get_python_arguments, say, `a' and `b'
get_a_from_python
if (successful) {
get_b_from_python
if (successful) {
callfortran
if (successful) {
put_a_to_python
if (successful) {
put_b_to_python
if (successful) {
buildvalue = ...
}
}
}
}
cleanup_b
}
cleanup_a
return buildvalue
Copyright 1999,2000 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the NumPy License.
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Date: 2005/08/30 08:58:42 $
Pearu Peterson
"""
__version__ = "$Revision: 1.129 $"[10:-1]
from . import __version__
f2py_version = __version__.version
from .. import version as _numpy_version
numpy_version = _numpy_version.version
import os
import time
import copy
from .auxfuncs import (
applyrules, debugcapi, dictappend, errmess, gentitle, getargs2,
hascallstatement, hasexternals, hasinitvalue, hasnote, hasresultnote,
isarray, isarrayofstrings, iscomplex, iscomplexarray,
iscomplexfunction, iscomplexfunction_warn, isdummyroutine, isexternal,
isfunction, isfunction_wrap, isint1array, isintent_aux, isintent_c,
isintent_callback, isintent_copy, isintent_hide, isintent_inout,
isintent_nothide, isintent_out, isintent_overwrite, islogical,
islong_complex, islong_double, islong_doublefunction, islong_long,
islong_longfunction, ismoduleroutine, isoptional, isrequired, isscalar,
issigned_long_longarray, isstring, isstringarray, isstringfunction,
issubroutine, issubroutine_wrap, isthreadsafe, isunsigned,
isunsigned_char, isunsigned_chararray, isunsigned_long_long,
isunsigned_long_longarray, isunsigned_short, isunsigned_shortarray,
l_and, l_not, l_or, outmess, replace, stripcomma, requiresf90wrapper
)
from . import capi_maps
from . import cfuncs
from . import common_rules
from . import use_rules
from . import f90mod_rules
from . import func2subr
# Command-line / build options; populated by buildmodule's caller.
options = {}

# Separator table handed to applyrules via the 'separatorsfor' entries of
# routine_rules/aux_rules below: generated fragments for each of these keys
# are joined with a newline when rule dictionaries are merged.
# NOTE: historically 'need_cfuncs' used ',' as its separator:
#   for k in ['need_cfuncs']: sepdict[k]=','
sepdict = {
    key: '\n'
    for key in (
        'decl',
        'frompyobj',
        'cleanupfrompyobj',
        'topyarr', 'method',
        'pyobjfrom', 'closepyobjfrom',
        'freemem',
        'userincludes',
        'includes0', 'includes', 'typedefs', 'typedefs_generated',
        'cppmacros', 'cfuncs', 'callbacks',
        'latexdoc',
        'restdoc',
        'routine_defs', 'externroutines',
        'initf2pywraphooks',
        'commonhooks', 'initcommonhooks',
        'f90modhooks', 'initf90modhooks',
    )
}
#################### Rules for C/API module #################

# Timestamp embedded into the header of generated C sources.  Honors
# SOURCE_DATE_EPOCH (reproducible-builds convention) when set, otherwise
# falls back to the current time.
generationtime = int(os.environ.get('SOURCE_DATE_EPOCH', time.time()))
module_rules = {
'modulebody': """\
/* File: #modulename#module.c
* This file is auto-generated with f2py (version:#f2py_version#).
* f2py is a Fortran to Python Interface Generator (FPIG), Second Edition,
* written by Pearu Peterson <pearu@cens.ioc.ee>.
* Generation date: """ + time.asctime(time.gmtime(generationtime)) + """
* Do not edit this file directly unless you know what you are doing!!!
*/
#ifdef __cplusplus
extern \"C\" {
#endif
""" + gentitle("See f2py2e/cfuncs.py: includes") + """
#includes#
#includes0#
""" + gentitle("See f2py2e/rules.py: mod_rules['modulebody']") + """
static PyObject *#modulename#_error;
static PyObject *#modulename#_module;
""" + gentitle("See f2py2e/cfuncs.py: typedefs") + """
#typedefs#
""" + gentitle("See f2py2e/cfuncs.py: typedefs_generated") + """
#typedefs_generated#
""" + gentitle("See f2py2e/cfuncs.py: cppmacros") + """
#cppmacros#
""" + gentitle("See f2py2e/cfuncs.py: cfuncs") + """
#cfuncs#
""" + gentitle("See f2py2e/cfuncs.py: userincludes") + """
#userincludes#
""" + gentitle("See f2py2e/capi_rules.py: usercode") + """
#usercode#
/* See f2py2e/rules.py */
#externroutines#
""" + gentitle("See f2py2e/capi_rules.py: usercode1") + """
#usercode1#
""" + gentitle("See f2py2e/cb_rules.py: buildcallback") + """
#callbacks#
""" + gentitle("See f2py2e/rules.py: buildapi") + """
#body#
""" + gentitle("See f2py2e/f90mod_rules.py: buildhooks") + """
#f90modhooks#
""" + gentitle("See f2py2e/rules.py: module_rules['modulebody']") + """
""" + gentitle("See f2py2e/common_rules.py: buildhooks") + """
#commonhooks#
""" + gentitle("See f2py2e/rules.py") + """
static FortranDataDef f2py_routine_defs[] = {
#routine_defs#
\t{NULL}
};
static PyMethodDef f2py_module_methods[] = {
#pymethoddef#
\t{NULL,NULL}
};
static struct PyModuleDef moduledef = {
\tPyModuleDef_HEAD_INIT,
\t"#modulename#",
\tNULL,
\t-1,
\tf2py_module_methods,
\tNULL,
\tNULL,
\tNULL,
\tNULL
};
PyMODINIT_FUNC PyInit_#modulename#(void) {
\tint i;
\tPyObject *m,*d, *s, *tmp;
\tm = #modulename#_module = PyModule_Create(&moduledef);
\tPy_SET_TYPE(&PyFortran_Type, &PyType_Type);
\timport_array();
\tif (PyErr_Occurred())
\t\t{PyErr_SetString(PyExc_ImportError, \"can't initialize module #modulename# (failed to import numpy)\"); return m;}
\td = PyModule_GetDict(m);
\ts = PyUnicode_FromString(\"$R""" + """evision: $\");
\tPyDict_SetItemString(d, \"__version__\", s);
\tPy_DECREF(s);
\ts = PyUnicode_FromString(
\t\t\"This module '#modulename#' is auto-generated with f2py (version:#f2py_version#).\\nFunctions:\\n\"\n#docs#\".\");
\tPyDict_SetItemString(d, \"__doc__\", s);
\tPy_DECREF(s);
\ts = PyUnicode_FromString(\"""" + numpy_version + """\");
\tPyDict_SetItemString(d, \"__f2py_numpy_version__\", s);
\tPy_DECREF(s);
\t#modulename#_error = PyErr_NewException (\"#modulename#.error\", NULL, NULL);
\t/*
\t * Store the error object inside the dict, so that it could get deallocated.
\t * (in practice, this is a module, so it likely will not and cannot.)
\t */
\tPyDict_SetItemString(d, \"_#modulename#_error\", #modulename#_error);
\tPy_DECREF(#modulename#_error);
\tfor(i=0;f2py_routine_defs[i].name!=NULL;i++) {
\t\ttmp = PyFortranObject_NewAsAttr(&f2py_routine_defs[i]);
\t\tPyDict_SetItemString(d, f2py_routine_defs[i].name, tmp);
\t\tPy_DECREF(tmp);
\t}
#initf2pywraphooks#
#initf90modhooks#
#initcommonhooks#
#interface_usercode#
#ifdef F2PY_REPORT_ATEXIT
\tif (! PyErr_Occurred())
\t\ton_exit(f2py_report_on_exit,(void*)\"#modulename#\");
#endif
\treturn m;
}
#ifdef __cplusplus
}
#endif
""",
'separatorsfor': {'latexdoc': '\n\n',
'restdoc': '\n\n'},
'latexdoc': ['\\section{Module \\texttt{#texmodulename#}}\n',
'#modnote#\n',
'#latexdoc#'],
'restdoc': ['Module #modulename#\n' + '=' * 80,
'\n#restdoc#']
}
# Default rule set applied once per module: seeds each section with an
# "eof" marker comment so later fragments are appended before it.  The
# 'modnote' entry is predicate-keyed: hasnote selects '#note#', otherwise ''.
defmod_rules = [
    {'body': '/*eof body*/',
     'method': '/*eof method*/',
     'externroutines': '/*eof externroutines*/',
     'routine_defs': '/*eof routine_defs*/',
     'initf90modhooks': '/*eof initf90modhooks*/',
     'initf2pywraphooks': '/*eof initf2pywraphooks*/',
     'initcommonhooks': '/*eof initcommonhooks*/',
     'latexdoc': '',
     'restdoc': '',
     'modnote': {hasnote: '#note#', l_not(hasnote): ''},
     }
]
routine_rules = {
'separatorsfor': sepdict,
'body': """
#begintitle#
static char doc_#apiname#[] = \"\\\n#docreturn##name#(#docsignatureshort#)\\n\\nWrapper for ``#name#``.\\\n\\n#docstrsigns#\";
/* #declfortranroutine# */
static PyObject *#apiname#(const PyObject *capi_self,
PyObject *capi_args,
PyObject *capi_keywds,
#functype# (*f2py_func)(#callprotoargument#)) {
PyObject * volatile capi_buildvalue = NULL;
volatile int f2py_success = 1;
#decl#
static char *capi_kwlist[] = {#kwlist##kwlistopt##kwlistxa#NULL};
#usercode#
#routdebugenter#
#ifdef F2PY_REPORT_ATEXIT
f2py_start_clock();
#endif
if (!PyArg_ParseTupleAndKeywords(capi_args,capi_keywds,\\
\"#argformat#|#keyformat##xaformat#:#pyname#\",\\
capi_kwlist#args_capi##keys_capi##keys_xa#))\n return NULL;
#frompyobj#
/*end of frompyobj*/
#ifdef F2PY_REPORT_ATEXIT
f2py_start_call_clock();
#endif
#callfortranroutine#
if (PyErr_Occurred())
f2py_success = 0;
#ifdef F2PY_REPORT_ATEXIT
f2py_stop_call_clock();
#endif
/*end of callfortranroutine*/
if (f2py_success) {
#pyobjfrom#
/*end of pyobjfrom*/
CFUNCSMESS(\"Building return value.\\n\");
capi_buildvalue = Py_BuildValue(\"#returnformat#\"#return#);
/*closepyobjfrom*/
#closepyobjfrom#
} /*if (f2py_success) after callfortranroutine*/
/*cleanupfrompyobj*/
#cleanupfrompyobj#
if (capi_buildvalue == NULL) {
#routdebugfailure#
} else {
#routdebugleave#
}
CFUNCSMESS(\"Freeing memory.\\n\");
#freemem#
#ifdef F2PY_REPORT_ATEXIT
f2py_stop_clock();
#endif
return capi_buildvalue;
}
#endtitle#
""",
'routine_defs': '#routine_def#',
'initf2pywraphooks': '#initf2pywraphook#',
'externroutines': '#declfortranroutine#',
'doc': '#docreturn##name#(#docsignature#)',
'docshort': '#docreturn##name#(#docsignatureshort#)',
'docs': '"\t#docreturn##name#(#docsignature#)\\n"\n',
'need': ['arrayobject.h', 'CFUNCSMESS', 'MINMAX'],
'cppmacros': {debugcapi: '#define DEBUGCFUNCS'},
'latexdoc': ['\\subsection{Wrapper function \\texttt{#texname#}}\n',
"""
\\noindent{{}\\verb@#docreturn##name#@{}}\\texttt{(#latexdocsignatureshort#)}
#routnote#
#latexdocstrsigns#
"""],
'restdoc': ['Wrapped function ``#name#``\n' + '-' * 80,
]
}
################## Rules for C/API function ##############
rout_rules = [
{ # Init
'separatorsfor': {'callfortranroutine': '\n', 'routdebugenter': '\n', 'decl': '\n',
'routdebugleave': '\n', 'routdebugfailure': '\n',
'setjmpbuf': ' || ',
'docstrreq': '\n', 'docstropt': '\n', 'docstrout': '\n',
'docstrcbs': '\n', 'docstrsigns': '\\n"\n"',
'latexdocstrsigns': '\n',
'latexdocstrreq': '\n', 'latexdocstropt': '\n',
'latexdocstrout': '\n', 'latexdocstrcbs': '\n',
},
'kwlist': '', 'kwlistopt': '', 'callfortran': '', 'callfortranappend': '',
'docsign': '', 'docsignopt': '', 'decl': '/*decl*/',
'freemem': '/*freemem*/',
'docsignshort': '', 'docsignoptshort': '',
'docstrsigns': '', 'latexdocstrsigns': '',
'docstrreq': '\\nParameters\\n----------',
'docstropt': '\\nOther Parameters\\n----------------',
'docstrout': '\\nReturns\\n-------',
'docstrcbs': '\\nNotes\\n-----\\nCall-back functions::\\n',
'latexdocstrreq': '\\noindent Required arguments:',
'latexdocstropt': '\\noindent Optional arguments:',
'latexdocstrout': '\\noindent Return objects:',
'latexdocstrcbs': '\\noindent Call-back functions:',
'args_capi': '', 'keys_capi': '', 'functype': '',
'frompyobj': '/*frompyobj*/',
# this list will be reversed
'cleanupfrompyobj': ['/*end of cleanupfrompyobj*/'],
'pyobjfrom': '/*pyobjfrom*/',
# this list will be reversed
'closepyobjfrom': ['/*end of closepyobjfrom*/'],
'topyarr': '/*topyarr*/', 'routdebugleave': '/*routdebugleave*/',
'routdebugenter': '/*routdebugenter*/',
'routdebugfailure': '/*routdebugfailure*/',
'callfortranroutine': '/*callfortranroutine*/',
'argformat': '', 'keyformat': '', 'need_cfuncs': '',
'docreturn': '', 'return': '', 'returnformat': '', 'rformat': '',
'kwlistxa': '', 'keys_xa': '', 'xaformat': '', 'docsignxa': '', 'docsignxashort': '',
'initf2pywraphook': '',
'routnote': {hasnote: '--- #note#', l_not(hasnote): ''},
}, {
'apiname': 'f2py_rout_#modulename#_#name#',
'pyname': '#modulename#.#name#',
'decl': '',
'_check': l_not(ismoduleroutine)
}, {
'apiname': 'f2py_rout_#modulename#_#f90modulename#_#name#',
'pyname': '#modulename#.#f90modulename#.#name#',
'decl': '',
'_check': ismoduleroutine
}, { # Subroutine
'functype': 'void',
'declfortranroutine': {l_and(l_not(l_or(ismoduleroutine, isintent_c)), l_not(isdummyroutine)): 'extern void #F_FUNC#(#fortranname#,#FORTRANNAME#)(#callprotoargument#);',
l_and(l_not(ismoduleroutine), isintent_c, l_not(isdummyroutine)): 'extern void #fortranname#(#callprotoargument#);',
ismoduleroutine: '',
isdummyroutine: ''
},
'routine_def': {l_not(l_or(ismoduleroutine, isintent_c, isdummyroutine)): '\t{\"#name#\",-1,{{-1}},0,(char *)#F_FUNC#(#fortranname#,#FORTRANNAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine), isintent_c, l_not(isdummyroutine)): '\t{\"#name#\",-1,{{-1}},0,(char *)#fortranname#,(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine), isdummyroutine): '\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'need': {l_and(l_not(l_or(ismoduleroutine, isintent_c)), l_not(isdummyroutine)): 'F_FUNC'},
'callfortranroutine': [
{debugcapi: [
"""\tfprintf(stderr,\"debug-capi:Fortran subroutine `#fortranname#(#callfortran#)\'\\n\");"""]},
{hasexternals: """\
\t\tif (#setjmpbuf#) {
\t\t\tf2py_success = 0;
\t\t} else {"""},
{isthreadsafe: '\t\t\tPy_BEGIN_ALLOW_THREADS'},
{hascallstatement: '''\t\t\t\t#callstatement#;
\t\t\t\t/*(*f2py_func)(#callfortran#);*/'''},
{l_not(l_or(hascallstatement, isdummyroutine))
: '\t\t\t\t(*f2py_func)(#callfortran#);'},
{isthreadsafe: '\t\t\tPy_END_ALLOW_THREADS'},
{hasexternals: """\t\t}"""}
],
'_check': l_and(issubroutine, l_not(issubroutine_wrap)),
}, { # Wrapped function
'functype': 'void',
'declfortranroutine': {l_not(l_or(ismoduleroutine, isdummyroutine)): 'extern void #F_WRAPPEDFUNC#(#name_lower#,#NAME#)(#callprotoargument#);',
isdummyroutine: '',
},
'routine_def': {l_not(l_or(ismoduleroutine, isdummyroutine)): '\t{\"#name#\",-1,{{-1}},0,(char *)#F_WRAPPEDFUNC#(#name_lower#,#NAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
isdummyroutine: '\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'initf2pywraphook': {l_not(l_or(ismoduleroutine, isdummyroutine)): '''
{
extern #ctype# #F_FUNC#(#name_lower#,#NAME#)(void);
PyObject* o = PyDict_GetItemString(d,"#name#");
tmp = F2PyCapsule_FromVoidPtr((void*)#F_FUNC#(#name_lower#,#NAME#),NULL);
PyObject_SetAttrString(o,"_cpointer", tmp);
Py_DECREF(tmp);
s = PyUnicode_FromString("#name#");
PyObject_SetAttrString(o,"__name__", s);
Py_DECREF(s);
}
'''},
'need': {l_not(l_or(ismoduleroutine, isdummyroutine)): ['F_WRAPPEDFUNC', 'F_FUNC']},
'callfortranroutine': [
{debugcapi: [
"""\tfprintf(stderr,\"debug-capi:Fortran subroutine `f2pywrap#name_lower#(#callfortran#)\'\\n\");"""]},
{hasexternals: """\
\tif (#setjmpbuf#) {
\t\tf2py_success = 0;
\t} else {"""},
{isthreadsafe: '\tPy_BEGIN_ALLOW_THREADS'},
{l_not(l_or(hascallstatement, isdummyroutine))
: '\t(*f2py_func)(#callfortran#);'},
{hascallstatement:
'\t#callstatement#;\n\t/*(*f2py_func)(#callfortran#);*/'},
{isthreadsafe: '\tPy_END_ALLOW_THREADS'},
{hasexternals: '\t}'}
],
'_check': isfunction_wrap,
}, { # Wrapped subroutine
'functype': 'void',
'declfortranroutine': {l_not(l_or(ismoduleroutine, isdummyroutine)): 'extern void #F_WRAPPEDFUNC#(#name_lower#,#NAME#)(#callprotoargument#);',
isdummyroutine: '',
},
'routine_def': {l_not(l_or(ismoduleroutine, isdummyroutine)): '\t{\"#name#\",-1,{{-1}},0,(char *)#F_WRAPPEDFUNC#(#name_lower#,#NAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
isdummyroutine: '\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'initf2pywraphook': {l_not(l_or(ismoduleroutine, isdummyroutine)): '''
{
extern void #F_FUNC#(#name_lower#,#NAME#)(void);
PyObject* o = PyDict_GetItemString(d,"#name#");
tmp = F2PyCapsule_FromVoidPtr((void*)#F_FUNC#(#name_lower#,#NAME#),NULL);
PyObject_SetAttrString(o,"_cpointer", tmp);
Py_DECREF(tmp);
s = PyUnicode_FromString("#name#");
PyObject_SetAttrString(o,"__name__", s);
Py_DECREF(s);
}
'''},
'need': {l_not(l_or(ismoduleroutine, isdummyroutine)): ['F_WRAPPEDFUNC', 'F_FUNC']},
'callfortranroutine': [
{debugcapi: [
"""\tfprintf(stderr,\"debug-capi:Fortran subroutine `f2pywrap#name_lower#(#callfortran#)\'\\n\");"""]},
{hasexternals: """\
\tif (#setjmpbuf#) {
\t\tf2py_success = 0;
\t} else {"""},
{isthreadsafe: '\tPy_BEGIN_ALLOW_THREADS'},
{l_not(l_or(hascallstatement, isdummyroutine))
: '\t(*f2py_func)(#callfortran#);'},
{hascallstatement:
'\t#callstatement#;\n\t/*(*f2py_func)(#callfortran#);*/'},
{isthreadsafe: '\tPy_END_ALLOW_THREADS'},
{hasexternals: '\t}'}
],
'_check': issubroutine_wrap,
}, { # Function
'functype': '#ctype#',
'docreturn': {l_not(isintent_hide): '#rname#,'},
'docstrout': '#pydocsignout#',
'latexdocstrout': ['\\item[]{{}\\verb@#pydocsignout#@{}}',
{hasresultnote: '--- #resultnote#'}],
'callfortranroutine': [{l_and(debugcapi, isstringfunction): """\
#ifdef USESCOMPAQFORTRAN
\tfprintf(stderr,\"debug-capi:Fortran function #ctype# #fortranname#(#callcompaqfortran#)\\n\");
#else
\tfprintf(stderr,\"debug-capi:Fortran function #ctype# #fortranname#(#callfortran#)\\n\");
#endif
"""},
{l_and(debugcapi, l_not(isstringfunction)): """\
\tfprintf(stderr,\"debug-capi:Fortran function #ctype# #fortranname#(#callfortran#)\\n\");
"""}
],
'_check': l_and(isfunction, l_not(isfunction_wrap))
}, { # Scalar function
'declfortranroutine': {l_and(l_not(l_or(ismoduleroutine, isintent_c)), l_not(isdummyroutine)): 'extern #ctype# #F_FUNC#(#fortranname#,#FORTRANNAME#)(#callprotoargument#);',
l_and(l_not(ismoduleroutine), isintent_c, l_not(isdummyroutine)): 'extern #ctype# #fortranname#(#callprotoargument#);',
isdummyroutine: ''
},
'routine_def': {l_and(l_not(l_or(ismoduleroutine, isintent_c)), l_not(isdummyroutine)): '\t{\"#name#\",-1,{{-1}},0,(char *)#F_FUNC#(#fortranname#,#FORTRANNAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine), isintent_c, l_not(isdummyroutine)): '\t{\"#name#\",-1,{{-1}},0,(char *)#fortranname#,(f2py_init_func)#apiname#,doc_#apiname#},',
isdummyroutine: '\t{\"#name#\",-1,{{-1}},0,NULL,(f2py_init_func)#apiname#,doc_#apiname#},',
},
'decl': [{iscomplexfunction_warn: '\t#ctype# #name#_return_value={0,0};',
l_not(iscomplexfunction): '\t#ctype# #name#_return_value=0;'},
{iscomplexfunction:
'\tPyObject *#name#_return_value_capi = Py_None;'}
],
'callfortranroutine': [
{hasexternals: """\
\tif (#setjmpbuf#) {
\t\tf2py_success = 0;
\t} else {"""},
{isthreadsafe: '\tPy_BEGIN_ALLOW_THREADS'},
{hascallstatement: '''\t#callstatement#;
/*\t#name#_return_value = (*f2py_func)(#callfortran#);*/
'''},
{l_not(l_or(hascallstatement, isdummyroutine))
: '\t#name#_return_value = (*f2py_func)(#callfortran#);'},
{isthreadsafe: '\tPy_END_ALLOW_THREADS'},
{hasexternals: '\t}'},
{l_and(debugcapi, iscomplexfunction)
: '\tfprintf(stderr,"#routdebugshowvalue#\\n",#name#_return_value.r,#name#_return_value.i);'},
{l_and(debugcapi, l_not(iscomplexfunction)): '\tfprintf(stderr,"#routdebugshowvalue#\\n",#name#_return_value);'}],
'pyobjfrom': {iscomplexfunction: '\t#name#_return_value_capi = pyobj_from_#ctype#1(#name#_return_value);'},
'need': [{l_not(isdummyroutine): 'F_FUNC'},
{iscomplexfunction: 'pyobj_from_#ctype#1'},
{islong_longfunction: 'long_long'},
{islong_doublefunction: 'long_double'}],
'returnformat': {l_not(isintent_hide): '#rformat#'},
'return': {iscomplexfunction: ',#name#_return_value_capi',
l_not(l_or(iscomplexfunction, isintent_hide)): ',#name#_return_value'},
'_check': l_and(isfunction, l_not(isstringfunction), l_not(isfunction_wrap))
}, { # String function # in use for --no-wrap
'declfortranroutine': 'extern void #F_FUNC#(#fortranname#,#FORTRANNAME#)(#callprotoargument#);',
'routine_def': {l_not(l_or(ismoduleroutine, isintent_c)):
'\t{\"#name#\",-1,{{-1}},0,(char *)#F_FUNC#(#fortranname#,#FORTRANNAME#),(f2py_init_func)#apiname#,doc_#apiname#},',
l_and(l_not(ismoduleroutine), isintent_c):
'\t{\"#name#\",-1,{{-1}},0,(char *)#fortranname#,(f2py_init_func)#apiname#,doc_#apiname#},'
},
'decl': ['\t#ctype# #name#_return_value = NULL;',
'\tint #name#_return_value_len = 0;'],
'callfortran':'#name#_return_value,#name#_return_value_len,',
'callfortranroutine':['\t#name#_return_value_len = #rlength#;',
'\tif ((#name#_return_value = (string)malloc(sizeof(char)*(#name#_return_value_len+1))) == NULL) {',
'\t\tPyErr_SetString(PyExc_MemoryError, \"out of memory\");',
'\t\tf2py_success = 0;',
'\t} else {',
"\t\t(#name#_return_value)[#name#_return_value_len] = '\\0';",
'\t}',
'\tif (f2py_success) {',
{hasexternals: """\
\t\tif (#setjmpbuf#) {
\t\t\tf2py_success = 0;
\t\t} else {"""},
{isthreadsafe: '\t\tPy_BEGIN_ALLOW_THREADS'},
"""\
#ifdef USESCOMPAQFORTRAN
\t\t(*f2py_func)(#callcompaqfortran#);
#else
\t\t(*f2py_func)(#callfortran#);
#endif
""",
{isthreadsafe: '\t\tPy_END_ALLOW_THREADS'},
{hasexternals: '\t\t}'},
{debugcapi:
'\t\tfprintf(stderr,"#routdebugshowvalue#\\n",#name#_return_value_len,#name#_return_value);'},
'\t} /* if (f2py_success) after (string)malloc */',
],
'returnformat': '#rformat#',
'return': ',#name#_return_value',
'freemem': '\tSTRINGFREE(#name#_return_value);',
'need': ['F_FUNC', '#ctype#', 'STRINGFREE'],
'_check':l_and(isstringfunction, l_not(isfunction_wrap)) # ???obsolete
},
{ # Debugging
'routdebugenter': '\tfprintf(stderr,"debug-capi:Python C/API function #modulename#.#name#(#docsignature#)\\n");',
'routdebugleave': '\tfprintf(stderr,"debug-capi:Python C/API function #modulename#.#name#: successful.\\n");',
'routdebugfailure': '\tfprintf(stderr,"debug-capi:Python C/API function #modulename#.#name#: failure.\\n");',
'_check': debugcapi
}
]
################ Rules for arguments ##################

# Maps a type predicate (from auxfuncs) to the cfuncs typedef that must be
# emitted into the generated C source when a variable of that kind appears.
typedef_need_dict = {islong_long: 'long_long',
                     islong_double: 'long_double',
                     islong_complex: 'complex_long_double',
                     isunsigned_char: 'unsigned_char',
                     isunsigned_short: 'unsigned_short',
                     isunsigned: 'unsigned',
                     isunsigned_long_long: 'unsigned_long_long',
                     isunsigned_chararray: 'unsigned_char',
                     isunsigned_shortarray: 'unsigned_short',
                     isunsigned_long_longarray: 'unsigned_long_long',
                     issigned_long_longarray: 'long_long',
                     }
# Rules for auxiliary (intent(aux)) variables: locals of the wrapper that
# are never received from or returned to Python, only declared/initialized.
# Same predicate-keyed structure as rout_rules.
aux_rules = [
    {
        'separatorsfor': sepdict
    },
    {  # Common
        'frompyobj': ['\t/* Processing auxiliary variable #varname# */',
                      {debugcapi: '\tfprintf(stderr,"#vardebuginfo#\\n");'}, ],
        'cleanupfrompyobj': '\t/* End of cleaning variable #varname# */',
        'need': typedef_need_dict,
    },
    # Scalars (not complex)
    {  # Common
        'decl': '\t#ctype# #varname# = 0;',
        'need': {hasinitvalue: 'math.h'},
        'frompyobj': {hasinitvalue: '\t#varname# = #init#;'},
        '_check': l_and(isscalar, l_not(iscomplex)),
    },
    {
        'return': ',#varname#',
        'docstrout': '#pydocsignout#',
        'docreturn': '#outvarname#,',
        'returnformat': '#varrformat#',
        '_check': l_and(isscalar, l_not(iscomplex), isintent_out),
    },
    # Complex scalars
    {  # Common
        'decl': '\t#ctype# #varname#;',
        'frompyobj': {hasinitvalue: '\t#varname#.r = #init.r#, #varname#.i = #init.i#;'},
        '_check': iscomplex
    },
    # String
    {  # Common
        'decl': ['\t#ctype# #varname# = NULL;',
                 '\tint slen(#varname#);',
                 ],
        'need': ['len..'],
        '_check': isstring
    },
    # Array
    {  # Common
        'decl': ['\t#ctype# *#varname# = NULL;',
                 '\tnpy_intp #varname#_Dims[#rank#] = {#rank*[-1]#};',
                 '\tconst int #varname#_Rank = #rank#;',
                 ],
        'need': ['len..', {hasinitvalue: 'forcomb'}, {hasinitvalue: 'CFUNCSMESS'}],
        '_check': isarray
    },
    # Scalararray
    {  # Common
        '_check': l_and(isarray, l_not(iscomplexarray))
    }, {  # Not hidden
        '_check': l_and(isarray, l_not(iscomplexarray), isintent_nothide)
    },
    # Integer*1 array
    {'need': '#ctype#',
     '_check': isint1array,
     '_depend': ''
     },
    # Integer*-1 array
    {'need': '#ctype#',
     '_check': isunsigned_chararray,
     '_depend': ''
     },
    # Integer*-2 array
    {'need': '#ctype#',
     '_check': isunsigned_shortarray,
     '_depend': ''
     },
    # Integer*-8 array
    {'need': '#ctype#',
     '_check': isunsigned_long_longarray,
     '_depend': ''
     },
    # Complexarray
    {'need': '#ctype#',
     '_check': iscomplexarray,
     '_depend': ''
     },
    # Stringarray
    {
        'callfortranappend': {isarrayofstrings: 'flen(#varname#),'},
        'need': 'string',
        '_check': isstringarray
    }
]
arg_rules = [
{
'separatorsfor': sepdict
},
{ # Common
'frompyobj': ['\t/* Processing variable #varname# */',
{debugcapi: '\tfprintf(stderr,"#vardebuginfo#\\n");'}, ],
'cleanupfrompyobj': '\t/* End of cleaning variable #varname# */',
'_depend': '',
'need': typedef_need_dict,
},
# Doc signatures
{
'docstropt': {l_and(isoptional, isintent_nothide): '#pydocsign#'},
'docstrreq': {l_and(isrequired, isintent_nothide): '#pydocsign#'},
'docstrout': {isintent_out: '#pydocsignout#'},
'latexdocstropt': {l_and(isoptional, isintent_nothide): ['\\item[]{{}\\verb@#pydocsign#@{}}',
{hasnote: '--- #note#'}]},
'latexdocstrreq': {l_and(isrequired, isintent_nothide): ['\\item[]{{}\\verb@#pydocsign#@{}}',
{hasnote: '--- #note#'}]},
'latexdocstrout': {isintent_out: ['\\item[]{{}\\verb@#pydocsignout#@{}}',
{l_and(hasnote, isintent_hide): '--- #note#',
l_and(hasnote, isintent_nothide): '--- See above.'}]},
'depend': ''
},
# Required/Optional arguments
{
'kwlist': '"#varname#",',
'docsign': '#varname#,',
'_check': l_and(isintent_nothide, l_not(isoptional))
},
{
'kwlistopt': '"#varname#",',
'docsignopt': '#varname#=#showinit#,',
'docsignoptshort': '#varname#,',
'_check': l_and(isintent_nothide, isoptional)
},
# Docstring/BuildValue
{
'docreturn': '#outvarname#,',
'returnformat': '#varrformat#',
'_check': isintent_out
},
# Externals (call-back functions)
{ # Common
'docsignxa': {isintent_nothide: '#varname#_extra_args=(),'},
'docsignxashort': {isintent_nothide: '#varname#_extra_args,'},
'docstropt': {isintent_nothide: '#varname#_extra_args : input tuple, optional\\n Default: ()'},
'docstrcbs': '#cbdocstr#',
'latexdocstrcbs': '\\item[] #cblatexdocstr#',
'latexdocstropt': {isintent_nothide: '\\item[]{{}\\verb@#varname#_extra_args := () input tuple@{}} --- Extra arguments for call-back function {{}\\verb@#varname#@{}}.'},
'decl': [' #cbname#_t #varname#_cb = { Py_None, NULL, 0 };',
' #cbname#_t *#varname#_cb_ptr = &#varname#_cb;',
' PyTupleObject *#varname#_xa_capi = NULL;',
{l_not(isintent_callback):
' #cbname#_typedef #varname#_cptr;'}
],
'kwlistxa': {isintent_nothide: '"#varname#_extra_args",'},
'argformat': {isrequired: 'O'},
'keyformat': {isoptional: 'O'},
'xaformat': {isintent_nothide: 'O!'},
'args_capi': {isrequired: ',&#varname#_cb.capi'},
'keys_capi': {isoptional: ',&#varname#_cb.capi'},
'keys_xa': ',&PyTuple_Type,&#varname#_xa_capi',
'setjmpbuf': '(setjmp(#varname#_cb.jmpbuf))',
'callfortran': {l_not(isintent_callback): '#varname#_cptr,'},
'need': ['#cbname#', 'setjmp.h'],
'_check':isexternal
},
{
'frompyobj': [{l_not(isintent_callback): """\
if(F2PyCapsule_Check(#varname#_cb.capi)) {
#varname#_cptr = F2PyCapsule_AsVoidPtr(#varname#_cb.capi);
} else {
#varname#_cptr = #cbname#;
}
"""}, {isintent_callback: """\
if (#varname#_cb.capi==Py_None) {
#varname#_cb.capi = PyObject_GetAttrString(#modulename#_module,\"#varname#\");
if (#varname#_cb.capi) {
if (#varname#_xa_capi==NULL) {
if (PyObject_HasAttrString(#modulename#_module,\"#varname#_extra_args\")) {
PyObject* capi_tmp = PyObject_GetAttrString(#modulename#_module,\"#varname#_extra_args\");
if (capi_tmp) {
#varname#_xa_capi = (PyTupleObject *)PySequence_Tuple(capi_tmp);
Py_DECREF(capi_tmp);
}
else {
#varname#_xa_capi = (PyTupleObject *)Py_BuildValue(\"()\");
}
if (#varname#_xa_capi==NULL) {
PyErr_SetString(#modulename#_error,\"Failed to convert #modulename#.#varname#_extra_args to tuple.\\n\");
return NULL;
}
}
}
}
if (#varname#_cb.capi==NULL) {
PyErr_SetString(#modulename#_error,\"Callback #varname# not defined (as an argument or module #modulename# attribute).\\n\");
return NULL;
}
}
"""},
"""\
if (create_cb_arglist(#varname#_cb.capi,#varname#_xa_capi,#maxnofargs#,#nofoptargs#,&#varname#_cb.nofargs,&#varname#_cb.args_capi,\"failed in processing argument list for call-back #varname#.\")) {
""",
{debugcapi: ["""\
fprintf(stderr,\"debug-capi:Assuming %d arguments; at most #maxnofargs#(-#nofoptargs#) is expected.\\n\",#varname#_cb.nofargs);
CFUNCSMESSPY(\"for #varname#=\",#cbname#_capi);""",
{l_not(isintent_callback): """ fprintf(stderr,\"#vardebugshowvalue# (call-back in C).\\n\",#cbname#);"""}]},
"""\
CFUNCSMESS(\"Saving callback variables for `#varname#`.\\n\");
#varname#_cb_ptr = swap_active_#cbname#(#varname#_cb_ptr);""",
],
'cleanupfrompyobj':
"""\
CFUNCSMESS(\"Restoring callback variables for `#varname#`.\\n\");
#varname#_cb_ptr = swap_active_#cbname#(#varname#_cb_ptr);
Py_DECREF(#varname#_cb.args_capi);
}""",
'need': ['SWAP', 'create_cb_arglist'],
'_check':isexternal,
'_depend':''
},
# Scalars (not complex)
{ # Common
'decl': '\t#ctype# #varname# = 0;',
'pyobjfrom': {debugcapi: '\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#);'},
'callfortran': {isintent_c: '#varname#,', l_not(isintent_c): '&#varname#,'},
'return': {isintent_out: ',#varname#'},
'_check': l_and(isscalar, l_not(iscomplex))
}, {
'need': {hasinitvalue: 'math.h'},
'_check': l_and(isscalar, l_not(iscomplex)),
}, { # Not hidden
'decl': '\tPyObject *#varname#_capi = Py_None;',
'argformat': {isrequired: 'O'},
'keyformat': {isoptional: 'O'},
'args_capi': {isrequired: ',&#varname#_capi'},
'keys_capi': {isoptional: ',&#varname#_capi'},
'pyobjfrom': {isintent_inout: """\
\tf2py_success = try_pyarr_from_#ctype#(#varname#_capi,&#varname#);
\tif (f2py_success) {"""},
'closepyobjfrom': {isintent_inout: "\t} /*if (f2py_success) of #varname# pyobjfrom*/"},
'need': {isintent_inout: 'try_pyarr_from_#ctype#'},
'_check': l_and(isscalar, l_not(iscomplex), isintent_nothide)
}, {
'frompyobj': [
# hasinitvalue...
# if pyobj is None:
# varname = init
# else
# from_pyobj(varname)
#
# isoptional and noinitvalue...
# if pyobj is not None:
# from_pyobj(varname)
# else:
# varname is uninitialized
#
# ...
# from_pyobj(varname)
#
{hasinitvalue: '\tif (#varname#_capi == Py_None) #varname# = #init#; else',
'_depend': ''},
{l_and(isoptional, l_not(hasinitvalue)): '\tif (#varname#_capi != Py_None)',
'_depend': ''},
{l_not(islogical): '''\
\t\tf2py_success = #ctype#_from_pyobj(&#varname#,#varname#_capi,"#pyname#() #nth# (#varname#) can\'t be converted to #ctype#");
\tif (f2py_success) {'''},
{islogical: '''\
\t\t#varname# = (#ctype#)PyObject_IsTrue(#varname#_capi);
\t\tf2py_success = 1;
\tif (f2py_success) {'''},
],
'cleanupfrompyobj': '\t} /*if (f2py_success) of #varname#*/',
'need': {l_not(islogical): '#ctype#_from_pyobj'},
'_check': l_and(isscalar, l_not(iscomplex), isintent_nothide),
'_depend': ''
}, { # Hidden
'frompyobj': {hasinitvalue: '\t#varname# = #init#;'},
'need': typedef_need_dict,
'_check': l_and(isscalar, l_not(iscomplex), isintent_hide),
'_depend': ''
}, { # Common
'frompyobj': {debugcapi: '\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#);'},
'_check': l_and(isscalar, l_not(iscomplex)),
'_depend': ''
},
# Complex scalars
{ # Common
'decl': '\t#ctype# #varname#;',
'callfortran': {isintent_c: '#varname#,', l_not(isintent_c): '&#varname#,'},
'pyobjfrom': {debugcapi: '\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#.r,#varname#.i);'},
'return': {isintent_out: ',#varname#_capi'},
'_check': iscomplex
}, { # Not hidden
'decl': '\tPyObject *#varname#_capi = Py_None;',
'argformat': {isrequired: 'O'},
'keyformat': {isoptional: 'O'},
'args_capi': {isrequired: ',&#varname#_capi'},
'keys_capi': {isoptional: ',&#varname#_capi'},
'need': {isintent_inout: 'try_pyarr_from_#ctype#'},
'pyobjfrom': {isintent_inout: """\
\t\tf2py_success = try_pyarr_from_#ctype#(#varname#_capi,&#varname#);
\t\tif (f2py_success) {"""},
'closepyobjfrom': {isintent_inout: "\t\t} /*if (f2py_success) of #varname# pyobjfrom*/"},
'_check': l_and(iscomplex, isintent_nothide)
}, {
'frompyobj': [{hasinitvalue: '\tif (#varname#_capi==Py_None) {#varname#.r = #init.r#, #varname#.i = #init.i#;} else'},
{l_and(isoptional, l_not(hasinitvalue))
: '\tif (#varname#_capi != Py_None)'},
'\t\tf2py_success = #ctype#_from_pyobj(&#varname#,#varname#_capi,"#pyname#() #nth# (#varname#) can\'t be converted to #ctype#");'
'\n\tif (f2py_success) {'],
'cleanupfrompyobj': '\t} /*if (f2py_success) of #varname# frompyobj*/',
'need': ['#ctype#_from_pyobj'],
'_check': l_and(iscomplex, isintent_nothide),
'_depend': ''
}, { # Hidden
'decl': {isintent_out: '\tPyObject *#varname#_capi = Py_None;'},
'_check': l_and(iscomplex, isintent_hide)
}, {
'frompyobj': {hasinitvalue: '\t#varname#.r = #init.r#, #varname#.i = #init.i#;'},
'_check': l_and(iscomplex, isintent_hide),
'_depend': ''
}, { # Common
'pyobjfrom': {isintent_out: '\t#varname#_capi = pyobj_from_#ctype#1(#varname#);'},
'need': ['pyobj_from_#ctype#1'],
'_check': iscomplex
}, {
'frompyobj': {debugcapi: '\tfprintf(stderr,"#vardebugshowvalue#\\n",#varname#.r,#varname#.i);'},
'_check': iscomplex,
'_depend': ''
},
# String
{ # Common
'decl': ['\t#ctype# #varname# = NULL;',
'\tint slen(#varname#);',
'\tPyObject *#varname#_capi = Py_None;'],
'callfortran':'#varname#,',
'callfortranappend':'slen(#varname#),',
'pyobjfrom':{debugcapi: '\tfprintf(stderr,"#vardebugshowvalue#\\n",slen(#varname#),#varname#);'},
'return': {isintent_out: ',#varname#'},
'need': ['len..'], # 'STRINGFREE'],
'_check':isstring
}, { # Common
'frompyobj': """\
\tslen(#varname#) = #length#;
\tf2py_success = #ctype#_from_pyobj(&#varname#,&slen(#varname#),#init#,#varname#_capi,\"#ctype#_from_pyobj failed in converting #nth# `#varname#\' of #pyname# to C #ctype#\");
\tif (f2py_success) {""",
'cleanupfrompyobj': """\
\t\tSTRINGFREE(#varname#);
\t} /*if (f2py_success) of #varname#*/""",
'need': ['#ctype#_from_pyobj', 'len..', 'STRINGFREE'],
'_check':isstring,
'_depend':''
}, { # Not hidden
'argformat': {isrequired: 'O'},
'keyformat': {isoptional: 'O'},
'args_capi': {isrequired: ',&#varname#_capi'},
'keys_capi': {isoptional: ',&#varname#_capi'},
'pyobjfrom': {isintent_inout: '''\
\tf2py_success = try_pyarr_from_#ctype#(#varname#_capi,#varname#);
\tif (f2py_success) {'''},
'closepyobjfrom': {isintent_inout: '\t} /*if (f2py_success) of #varname# pyobjfrom*/'},
'need': {isintent_inout: 'try_pyarr_from_#ctype#'},
'_check': l_and(isstring, isintent_nothide)
}, { # Hidden
'_check': l_and(isstring, isintent_hide)
}, {
'frompyobj': {debugcapi: '\tfprintf(stderr,"#vardebugshowvalue#\\n",slen(#varname#),#varname#);'},
'_check': isstring,
'_depend': ''
},
# Array
{ # Common
'decl': ['\t#ctype# *#varname# = NULL;',
'\tnpy_intp #varname#_Dims[#rank#] = {#rank*[-1]#};',
'\tconst int #varname#_Rank = #rank#;',
'\tPyArrayObject *capi_#varname#_tmp = NULL;',
'\tint capi_#varname#_intent = 0;',
],
'callfortran':'#varname#,',
'return':{isintent_out: ',capi_#varname#_tmp'},
'need': 'len..',
'_check': isarray
}, { # intent(overwrite) array
'decl': '\tint capi_overwrite_#varname# = 1;',
'kwlistxa': '"overwrite_#varname#",',
'xaformat': 'i',
'keys_xa': ',&capi_overwrite_#varname#',
'docsignxa': 'overwrite_#varname#=1,',
'docsignxashort': 'overwrite_#varname#,',
'docstropt': 'overwrite_#varname# : input int, optional\\n Default: 1',
'_check': l_and(isarray, isintent_overwrite),
}, {
'frompyobj': '\tcapi_#varname#_intent |= (capi_overwrite_#varname#?0:F2PY_INTENT_COPY);',
'_check': l_and(isarray, isintent_overwrite),
'_depend': '',
},
{ # intent(copy) array
'decl': '\tint capi_overwrite_#varname# = 0;',
'kwlistxa': '"overwrite_#varname#",',
'xaformat': 'i',
'keys_xa': ',&capi_overwrite_#varname#',
'docsignxa': 'overwrite_#varname#=0,',
'docsignxashort': 'overwrite_#varname#,',
'docstropt': 'overwrite_#varname# : input int, optional\\n Default: 0',
'_check': l_and(isarray, isintent_copy),
}, {
'frompyobj': '\tcapi_#varname#_intent |= (capi_overwrite_#varname#?0:F2PY_INTENT_COPY);',
'_check': l_and(isarray, isintent_copy),
'_depend': '',
}, {
'need': [{hasinitvalue: 'forcomb'}, {hasinitvalue: 'CFUNCSMESS'}],
'_check': isarray,
'_depend': ''
}, { # Not hidden
'decl': '\tPyObject *#varname#_capi = Py_None;',
'argformat': {isrequired: 'O'},
'keyformat': {isoptional: 'O'},
'args_capi': {isrequired: ',&#varname#_capi'},
'keys_capi': {isoptional: ',&#varname#_capi'},
'_check': l_and(isarray, isintent_nothide)
}, {
'frompyobj': ['\t#setdims#;',
'\tcapi_#varname#_intent |= #intent#;',
{isintent_hide:
'\tcapi_#varname#_tmp = array_from_pyobj(#atype#,#varname#_Dims,#varname#_Rank,capi_#varname#_intent,Py_None);'},
{isintent_nothide:
'\tcapi_#varname#_tmp = array_from_pyobj(#atype#,#varname#_Dims,#varname#_Rank,capi_#varname#_intent,#varname#_capi);'},
"""\
\tif (capi_#varname#_tmp == NULL) {
\t\tPyObject *exc, *val, *tb;
\t\tPyErr_Fetch(&exc, &val, &tb);
\t\tPyErr_SetString(exc ? exc : #modulename#_error,\"failed in converting #nth# `#varname#\' of #pyname# to C/Fortran array\" );
\t\tnpy_PyErr_ChainExceptionsCause(exc, val, tb);
\t} else {
\t\t#varname# = (#ctype# *)(PyArray_DATA(capi_#varname#_tmp));
""",
{hasinitvalue: [
{isintent_nothide:
'\tif (#varname#_capi == Py_None) {'},
{isintent_hide: '\t{'},
{iscomplexarray: '\t\t#ctype# capi_c;'},
"""\
\t\tint *_i,capi_i=0;
\t\tCFUNCSMESS(\"#name#: Initializing #varname#=#init#\\n\");
\t\tif (initforcomb(PyArray_DIMS(capi_#varname#_tmp),PyArray_NDIM(capi_#varname#_tmp),1)) {
\t\t\twhile ((_i = nextforcomb()))
\t\t\t\t#varname#[capi_i++] = #init#; /* fortran way */
\t\t} else {
\t\t\tPyObject *exc, *val, *tb;
\t\t\tPyErr_Fetch(&exc, &val, &tb);
\t\t\tPyErr_SetString(exc ? exc : #modulename#_error,\"Initialization of #nth# #varname# failed (initforcomb).\");
\t\t\tnpy_PyErr_ChainExceptionsCause(exc, val, tb);
\t\t\tf2py_success = 0;
\t\t}
\t}
\tif (f2py_success) {"""]},
],
'cleanupfrompyobj': [ # note that this list will be reversed
'\t} /*if (capi_#varname#_tmp == NULL) ... else of #varname#*/',
{l_not(l_or(isintent_out, isintent_hide)): """\
\tif((PyObject *)capi_#varname#_tmp!=#varname#_capi) {
\t\tPy_XDECREF(capi_#varname#_tmp); }"""},
{l_and(isintent_hide, l_not(isintent_out))
: """\t\tPy_XDECREF(capi_#varname#_tmp);"""},
{hasinitvalue: '\t} /*if (f2py_success) of #varname# init*/'},
],
'_check': isarray,
'_depend': ''
},
# Scalararray
{ # Common
'_check': l_and(isarray, l_not(iscomplexarray))
}, { # Not hidden
'_check': l_and(isarray, l_not(iscomplexarray), isintent_nothide)
},
# Integer*1 array
{'need': '#ctype#',
'_check': isint1array,
'_depend': ''
},
# Integer*-1 array
{'need': '#ctype#',
'_check': isunsigned_chararray,
'_depend': ''
},
# Integer*-2 array
{'need': '#ctype#',
'_check': isunsigned_shortarray,
'_depend': ''
},
# Integer*-8 array
{'need': '#ctype#',
'_check': isunsigned_long_longarray,
'_depend': ''
},
# Complexarray
{'need': '#ctype#',
'_check': iscomplexarray,
'_depend': ''
},
# Stringarray
{
'callfortranappend': {isarrayofstrings: 'flen(#varname#),'},
'need': 'string',
'_check': isstringarray
}
]
################# Rules for checking ###############
# Rules applied for each C-expression listed in a variable's ``check(...)``
# attribute. The first (unconditional) rule emits optional debug tracing;
# of the remaining rules exactly one fires per variable kind — ``_break``
# stops further matching once a rule applies.
check_rules = [
    {
        # Trace which check is being evaluated when debugcapi is enabled.
        'frompyobj': {debugcapi: '\tfprintf(stderr,\"debug-capi:Checking `#check#\'\\n\");'},
        'need': 'len..'
    }, {
        # Non-complex scalars: wrap the check in the CHECKSCALAR C macro.
        'frompyobj': '\tCHECKSCALAR(#check#,\"#check#\",\"#nth# #varname#\",\"#varshowvalue#\",#varname#) {',
        'cleanupfrompyobj': '\t} /*CHECKSCALAR(#check#)*/',
        'need': 'CHECKSCALAR',
        '_check': l_and(isscalar, l_not(iscomplex)),
        '_break': ''
    }, {
        # Strings: CHECKSTRING macro.
        'frompyobj': '\tCHECKSTRING(#check#,\"#check#\",\"#nth# #varname#\",\"#varshowvalue#\",#varname#) {',
        'cleanupfrompyobj': '\t} /*CHECKSTRING(#check#)*/',
        'need': 'CHECKSTRING',
        '_check': isstring,
        '_break': ''
    }, {
        # Arrays: CHECKARRAY macro.
        'need': 'CHECKARRAY',
        'frompyobj': '\tCHECKARRAY(#check#,\"#check#\",\"#nth# #varname#\") {',
        'cleanupfrompyobj': '\t} /*CHECKARRAY(#check#)*/',
        '_check': isarray,
        '_break': ''
    }, {
        # Fallback for everything else (e.g. complex scalars): CHECKGENERIC.
        'need': 'CHECKGENERIC',
        'frompyobj': '\tCHECKGENERIC(#check#,\"#check#\",\"#nth# #varname#\") {',
        'cleanupfrompyobj': '\t} /*CHECKGENERIC(#check#)*/',
    }
]
########## Applying the rules. No need to modify what follows #############
#################### Build C/API module #######################
def buildmodule(m, um):
    """
    Generate the C/API extension-module source (and optional Fortran
    wrappers and documentation) for module block ``m``; ``um`` lists
    used modules for ``use`` statement support.

    Returns a dict mapping ``'csrc'`` to the written C source path,
    and optionally ``'fsrc'`` (Fortran wrapper file) and ``'ltx'``
    (LaTeX documentation file). All files go under
    ``options['buildpath']``.
    """
    outmess('\tBuilding module "%s"...\n' % (m['name']))
    ret = {}
    mod_rules = defmod_rules[:]
    vrd = capi_maps.modsign2map(m)
    rd = dictappend({'f2py_version': f2py_version}, vrd)
    funcwrappers = []
    funcwrappers2 = []  # F90 codes
    # Build the per-routine C/API hooks for every interfaced routine.
    for n in m['interfaced']:
        nb = None
        for bi in m['body']:
            if not bi['block'] == 'interface':
                errmess('buildmodule: Expected interface block. Skipping.\n')
                continue
            for b in bi['body']:
                if b['name'] == n:
                    nb = b
                    break
        if not nb:
            errmess(
                'buildmodule: Could not found the body of interfaced routine "%s". Skipping.\n' % (n))
            continue
        nb_list = [nb]
        # Each ENTRY point of a routine is wrapped as its own routine.
        if 'entry' in nb:
            for k, a in nb['entry'].items():
                nb1 = copy.deepcopy(nb)
                del nb1['entry']
                nb1['name'] = k
                nb1['args'] = a
                nb_list.append(nb1)
        for nb in nb_list:
            # requiresf90wrapper must be called before buildapi as it
            # rewrites assumed shape arrays as automatic arrays.
            isf90 = requiresf90wrapper(nb)
            api, wrap = buildapi(nb)
            if wrap:
                if isf90:
                    funcwrappers2.append(wrap)
                else:
                    funcwrappers.append(wrap)
            ar = applyrules(api, vrd)
            rd = dictappend(rd, ar)
    # Construct COMMON block support
    cr, wrap = common_rules.buildhooks(m)
    if wrap:
        funcwrappers.append(wrap)
    ar = applyrules(cr, vrd)
    rd = dictappend(rd, ar)
    # Construct F90 module support
    mr, wrap = f90mod_rules.buildhooks(m)
    if wrap:
        funcwrappers2.append(wrap)
    ar = applyrules(mr, vrd)
    rd = dictappend(rd, ar)
    # Variables imported via Fortran `use` statements.
    for u in um:
        ar = use_rules.buildusevars(u, m['use'][u['name']])
        rd = dictappend(rd, ar)
    # Resolve every collected "need" into the C snippet that provides it.
    # The elif chain below is a priority order over cfuncs' registries.
    needs = cfuncs.get_needs()
    code = {}
    for n in needs.keys():
        code[n] = []
        for k in needs[n]:
            c = ''
            if k in cfuncs.includes0:
                c = cfuncs.includes0[k]
            elif k in cfuncs.includes:
                c = cfuncs.includes[k]
            elif k in cfuncs.userincludes:
                c = cfuncs.userincludes[k]
            elif k in cfuncs.typedefs:
                c = cfuncs.typedefs[k]
            elif k in cfuncs.typedefs_generated:
                c = cfuncs.typedefs_generated[k]
            elif k in cfuncs.cppmacros:
                c = cfuncs.cppmacros[k]
            elif k in cfuncs.cfuncs:
                c = cfuncs.cfuncs[k]
            elif k in cfuncs.callbacks:
                c = cfuncs.callbacks[k]
            elif k in cfuncs.f90modhooks:
                c = cfuncs.f90modhooks[k]
            elif k in cfuncs.commonhooks:
                c = cfuncs.commonhooks[k]
            else:
                errmess('buildmodule: unknown need %s.\n' % (repr(k)))
                continue
            code[n].append(c)
    mod_rules.append(code)
    for r in mod_rules:
        if ('_check' in r and r['_check'](m)) or ('_check' not in r):
            ar = applyrules(r, vrd, m)
            rd = dictappend(rd, ar)
    ar = applyrules(module_rules, rd)
    # Write the C/API module source (tabs rendered as two spaces).
    fn = os.path.join(options['buildpath'], vrd['coutput'])
    ret['csrc'] = fn
    with open(fn, 'w') as f:
        f.write(ar['modulebody'].replace('\t', 2 * ' '))
    outmess('\tWrote C/API module "%s" to file "%s"\n' % (m['name'], fn))
    # Optional ReST documentation output.
    if options['dorestdoc']:
        fn = os.path.join(
            options['buildpath'], vrd['modulename'] + 'module.rest')
        with open(fn, 'w') as f:
            f.write('.. -*- rest -*-\n')
            f.write('\n'.join(ar['restdoc']))
        outmess('\tReST Documentation is saved to file "%s/%smodule.rest"\n' %
                (options['buildpath'], vrd['modulename']))
    # Optional LaTeX documentation output.
    if options['dolatexdoc']:
        fn = os.path.join(
            options['buildpath'], vrd['modulename'] + 'module.tex')
        ret['ltx'] = fn
        with open(fn, 'w') as f:
            f.write(
                '%% This file is auto-generated with f2py (version:%s)\n' % (f2py_version))
            if 'shortlatex' not in options:
                f.write(
                    '\\documentclass{article}\n\\usepackage{a4wide}\n\\begin{document}\n\\tableofcontents\n\n')
            f.write('\n'.join(ar['latexdoc']))
            if 'shortlatex' not in options:
                f.write('\\end{document}')
        outmess('\tDocumentation is saved to file "%s/%smodule.tex"\n' %
                (options['buildpath'], vrd['modulename']))
    # Fortran 77 wrappers: wrap long lines at column 66 with a
    # continuation marker (fixed-form source constraint).
    if funcwrappers:
        wn = os.path.join(options['buildpath'], vrd['f2py_wrapper_output'])
        ret['fsrc'] = wn
        with open(wn, 'w') as f:
            f.write('C -*- fortran -*-\n')
            f.write(
                'C This file is autogenerated with f2py (version:%s)\n' % (f2py_version))
            f.write(
                'C It contains Fortran 77 wrappers to fortran functions.\n')
            lines = []
            for l in ('\n\n'.join(funcwrappers) + '\n').split('\n'):
                if 0 <= l.find('!') < 66:
                    # don't split comment lines
                    lines.append(l + '\n')
                elif l and l[0] == ' ':
                    while len(l) >= 66:
                        lines.append(l[:66] + '\n     &')
                        l = l[66:]
                    lines.append(l + '\n')
                else:
                    lines.append(l + '\n')
            lines = ''.join(lines).replace('\n     &\n', '\n')
            f.write(lines)
        outmess('\tFortran 77 wrappers are saved to "%s"\n' % (wn))
    # Fortran 90 wrappers: free-form continuation with '&' near column 72/66.
    if funcwrappers2:
        wn = os.path.join(
            options['buildpath'], '%s-f2pywrappers2.f90' % (vrd['modulename']))
        ret['fsrc'] = wn
        with open(wn, 'w') as f:
            f.write('! -*- f90 -*-\n')
            f.write(
                '! This file is autogenerated with f2py (version:%s)\n' % (f2py_version))
            f.write(
                '! It contains Fortran 90 wrappers to fortran functions.\n')
            lines = []
            for l in ('\n\n'.join(funcwrappers2) + '\n').split('\n'):
                if 0 <= l.find('!') < 72:
                    # don't split comment lines
                    lines.append(l + '\n')
                elif len(l) > 72 and l[0] == ' ':
                    lines.append(l[:72] + '&\n     &')
                    l = l[72:]
                    while len(l) > 66:
                        lines.append(l[:66] + '&\n     &')
                        l = l[66:]
                    lines.append(l + '\n')
                else:
                    lines.append(l + '\n')
            lines = ''.join(lines).replace('\n     &\n', '\n')
            f.write(lines)
        outmess('\tFortran 90 wrappers are saved to "%s"\n' % (wn))
    return ret
################## Build C/API function #############
# Ordinal suffix for the last decimal digit of an argument position;
# used as ``stnd[n % 10]`` (so 11/12/13 still get 'st'/'nd'/'rd').
stnd = {digit: suffix
        for digit, suffix in zip(range(10), ['th', 'st', 'nd', 'rd'] + ['th'] * 6)}
def buildapi(rout):
    """
    Build the C/API wrapper for a single routine ``rout``.

    Applies ``rout_rules``, then the per-argument ``arg_rules`` /
    ``aux_rules`` (dependent arguments in a second pass, followed by
    ``check_rules`` for any ``check(...)`` attributes), and finally
    ``routine_rules``. Returns ``(ar, wrap)`` where ``ar`` is the
    applied-rules dict of generated C fragments and ``wrap`` is an
    optional Fortran wrapper produced by ``func2subr.assubr``.
    """
    rout, wrap = func2subr.assubr(rout)
    args, depargs = getargs2(rout)
    capi_maps.depargs = depargs
    var = rout['vars']
    if ismoduleroutine(rout):
        outmess('\t\t\tConstructing wrapper function "%s.%s"...\n' %
                (rout['modulename'], rout['name']))
    else:
        outmess('\t\tConstructing wrapper function "%s"...\n' % (rout['name']))
    # Routine
    vrd = capi_maps.routsign2map(rout)
    rd = dictappend({}, vrd)
    for r in rout_rules:
        if ('_check' in r and r['_check'](rout)) or ('_check' not in r):
            ar = applyrules(r, vrd, rout)
            rd = dictappend(rd, ar)
    # Args
    # nth/nthk number the required/optional arguments for error messages
    # ("1st argument", "2nd keyword", ...); hidden args get no position.
    nth, nthk = 0, 0
    savevrd = {}
    for a in args:
        vrd = capi_maps.sign2map(a, var[a])
        if isintent_aux(var[a]):
            _rules = aux_rules
        else:
            _rules = arg_rules
        if not isintent_hide(var[a]):
            if not isoptional(var[a]):
                nth = nth + 1
                vrd['nth'] = repr(nth) + stnd[nth % 10] + ' argument'
            else:
                nthk = nthk + 1
                vrd['nth'] = repr(nthk) + stnd[nthk % 10] + ' keyword'
        else:
            vrd['nth'] = 'hidden'
        savevrd[a] = vrd
        # First pass: apply only non-dependent rules.
        for r in _rules:
            if '_depend' in r:
                continue
            if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
                ar = applyrules(r, vrd, var[a])
                rd = dictappend(rd, ar)
                if '_break' in r:
                    break
    # Second pass over dependent arguments: apply only '_depend' rules,
    # then any user-specified check(...) expressions.
    for a in depargs:
        if isintent_aux(var[a]):
            _rules = aux_rules
        else:
            _rules = arg_rules
        vrd = savevrd[a]
        for r in _rules:
            if '_depend' not in r:
                continue
            if ('_check' in r and r['_check'](var[a])) or ('_check' not in r):
                ar = applyrules(r, vrd, var[a])
                rd = dictappend(rd, ar)
                if '_break' in r:
                    break
        if 'check' in var[a]:
            for c in var[a]['check']:
                vrd['check'] = c
                ar = applyrules(check_rules, vrd, var[a])
                rd = dictappend(rd, ar)
    # Cleanup fragments were collected in construction order; they must be
    # emitted in reverse to unwind correctly.
    if isinstance(rd['cleanupfrompyobj'], list):
        rd['cleanupfrompyobj'].reverse()
    if isinstance(rd['closepyobjfrom'], list):
        rd['closepyobjfrom'].reverse()
    # Assemble doc signatures (full and short/optional-bracketed forms).
    rd['docsignature'] = stripcomma(replace('#docsign##docsignopt##docsignxa#',
                                            {'docsign': rd['docsign'],
                                             'docsignopt': rd['docsignopt'],
                                             'docsignxa': rd['docsignxa']}))
    optargs = stripcomma(replace('#docsignopt##docsignxa#',
                                 {'docsignxa': rd['docsignxashort'],
                                  'docsignopt': rd['docsignoptshort']}
                                 ))
    if optargs == '':
        rd['docsignatureshort'] = stripcomma(
            replace('#docsign#', {'docsign': rd['docsign']}))
    else:
        rd['docsignatureshort'] = replace('#docsign#[#docsignopt#]',
                                          {'docsign': rd['docsign'],
                                           'docsignopt': optargs,
                                           })
    # Escape for LaTeX output.
    rd['latexdocsignatureshort'] = rd['docsignatureshort'].replace('_', '\\_')
    rd['latexdocsignatureshort'] = rd[
        'latexdocsignatureshort'].replace(',', ', ')
    # Build the Fortran call argument list (Compaq Fortran variant inserts
    # an extra 0 before appended string-length arguments).
    cfs = stripcomma(replace('#callfortran##callfortranappend#', {
                     'callfortran': rd['callfortran'], 'callfortranappend': rd['callfortranappend']}))
    if len(rd['callfortranappend']) > 1:
        rd['callcompaqfortran'] = stripcomma(replace('#callfortran# 0,#callfortranappend#', {
            'callfortran': rd['callfortran'], 'callfortranappend': rd['callfortranappend']}))
    else:
        rd['callcompaqfortran'] = cfs
    rd['callfortran'] = cfs
    if isinstance(rd['docreturn'], list):
        rd['docreturn'] = stripcomma(
            replace('#docreturn#', {'docreturn': rd['docreturn']})) + ' = '
    # Collect the per-kind docstring sections (plain and LaTeX).
    rd['docstrsigns'] = []
    rd['latexdocstrsigns'] = []
    for k in ['docstrreq', 'docstropt', 'docstrout', 'docstrcbs']:
        if k in rd and isinstance(rd[k], list):
            rd['docstrsigns'] = rd['docstrsigns'] + rd[k]
        k = 'latex' + k
        if k in rd and isinstance(rd[k], list):
            rd['latexdocstrsigns'] = rd['latexdocstrsigns'] + rd[k][0:1] +\
                ['\\begin{description}'] + rd[k][1:] +\
                ['\\end{description}']
    ar = applyrules(routine_rules, rd)
    if ismoduleroutine(rout):
        outmess('\t\t\t %s\n' % (ar['docshort']))
    else:
        outmess('\t\t %s\n' % (ar['docshort']))
    return ar, wrap
#################### EOF rules.py #######################
|
# Generated by Django 2.1.7 on 2019-03-03 09:18
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 2.1.7): alter the existing ``director``
    # foreign key on the ``movie`` model.

    dependencies = [
        # Must run after the initial movielist migration that created the models.
        ('movielist', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='movie',
            name='director',
            # Nullable FK to movielist.Person: deleting a Person keeps the
            # movie row and sets director to NULL; reverse accessor is
            # ``person.movies_directed``.
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='movies_directed', to='movielist.Person'),
        ),
    ]
|
from typing import Dict, List
import numpy as np
import torch
from torch.utils.data import Dataset
from allennlp.data import Vocabulary
from updown.config import Config
from updown.data.readers import CocoCaptionsReader, ConstraintBoxesReader, ImageFeaturesReader
from updown.types import (
TrainingInstance,
TrainingBatch,
EvaluationInstance,
EvaluationInstanceWithConstraints,
EvaluationBatch,
EvaluationBatchWithConstraints,
)
from updown.utils.constraints import ConstraintFilter, FiniteStateMachineBuilder
class TrainingDataset(Dataset):
    r"""
    A PyTorch :class:`~torch.utils.data.Dataset` over COCO train2017 captions for training
    :class:`~updown.models.updown_captioner.UpDownCaptioner`. Wrapped in a
    :class:`~torch.utils.data.DataLoader` (with :meth:`collate_fn`), it yields batches of
    image features paired with tokenized ground-truth captions.

    .. note::

        Use :mod:`collate_fn` when wrapping with a :class:`~torch.utils.data.DataLoader`.

    Parameters
    ----------
    vocabulary: allennlp.data.Vocabulary
        Vocabulary holding the token-to-index mapping for captions.
    captions_jsonpath: str
        Path to a JSON file with COCO train2017 caption annotations.
    image_features_h5path: str
        Path to an H5 file with pre-extracted COCO train2017 image features.
    max_caption_length: int, optional (default = 20)
        Captions are truncated (and padded) to this many tokens.
    in_memory: bool, optional (default = True)
        Whether to keep all image features in memory.
    """

    def __init__(
        self,
        vocabulary: Vocabulary,
        captions_jsonpath: str,
        image_features_h5path: str,
        max_caption_length: int = 20,
        in_memory: bool = True,
    ) -> None:
        self._vocabulary = vocabulary
        self._image_features_reader = ImageFeaturesReader(image_features_h5path, in_memory)
        self._captions_reader = CocoCaptionsReader(captions_jsonpath)
        self._max_caption_length = max_caption_length

    @classmethod
    def from_config(cls, config: Config, **kwargs):
        r"""Instantiate this class directly from a :class:`~updown.config.Config`."""
        _C = config
        return cls(
            vocabulary=kwargs.pop("vocabulary"),
            image_features_h5path=_C.DATA.TRAIN_FEATURES,
            captions_jsonpath=_C.DATA.TRAIN_CAPTIONS,
            max_caption_length=_C.DATA.MAX_CAPTION_LENGTH,
            in_memory=kwargs.pop("in_memory"),
        )

    def __len__(self) -> int:
        # One training example per caption (an image may have several captions).
        return len(self._captions_reader)

    def __getitem__(self, indexes: list) -> TrainingInstance:
        instances = []
        for index in indexes:
            image_id, caption = self._captions_reader[index]

            # Map caption words to vocabulary indices, truncate to the
            # maximum length, then right-pad with the @@UNKNOWN@@ index.
            token_ids: List[int] = [self._vocabulary.get_token_index(w) for w in caption]
            token_ids = token_ids[: self._max_caption_length]
            pad_length = self._max_caption_length - len(token_ids)
            token_ids.extend([self._vocabulary.get_token_index("@@UNKNOWN@@")] * pad_length)

            instance: TrainingInstance = {
                "image_id": image_id,
                "image_features": self._image_features_reader[image_id],
                "caption_tokens": token_ids,
            }
            instances.append(instance)
        return instances

    def collate_fn(self, batch_lists: List[TrainingInstance]) -> TrainingBatch:
        batches = []
        for batch_list in batch_lists:
            # Zero-pad the per-image adaptive feature arrays to a common size.
            padded_features = torch.from_numpy(
                _collate_image_features([instance["image_features"] for instance in batch_list])
            )
            batch: TrainingBatch = {
                "image_id": torch.tensor([instance["image_id"] for instance in batch_list]).long(),
                "image_features": padded_features,
                "caption_tokens": torch.tensor(
                    [instance["caption_tokens"] for instance in batch_list]
                ).long(),
            }
            batches.append(batch)
        return batches
class EvaluationDataset(Dataset):
    r"""
    A PyTorch :class:`~torch.utils.data.Dataset` serving image features for inference.
    Wrapped in a :class:`~torch.utils.data.DataLoader` (with :meth:`collate_fn`), it yields
    batches of image features keyed by image id.

    .. note::

        Use :mod:`collate_fn` when wrapping with a :class:`~torch.utils.data.DataLoader`.

    Parameters
    ----------
    image_features_h5path: str
        Path to an H5 file containing pre-extracted features from nocaps val/test images.
    in_memory: bool, optional (default = True)
        Whether to load all image features in memory.
    """

    def __init__(self, image_features_h5path: str, in_memory: bool = True) -> None:
        self._image_features_reader = ImageFeaturesReader(image_features_h5path, in_memory)
        # Fix a deterministic example order by sorting the available image ids.
        self._image_ids = sorted(self._image_features_reader._map.keys())

    @classmethod
    def from_config(cls, config: Config, **kwargs):
        r"""Instantiate this class directly from a :class:`~updown.config.Config`."""
        return cls(
            image_features_h5path=config.DATA.INFER_FEATURES,
            in_memory=kwargs.pop("in_memory"),
        )

    def __len__(self) -> int:
        return len(self._image_ids)

    def __getitem__(self, index: int) -> EvaluationInstance:
        image_id = self._image_ids[index]
        return {"image_id": image_id, "image_features": self._image_features_reader[image_id]}

    def collate_fn(self, batch_list: List[EvaluationInstance]) -> EvaluationBatch:
        image_ids = torch.tensor([instance["image_id"] for instance in batch_list]).long()
        # Zero-pad the per-image adaptive feature arrays to a common size.
        padded_features = torch.from_numpy(
            _collate_image_features([instance["image_features"] for instance in batch_list])
        )
        return {"image_id": image_ids, "image_features": padded_features}
class EvaluationDatasetWithConstraints(EvaluationDataset):
    r"""
    A PyTorch :class:`~torch.utils.data.Dataset` providing image features for inference, along
    with constraints for :class:`~updown.modules.cbs.ConstrainedBeamSearch`. When wrapped with a
    :class:`~torch.utils.data.DataLoader`, it provides batches of image features, Finite State
    Machines built (per instance) from constraints, and number of constraints used to make these.

    Extended Summary
    ----------------
    Finite State Machines as represented as adjacency matrices (Tensors) with state transitions
    corresponding to specific constraint (word) occurrence while decoding). We return the number
    of constraints used to make an FSM because it is required while selecting which decoded beams
    satisfied constraints. Refer :func:`~updown.utils.constraints.select_best_beam_with_constraints`
    for more details.

    .. note::

        Use :mod:`collate_fn` when wrapping with a :class:`~torch.utils.data.DataLoader`.

    Parameters
    ----------
    vocabulary: allennlp.data.Vocabulary
        AllenNLP’s vocabulary containing token to index mapping for captions vocabulary.
    image_features_h5path: str
        Path to an H5 file containing pre-extracted features from nocaps val/test images.
    boxes_jsonpath: str
        Path to a JSON file containing bounding box detections in COCO format (nocaps val/test
        usually).
    wordforms_tsvpath: str
        Path to a TSV file containing two fields: first is the name of Open Images object class
        and second field is a comma separated list of words (possibly singular and plural forms
        of the word etc.) which could be CBS constraints.
    hierarchy_jsonpath: str
        Path to a JSON file containing a hierarchy of Open Images object classes as
        `here <https://storage.googleapis.com/openimages/2018_04/bbox_labels_600_hierarchy_visualizer/circle.html>`_.
    nms_threshold: float, optional (default = 0.85)
        NMS threshold for suppressing generic object class names during constraint filtering,
        for two boxes with IoU higher than this threshold, "dog" suppresses "animal".
    max_given_constraints: int, optional (default = 3)
        Maximum number of constraints which can be specified for CBS decoding. Constraints are
        selected based on the prediction confidence score of their corresponding bounding boxes.
    in_memory: bool, optional (default = True)
        Whether to load all image features in memory.
    """

    def __init__(
        self,
        vocabulary: Vocabulary,
        image_features_h5path: str,
        boxes_jsonpath: str,
        wordforms_tsvpath: str,
        hierarchy_jsonpath: str,
        nms_threshold: float = 0.85,
        max_given_constraints: int = 3,
        max_words_per_constraint: int = 3,
        in_memory: bool = True,
    ):
        super().__init__(image_features_h5path, in_memory=in_memory)
        self._vocabulary = vocabulary
        # Index used for padding constraint word sequences.
        self._pad_index = vocabulary.get_token_index("@@UNKNOWN@@")
        self._boxes_reader = ConstraintBoxesReader(boxes_jsonpath)
        # Reduces raw detections (via NMS over the class hierarchy) to at
        # most ``max_given_constraints`` constraint candidates.
        self._constraint_filter = ConstraintFilter(
            hierarchy_jsonpath, nms_threshold, max_given_constraints
        )
        self._fsm_builder = FiniteStateMachineBuilder(vocabulary, wordforms_tsvpath, max_given_constraints, max_words_per_constraint)

    @classmethod
    def from_config(cls, config: Config, **kwargs):
        r"""Instantiate this class directly from a :class:`~updown.config.Config`."""
        _C = config
        vocabulary = kwargs.pop("vocabulary")
        return cls(
            vocabulary=vocabulary,
            image_features_h5path=_C.DATA.INFER_FEATURES,
            boxes_jsonpath=_C.DATA.CBS.INFER_BOXES,
            wordforms_tsvpath=_C.DATA.CBS.WORDFORMS,
            hierarchy_jsonpath=_C.DATA.CBS.CLASS_HIERARCHY,
            max_given_constraints=_C.DATA.CBS.MAX_GIVEN_CONSTRAINTS,
            max_words_per_constraint=_C.DATA.CBS.MAX_WORDS_PER_CONSTRAINT,
            in_memory=kwargs.pop("in_memory"),
        )

    def __getitem__(self, index: int) -> EvaluationInstanceWithConstraints:
        # Base class supplies {"image_id", "image_features"}.
        item: EvaluationInstance = super().__getitem__(index)

        # Apply constraint filtering to object class names.
        constraint_boxes = self._boxes_reader[item["image_id"]]
        candidates: List[str] = self._constraint_filter(
            constraint_boxes["boxes"], constraint_boxes["class_names"], constraint_boxes["scores"]
        )
        # Build the per-instance FSM (adjacency tensor) from the candidates.
        fsm, nstates = self._fsm_builder.build(candidates)
        return {"fsm": fsm, "num_states": nstates, "num_constraints": len(candidates), **item}

    def collate_fn(
        self, batch_list: List[EvaluationInstanceWithConstraints]
    ) -> EvaluationBatchWithConstraints:
        # Base class collates image ids and (padded) image features.
        batch = super().collate_fn(batch_list)
        # Trim every FSM to the largest state count in the batch so they stack.
        max_state = max([s["num_states"] for s in batch_list])
        fsm = torch.stack([s["fsm"][:max_state, :max_state, :] for s in batch_list])
        num_candidates = torch.tensor([s["num_constraints"] for s in batch_list]).long()
        batch.update({"fsm": fsm, "num_constraints": num_candidates})
        return batch
def _collate_image_features(image_features_list: List[np.ndarray]) -> np.ndarray:
num_boxes = [instance.shape[0] for instance in image_features_list]
image_feature_size = image_features_list[0].shape[-1]
image_features = np.zeros(
(len(image_features_list), max(num_boxes), image_feature_size), dtype=np.float32
)
for i, (instance, dim) in enumerate(zip(image_features_list, num_boxes)):
image_features[i, :dim] = instance
return image_features
|
from tixte_foss import app
from multiprocessing import Process
import pytest
def test_run():
    """Smoke test: the Flask app can be launched in a separate process.

    Starts ``app.run`` in a child process and tears it down immediately.
    The original version never joined the child, leaving a zombie process
    behind the test run; ``join`` reaps it deterministically.
    """
    server = Process(target=app.run)
    server.start()
    try:
        server.terminate()
    finally:
        # Reap the child so the test leaves no stray process.
        server.join()
|
#
# @lc app=leetcode id=321 lang=python3
#
# [321] Create Maximum Number
#
# @lc code=start
# class Solution:
# def maxNumber(self, nums1, nums2, k):
# if not nums1 and not nums2:
# return []
# l1, l2 = len(nums1), len(nums2)
# dp = [[0] * (l2 + 1) for _ in range(l1 + 1)]
# if k >= l1:
# for i in range(1, l1 + 1):
# dp[i][0] = dp[i - 1][0] * 10 + nums1[i - 1]
# else:
# for i in range(1, k + 1):
# dp[i][0] = dp[i - 1][0] * 10 + nums1[i - 1]
# for i in range(k + 1, l1 + 1):
# curnum = dp[i - 1][0] % 10 * 10 + nums1[i - 1]
# dp[i][0] = max(curnum, dp[i - 1][0])
# if not nums2:
# return dp[-1][0]
# if k >= l2:
# for j in range(1, l2 + 1):
# dp[0][j] = dp[0][j - 1] * 10 + nums2[j - 1]
# else:
# for j in range(1, k + 1):
# dp[0][j] = dp[0][j - 1] + nums2[j - 1]
# for i in range(k + 1, l2 + 1):
# curnum = dp[0][j - 1] % 10 + nums2[j - 1]
# dp[0][j] = max(dp[0][j - 1], curnum)
# if not nums1:
# return dp[0][-1]
# for i in range(1, l1 + 1):
# for j in range(1, l2 + 1):
# last1 = dp[i - 1][j]
# last2 = dp[i][j - 1]
# dp[i][j] = max(self.insert(nums1[i - 1], last1, k), self.insert(nums2[j - 1], last2, k))
# number = str(dp[-1][-1])
# ans = []
# for digit in number:
# ans.append(digit)
# return ans
# def insert(self, newdigit, orinum, limit):
# d, num = str(newdigit), str(orinum)
# mx = float('-inf')
# if len(num) + 1 <= limit:
# for i in range(len(num)):
# mx = max(int(num[:i] + d + num[i:]), mx)
# return mx
# for i in range(len(num)):
# mx = max(int(num[:i] + d + num[i + 1:]), mx)
# return mx
class Solution(object):
    def maxNumber(self, nums1, nums2, k):
        """
        :type nums1: List[int]
        :type nums2: List[int]
        :type k: int
        :rtype: List[int]
        """
        def pick_largest(nums, count):
            # Monotonic stack: lexicographically largest length-``count``
            # subsequence of ``nums`` preserving relative order.
            stack, total = [], len(nums)
            for idx in range(total):
                while stack and len(stack) + total - idx > count and nums[idx] > stack[-1]:
                    stack.pop()
                if len(stack) < count:
                    stack.append(nums[idx])
            return stack

        best = [0] * k
        low = max(0, k - len(nums2))
        high = min(k, len(nums1))
        for take in range(low, high + 1):
            part1 = pick_largest(nums1, take)
            part2 = pick_largest(nums2, k - take)
            # Greedy merge: pop from whichever remainder compares larger;
            # list comparison resolves ties by looking ahead.
            merged = [max(part1, part2).pop(0) for _ in range(k)]
            best = max(best, merged)
        return best
if __name__ == '__main__':
    # Quick manual check against the LeetCode sample input.
    solver = Solution()
    result = solver.maxNumber([3, 4, 6, 5], [9, 1, 2, 5, 8, 3], 5)
    print(result)
# @lc code=end
|
from celery import Celery
from flasgger import Swagger
from flask_caching import Cache
from flask_cors import CORS
from flask_jwt_extended import JWTManager
from flask_migrate import Migrate
from flask_redis import FlaskRedis
from flask_sqlalchemy import SQLAlchemy
# Flask extension singletons, created unbound here and attached to the app
# later (application-factory pattern — presumably via each extension's
# ``init_app``; confirm against the app factory).
redis_store = FlaskRedis()
cors = CORS()
migrate = Migrate()
jwt_manager = JWTManager()
db = SQLAlchemy()
# Celery app; task modules registered eagerly via ``include``.
celery = Celery('flask_cc_api', include=['flask_cc_api.proj.tasks'])
# Redis-backed cache for flask_caching.
cache = Cache(config={'CACHE_TYPE': 'redis'})
# Swagger (flasgger) base template: API metadata plus a JWT bearer-token
# security definition carried in the ``Authorization`` header.
template = {
    'swagger': '2.0',
    'info': {
        'title': 'API文档',
        'version': '0.0.1',
    },
    'securityDefinitions': {
        'Token': {
            'type': 'apiKey',
            'name': 'Authorization',
            'in': 'header',
            'description': 'Bearer <jwt>',
        },
    },
}
swagger = Swagger(template=template)
|
import tushare
from datetime import datetime, timedelta
import pytz
import logging
import json
import os
from pymongo import MongoClient
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from .ProxyManager import ProxyManager
from .Constants import Constants
from .HotConcept import HotConcept
class DataLoader(object):
UTC_8 = pytz.timezone('Asia/Shanghai')
_logger = logging.getLogger(__name__)
_MONFO_URL = '127.0.0.1'
_DATA_DB = 'firestone-data'
_CODE_FROM_DB = '000000'
def __init__(self, code_list, is_mock=False, mock_trade=False, date=None, hours=['9','11','10,13-14'], minutes=['30-59','0-29','*']):
self.proxyManager = ProxyManager()
self.use_proxy = False
self.hours = hours
self.minutes = minutes
self.is_mock = is_mock
self.mock_trade = mock_trade
self.is_finsih_flag = False
self.lastRows = {}
self.client = MongoClient(DataLoader._MONFO_URL, 27017)
self.data_db = self.client[DataLoader._DATA_DB]
self.db = self.client[os.environ['FR_DB']]
self.hot_concept = HotConcept(self.db)
self.scheduler = BackgroundScheduler()
self.date = date
today = datetime.now()
self.today = '{}-{}-{}'.format(today.year,('0' + str(today.month))[-2:],('0' + str(today.day))[-2:])
# self.today_datetime = datetime(today.year,today.month,today.day,tzinfo=DataLoader.UTC_8)
if(self.date is None):
self.date = self.today
end_date = today + timedelta(days = 1)
end_date = '{}-{}-{}'.format(end_date.year,('0' + str(end_date.month))[-2:],('0' + str(end_date.day))[-2:])
self.load_codes_from_db = False
self.code_list = self.get_code_list(code_list)
for i, hour in enumerate(hours):
trigger = CronTrigger(hour=hour,minute=minutes[i],second='*/3', end_date=end_date)
trigger_concept = CronTrigger(hour=hour,minute=minutes[i],second='0', end_date=end_date)
if(i == len(hours) - 1):
self.scheduler.add_job(self.run,id="last_job",trigger=trigger)
else:
self.scheduler.add_job(self.run,trigger=trigger)
self.scheduler.add_job(self.run_concept,trigger=trigger_concept)
def get_code_list(self, code_list):
if(DataLoader._CODE_FROM_DB in code_list):
self.load_codes_from_db = True
return [DataLoader._CODE_FROM_DB]
colls = list(self.data_db.list_collections())
codes = []
for code in code_list:
if(code == 'N/A'):
continue
name = code + '-' + self.date + ('-m' if self.is_mock else '')
if(name not in [coll['name'] for coll in colls]):
codes.append(code)
self.data_db.create_collection(name)
if(len(codes) == 0):
self.is_finsih_flag = True
return codes
def start(self):
if(self.is_finsih_flag):
return
self.scheduler.start()
DataLoader._logger.info('job get data for {} is start'.format(self.code_list))
def is_finsih(self):
job = self.scheduler.get_job('last_job')
return self.is_finsih_flag or job is None or job.next_run_time is None
def stop(self):
self.client.close()
self.scheduler.shutdown(wait=True)
DataLoader._logger.info('job get data for {} is stop'.format(self.code_list))
def get_code_list_from_db(self):
colname = 'trades'
if(self.mock_trade):
colname = 'mocktrades'
codes_data = self.db[colname].find({"deleted":False, "params.executeDate" : self.today},{"code" : 1, "_id" : 0})
code_list = [code_data["code"] for code_data in list(codes_data) if code_data["code"] != 'N/A']
temp_list = []
for code in code_list:
if(',' in code):
temp_list.extend(code.split(','))
else:
temp_list.append(code)
code_list = temp_list
for code in code_list:
if(code.startswith('3')):
if(Constants.INDEX[5] not in code_list):
code_list.append(Constants.INDEX[5])
else:
if(Constants.INDEX[0] not in code_list):
code_list.append(Constants.INDEX[0])
return list(set(code_list))
def load_data(self):
list_wrapper = []
size = len(self.code_list)
df_result = None
if(size > 50):
list_size = (size // 50) + (1 if (size % 50) > 0 else 0)
for i in range(list_size):
list_wrapper.append(self.code_list[i * 50 : i * 50 + 50])
else:
list_wrapper.append(self.code_list)
if(self.use_proxy):
for l in list_wrapper:
try:
df = tushare.get_realtime_quotes(l, proxyManager=self.proxyManager)
if(df_result is None):
df_result = df
else:
df_result = df_result.append(df)
except Exception as e:
DataLoader._logger.error('load data error, use_proxy = {}, e = {}'.format(self.use_proxy, e))
self.use_proxy = True
self.proxyManager.remove_proxy()
else:
for i, l in enumerate(list_wrapper):
try:
if(i == 0):
df = tushare.get_realtime_quotes(l)
else:
df = tushare.get_realtime_quotes(l, proxyManager=self.proxyManager)
if(df_result is None):
df_result = df
else:
df_result = df_result.append(df)
except Exception as e:
DataLoader._logger.error('load data error, use_proxy = {}, e = {}'.format(self.use_proxy, e))
self.use_proxy = True
self.proxyManager.remove_proxy()
return df_result
def run(self):
    """Scheduler entry point: fetch quotes once and persist new ticks.

    Loads the code list from the DB when configured, fetches realtime
    quotes (or replays stored data in mock mode), and inserts each row
    into the per-code daily collection — skipping rows whose quote
    ``time`` has not advanced since the last stored row for that code.
    Any error is logged, never raised (this runs inside the scheduler).
    """
    try:
        if self.load_codes_from_db:
            self.code_list = self.get_code_list_from_db()
        DataLoader._logger.info('start get the data for {}'.format(self.code_list))
        if len(self.code_list) < 2:
            return
        if self.is_mock:
            self.run_mock()
            return
        df = self.load_data()
        if df is None:
            DataLoader._logger.error('failed to get the data for {}'.format(self.code_list))
            return
        json_list = json.loads(df.to_json(orient='records'))
        DataLoader._logger.info('get data length = {}'.format(len(json_list)))
        for json_data in json_list:
            # Fix: the original assigned json_data['code'] to `code` and
            # immediately overwrote it; only the mapped code is used.
            code = Constants.map_code(json_data['name'], json_data['code'])
            last = self.lastRows.get(code)
            # Persist only when the quote time moved forward.
            if last is None or last['time'] != json_data['time']:
                json_data['real_time'] = datetime.now()
                self.data_db[code + '-' + self.today].insert(json_data)
                self.lastRows[code] = json_data
    except Exception as e:
        DataLoader._logger.error(e)
def run_concept(self):
    """Refresh the hot-concept data; failures are logged, never raised."""
    try:
        self.hot_concept.load_hot_concept()
    except Exception as exc:
        DataLoader._logger.error(f'load hot concept failed, e = {exc}')
def run_mock(self):
    """Replay one stored tick per code into the per-code '-m' mock collection."""
    try:
        # First invocation: cache the day's stored rows and reset state.
        if not hasattr(self, 'mock_count'):
            self.mock_count = 0
            self.data = {}
            for code in self.code_list:
                key = code + '-' + self.date
                self.data[key] = list(self.data_db[key].find())
                self.lastRows[code] = None
        for code in self.code_list:
            key = code + '-' + self.date
            rows = self.data[key]
            if self.mock_count >= len(rows):
                continue
            row = rows[self.mock_count]
            row['real_time'] = datetime.now()
            previous = self.lastRows[code]
            # Insert only when the quote time differs from the last row.
            if previous is None or previous['time'] != row['time']:
                self.data_db[key + '-m'].insert(row)
                self.lastRows[code] = row
        self.mock_count += 1
    except Exception as e:
        DataLoader._logger.error(e)
|
# -*- coding: utf-8 -*-
""":mod:`dodotable.environment.flask` --- Flask environment
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For now, dodotable only supports Jinja2_ for templating. It may be possible
to use another template engine, but if you are inclined to use Jinja2_,
you are probably also interested in Flask_.
dodotable was originally intended to be used with Flask_, so a Flask
environment class is provided.
Customize HTML
==============
If you wish to change the design of your table, add a help message, or
put your own class names on the HTML elements, all you have to do is
inherit one of the environment classes and use it in your table.
.. code-block:: python
# yourapplication/dodotable.py
from dodotable.schema import Column as SchemaColumn, Table as SchemaTable
from dodotable.environment.flask import FlaskEnvironment
from jinja2 import PackageLoader
class CustomEnvironment(FlaskEnvironment):
@property
def template_loader(self):
return PackageLoader('yourapplication', 'templates')
class Column(SchemaColumn):
environment = CustomEnvironment()
class Table(SchemaTable):
environment = CustomEnvironment()
.. code-block:: python
#yourapplication/app.py
from flask import Flask, render_template
from .dodotable import Table, Column
app = Flask(__name__)
@app.route('/', methods=['GET'])
def index():
table = Table(columns=[
Column(...)
], ...)
return render_template('index.html', table=table.select(0, 10))
.. _Jinja2: http://jinja.pocoo.org/
.. _Flask: http://flask.pocoo.org/
"""
from __future__ import absolute_import
from flask import request
from . import Environment
__all__ = 'FlaskEnvironment', 'default_locale_selector'
class FlaskEnvironment(Environment):
    """Environment that builds tables inside a :mod:`flask` request context."""

    def __init__(self, locale_selector=None, *args, **kwargs):
        # Fall back to the Accept-Language based selector when none is given.
        if locale_selector is None:
            locale_selector = default_locale_selector
        super(FlaskEnvironment, self).__init__(
            *args, locale_selector=locale_selector, **kwargs
        )

    def build_url(self, **kwargs):
        """Rebuild the current request's URL with *kwargs* overriding its args."""
        params = request.args.copy()
        params.update(request.view_args)
        # Remove existing values first so the update below replaces them
        # (MultiDict.update appends values for keys already present).
        for key in kwargs:
            params.pop(key, None)
        params.update(kwargs.items())
        # Rule.build returns a (domain part, url) pair; index 1 is the URL.
        built = request.url_rule.build({k: v for k, v in params.items()})
        return built[1]

    def get_session(self):
        """Return the session stashed on the request object, or None."""
        ctx = request._get_current_object()
        return getattr(ctx, '_current_session', None)
def default_locale_selector():
    """Pick the best match from the Accept-Language header; 'ko' on failure."""
    # FIXME: commonly defined in all Flask services that currently use
    # dodotable. The intended locale semantics are unclear, so for now we
    # temporarily rely on the request's Accept-Language header.
    supported = ['ko', 'jp', 'en']
    try:
        return request.accept_languages.best_match(supported)
    except Exception:
        # Outside a request context (or on any failure) default to Korean.
        return 'ko'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.