code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
|---|---|---|
import uuid
from django.test import TestCase
from abidria.exceptions import EntityDoesNotExistException
from people.models import ORMPerson, ORMAuthToken, ORMConfirmationToken
from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo
from people.entities import Person
class PersonRepoTestCase(TestCase):
    """Tests for PersonRepo using the given/when/then _ScenarioMaker pattern."""

    def test_create_guest_person(self):
        PersonRepoTestCase._ScenarioMaker() \
                .when_create_guest_person() \
                .then_response_should_be_a_guest_person() \
                .then_that_person_should_be_saved_in_db()

    def test_update_person(self):
        PersonRepoTestCase._ScenarioMaker() \
                .given_a_person_in_db() \
                .given_a_person_entity_with_db_person_id() \
                .when_update_person_entity() \
                .then_result_should_be_person_entity() \
                .then_db_person_should_be_same_as_entity()

    def test_get_person(self):
        PersonRepoTestCase._ScenarioMaker() \
                .given_a_person_in_db() \
                .when_get_person_with_her_id() \
                .then_result_should_be_that_person()

    def test_get_person_by_username(self):
        PersonRepoTestCase._ScenarioMaker() \
                .given_a_person_in_db() \
                .when_get_person_with_her_username() \
                .then_result_should_be_that_person()

    def test_get_person_by_email(self):
        PersonRepoTestCase._ScenarioMaker() \
                .given_a_person_in_db() \
                .when_get_person_with_her_email() \
                .then_result_should_be_that_person()

    class _ScenarioMaker:
        """Fluent builder: given_* set up state, when_* act, then_* assert."""

        def __init__(self):
            # Fix: every when_* step stores into `result` (the previous
            # `self.response = None` initialized an attribute nobody used).
            self.result = None
            self.orm_person = None
            self.person = None

        def given_a_person_in_db(self):
            self.orm_person = ORMPerson.objects.create()
            return self

        def given_a_person_entity_with_db_person_id(self):
            # Entity mirrors the DB row created by given_a_person_in_db.
            self.person = Person(id=self.orm_person.id, is_registered=True,
                                 username='U', email='E', is_email_confirmed=True)
            return self

        def when_create_guest_person(self):
            self.result = PersonRepo().create_guest_person()
            return self

        def when_get_person_with_her_id(self):
            self.result = PersonRepo().get_person(id=self.orm_person.id)
            return self

        def when_get_person_with_her_username(self):
            self.result = PersonRepo().get_person(username=self.orm_person.username)
            return self

        def when_get_person_with_her_email(self):
            self.result = PersonRepo().get_person(email=self.orm_person.email)
            return self

        def when_update_person_entity(self):
            self.result = PersonRepo().update_person(self.person)
            return self

        def then_response_should_be_a_guest_person(self):
            assert self.result.id is not None
            assert not self.result.is_registered
            assert self.result.username is None
            assert self.result.email is None
            assert not self.result.is_email_confirmed
            return self

        def then_that_person_should_be_saved_in_db(self):
            assert ORMPerson.objects.filter(id=self.result.id).exists()
            return self

        def then_result_should_be_person_entity(self):
            assert self.result == self.person
            return self

        def then_db_person_should_be_same_as_entity(self):
            updated_orm_person = ORMPerson.objects.get(id=self.orm_person.id)
            assert updated_orm_person.is_registered == self.person.is_registered
            assert updated_orm_person.username == self.person.username
            assert updated_orm_person.email == self.person.email
            assert updated_orm_person.is_email_confirmed == self.person.is_email_confirmed
            return self

        def then_result_should_be_that_person(self):
            assert self.orm_person.id == self.result.id
            assert self.orm_person.is_registered == self.result.is_registered
            assert self.orm_person.username == self.result.username
            assert self.orm_person.email == self.result.email
            assert self.orm_person.is_email_confirmed == self.result.is_email_confirmed
            return self
class AuthTokenRepoTestCase(TestCase):
    """Tests for AuthTokenRepo using the given/when/then _ScenarioMaker pattern."""

    def test_create_auth_token(self):
        AuthTokenRepoTestCase._ScenarioMaker() \
                .given_a_person() \
                .when_create_auth_token_for_that_person() \
                .then_response_should_be_that_token() \
                .then_that_token_should_be_saved_in_db()

    def test_get_auth_token_from_access_token(self):
        AuthTokenRepoTestCase._ScenarioMaker() \
                .given_a_person() \
                .given_an_auth_token_for_that_person() \
                .when_get_auth_token_with_access_token() \
                .then_should_return_auth_token()

    def test_unexistent_get_auth_token(self):
        AuthTokenRepoTestCase._ScenarioMaker() \
                .when_get_auth_token_with_wrong_access_token() \
                .then_should_raise_entity_does_not_exist()

    class _ScenarioMaker:
        """Fluent builder: given_* set up state, when_* act, then_* assert."""

        def __init__(self):
            self.person = None
            self.auth_token = None
            # Fix: the when_* steps store into `result` (the previous
            # `self.response = None` initialized an attribute nobody used).
            self.result = None
            self.error = None

        def given_a_person(self):
            self.person = PersonRepo().create_guest_person()
            return self

        def given_an_auth_token_for_that_person(self):
            self.auth_token = AuthTokenRepo().create_auth_token(person_id=self.person.id)
            return self

        def when_get_auth_token_with_access_token(self):
            try:
                self.result = AuthTokenRepo().get_auth_token(access_token=self.auth_token.access_token)
            except Exception as e:
                self.error = e
            return self

        def when_get_auth_token_with_wrong_access_token(self):
            # Random uuid is practically guaranteed not to match any stored token.
            try:
                self.result = AuthTokenRepo().get_auth_token(access_token=str(uuid.uuid4()))
            except Exception as e:
                self.error = e
            return self

        def when_create_auth_token_for_that_person(self):
            try:
                self.result = AuthTokenRepo().create_auth_token(person_id=self.person.id)
            except Exception as e:
                self.error = e
            return self

        def then_should_return_auth_token(self):
            assert self.result == self.auth_token
            return self

        def then_response_should_be_that_token(self):
            assert self.result.person_id == self.person.id
            assert type(self.result.access_token) is str
            assert type(self.result.refresh_token) is str
            return self

        def then_that_token_should_be_saved_in_db(self):
            assert ORMAuthToken.objects.filter(person_id=self.result.person_id, access_token=self.result.access_token,
                                               refresh_token=self.result.refresh_token).exists()
            return self

        def then_should_raise_entity_does_not_exist(self):
            assert type(self.error) is EntityDoesNotExistException
            return self
class ConfirmationTokenRepoTestCase(TestCase):
    """Tests for ConfirmationTokenRepo using the given/when/then _ScenarioMaker pattern."""

    def test_create_confirmation_token(self):
        ConfirmationTokenRepoTestCase._ScenarioMaker() \
                .given_a_person() \
                .when_create_confirmation_token_for_that_person() \
                .then_should_return_confirmation_token() \
                .then_that_token_should_be_saved_in_db()

    def test_delete_confirmation_tokens_from_person(self):
        ConfirmationTokenRepoTestCase._ScenarioMaker() \
                .given_a_person() \
                .when_create_confirmation_token_for_that_person() \
                .when_create_confirmation_token_for_that_person() \
                .when_create_confirmation_token_for_that_person() \
                .when_delete_confirmation_tokens_for_that_person() \
                .then_response_should_be_true() \
                .then_there_should_be_no_confirmation_tokens_for_that_person()

    def test_get_person_id_from_confirmation_token(self):
        ConfirmationTokenRepoTestCase._ScenarioMaker() \
                .given_a_person() \
                .given_a_confirmation_token_for_that_person() \
                .when_get_person_id_for_that_confirmation_token() \
                .then_response_should_be_person_id()

    def test_get_person_id_when_has_no_tokens(self):
        ConfirmationTokenRepoTestCase._ScenarioMaker() \
                .given_a_random_confirmation_token_not_in_db() \
                .when_get_person_id_for_that_confirmation_token() \
                .then_should_raise_entity_does_not_exist()

    class _ScenarioMaker:
        """Fluent builder: given_* set up state, when_* act, then_* assert."""

        def __init__(self):
            self.person = None
            self.confirmation_token = None
            # Fix: the when_* steps store into `result` (the previous
            # `self.response = None` initialized an attribute nobody used).
            self.result = None
            self.error = None

        def given_a_person(self):
            self.person = PersonRepo().create_guest_person()
            return self

        def given_a_confirmation_token_for_that_person(self):
            self.confirmation_token = ConfirmationTokenRepo().create_confirmation_token(person_id=self.person.id)
            return self

        def given_a_random_confirmation_token_not_in_db(self):
            self.confirmation_token = uuid.uuid4()
            return self

        def when_get_person_id_for_that_confirmation_token(self):
            try:
                self.result = ConfirmationTokenRepo().get_person_id(confirmation_token=self.confirmation_token)
            except Exception as e:
                self.error = e
            return self

        def when_create_confirmation_token_for_that_person(self):
            self.result = ConfirmationTokenRepo().create_confirmation_token(person_id=self.person.id)
            return self

        def when_delete_confirmation_tokens_for_that_person(self):
            self.result = ConfirmationTokenRepo().delete_confirmation_tokens(person_id=self.person.id)
            return self

        def then_should_return_confirmation_token(self):
            assert type(self.result) is str
            assert len(self.result) > 0
            return self

        def then_that_token_should_be_saved_in_db(self):
            assert ORMConfirmationToken.objects.filter(person_id=self.person.id, token=self.result).exists()
            return self

        def then_response_should_be_true(self):
            assert self.result is True
            return self

        def then_there_should_be_no_confirmation_tokens_for_that_person(self):
            assert not ORMConfirmationToken.objects.filter(person_id=self.person.id).exists()
            # Fix: was missing `return self`, breaking fluent chaining if any
            # further then_* step is appended after this one.
            return self

        def then_response_should_be_person_id(self):
            assert self.result == self.person.id
            return self

        def then_should_raise_entity_does_not_exist(self):
            assert type(self.error) is EntityDoesNotExistException
            return self
|
[
"people.repositories.AuthTokenRepo",
"people.models.ORMPerson.objects.get",
"people.models.ORMConfirmationToken.objects.filter",
"uuid.uuid4",
"people.repositories.ConfirmationTokenRepo",
"people.models.ORMPerson.objects.filter",
"people.repositories.PersonRepo",
"people.entities.Person",
"people.models.ORMPerson.objects.create",
"people.models.ORMAuthToken.objects.filter"
] |
[((1852, 1878), 'people.models.ORMPerson.objects.create', 'ORMPerson.objects.create', ([], {}), '()\n', (1876, 1878), False, 'from people.models import ORMPerson, ORMAuthToken, ORMConfirmationToken\n'), ((1989, 2092), 'people.entities.Person', 'Person', ([], {'id': 'self.orm_person.id', 'is_registered': '(True)', 'username': '"""U"""', 'email': '"""E"""', 'is_email_confirmed': '(True)'}), "(id=self.orm_person.id, is_registered=True, username='U', email='E',\n is_email_confirmed=True)\n", (1995, 2092), False, 'from people.entities import Person\n'), ((3573, 3617), 'people.models.ORMPerson.objects.get', 'ORMPerson.objects.get', ([], {'id': 'self.orm_person.id'}), '(id=self.orm_person.id)\n', (3594, 3617), False, 'from people.models import ORMPerson, ORMAuthToken, ORMConfirmationToken\n'), ((9438, 9450), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (9448, 9450), False, 'import uuid\n'), ((2217, 2229), 'people.repositories.PersonRepo', 'PersonRepo', ([], {}), '()\n', (2227, 2229), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((2350, 2362), 'people.repositories.PersonRepo', 'PersonRepo', ([], {}), '()\n', (2360, 2362), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((2501, 2513), 'people.repositories.PersonRepo', 'PersonRepo', ([], {}), '()\n', (2511, 2513), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((2661, 2673), 'people.repositories.PersonRepo', 'PersonRepo', ([], {}), '()\n', (2671, 2673), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((2810, 2822), 'people.repositories.PersonRepo', 'PersonRepo', ([], {}), '()\n', (2820, 2822), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((3277, 3320), 'people.models.ORMPerson.objects.filter', 'ORMPerson.objects.filter', ([], {'id': 'self.result.id'}), '(id=self.result.id)\n', (3301, 3320), 
False, 'from people.models import ORMPerson, ORMAuthToken, ORMConfirmationToken\n'), ((5453, 5465), 'people.repositories.PersonRepo', 'PersonRepo', ([], {}), '()\n', (5463, 5465), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((5598, 5613), 'people.repositories.AuthTokenRepo', 'AuthTokenRepo', ([], {}), '()\n', (5611, 5613), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((6925, 7070), 'people.models.ORMAuthToken.objects.filter', 'ORMAuthToken.objects.filter', ([], {'person_id': 'self.result.person_id', 'access_token': 'self.result.access_token', 'refresh_token': 'self.result.refresh_token'}), '(person_id=self.result.person_id, access_token=\n self.result.access_token, refresh_token=self.result.refresh_token)\n', (6952, 7070), False, 'from people.models import ORMPerson, ORMAuthToken, ORMConfirmationToken\n'), ((9076, 9088), 'people.repositories.PersonRepo', 'PersonRepo', ([], {}), '()\n', (9086, 9088), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((9236, 9259), 'people.repositories.ConfirmationTokenRepo', 'ConfirmationTokenRepo', ([], {}), '()\n', (9257, 9259), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((9854, 9877), 'people.repositories.ConfirmationTokenRepo', 'ConfirmationTokenRepo', ([], {}), '()\n', (9875, 9877), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((10048, 10071), 'people.repositories.ConfirmationTokenRepo', 'ConfirmationTokenRepo', ([], {}), '()\n', (10069, 10071), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((10392, 10477), 'people.models.ORMConfirmationToken.objects.filter', 'ORMConfirmationToken.objects.filter', ([], {'person_id': 'self.person.id', 'token': 'self.result'}), '(person_id=self.person.id, token=self.result\n )\n', (10427, 10477), False, 'from 
people.models import ORMPerson, ORMAuthToken, ORMConfirmationToken\n'), ((5787, 5802), 'people.repositories.AuthTokenRepo', 'AuthTokenRepo', ([], {}), '()\n', (5800, 5802), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((6062, 6077), 'people.repositories.AuthTokenRepo', 'AuthTokenRepo', ([], {}), '()\n', (6075, 6077), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((6321, 6336), 'people.repositories.AuthTokenRepo', 'AuthTokenRepo', ([], {}), '()\n', (6334, 6336), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((9589, 9612), 'people.repositories.ConfirmationTokenRepo', 'ConfirmationTokenRepo', ([], {}), '()\n', (9610, 9612), False, 'from people.repositories import PersonRepo, AuthTokenRepo, ConfirmationTokenRepo\n'), ((10721, 10782), 'people.models.ORMConfirmationToken.objects.filter', 'ORMConfirmationToken.objects.filter', ([], {'person_id': 'self.person.id'}), '(person_id=self.person.id)\n', (10756, 10782), False, 'from people.models import ORMPerson, ORMAuthToken, ORMConfirmationToken\n'), ((6110, 6122), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (6120, 6122), False, 'import uuid\n')]
|
# Module metadata.
__author__ = "<NAME> (jdamador)"
__credits__ = ["UNAD, Pasto - Colombia", "TEC, San Carlos - Costa Rica"]
__license__ = "Apache 2.0"
__version__ = "1.0"
__maintainer__ = "jdamador"
__email__ = "<EMAIL>"
import time
# Serial Port Settings
import serial
# Open the DWM module's USB-CDC serial port at 115200 baud.
# NOTE(review): assumes the device enumerates as /dev/ttyACM0 -- confirm.
DWM = serial.Serial(port="/dev/ttyACM0",baudrate=115200)
print("Connected to " + DWM.name)
# Enter the DWM shell (double carriage return), then start the
# "les" location-stream command; the sleeps give the module time to respond.
DWM.write("\r\r".encode())
time.sleep(1)
DWM.write("les\r".encode())
time.sleep(1)
try:
    while True:
        data = DWM.readline()
        if data:
            # Strip CR/LF bytes, decode, and split the CSV-style report.
            parse = data.translate(None, b'\r\n').decode().split(",")
            if len(parse) > 5:
                print(parse)
                # NOTE(review): writing "\r" and closing the port here, inside
                # the loop, means the next readline() runs on a closed port and
                # will raise -- confirm this early-exit-after-first-report is
                # intentional.
                DWM.write("\r".encode())
                DWM.close()
except KeyboardInterrupt:
    print("Stop")
|
[
"serial.Serial",
"time.sleep"
] |
[((261, 312), 'serial.Serial', 'serial.Serial', ([], {'port': '"""/dev/ttyACM0"""', 'baudrate': '(115200)'}), "(port='/dev/ttyACM0', baudrate=115200)\n", (274, 312), False, 'import serial\n'), ((373, 386), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (383, 386), False, 'import time\n'), ((415, 428), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (425, 428), False, 'import time\n')]
|
from datetime import date, datetime
from pyws.functions.args import DictOf
from pyws.functions.register import register
# = add simple ================================================================
@register()
@register('add_integers', return_type=int, args=((int, 0), (int, 0)))
@register('add_floats', return_type=float, args=((float, 0), (float, 0)))
def add_simple(a, b):
    """Return the sum of *a* and *b* (exposed for int and float pairs)."""
    total = a + b
    return total
# = flip a boolean ============================================================
@register(return_type=bool, args=(bool, ))
def flip_boolean(b):
    """Return the boolean *b* flipped (XOR with True inverts it)."""
    flipped = True ^ b
    return flipped
# = say hello ================================================================
@register('say_hello', needs_context=True)
def say_hello(context=None):
    """Greet the pyws-supplied call context by prefixing it with 'hello '."""
    greeting = 'hello ' + context
    return greeting
# = next month ================================================================
@register(return_type=date, args=(date, ))
@register('next_month_dt', return_type=datetime, args=(datetime, ))
def next_month(d):
    """Return *d* shifted one month forward; December wraps to January of
    the next year. A falsy input yields None.

    NOTE(review): d.replace() still raises ValueError when the source day
    does not exist in the target month (e.g. Jan 31 -> Feb 31) -- behavior
    unchanged from the original.
    """
    if not d:
        return None
    # Fix: the original wrap `(d.month - 1) % 12 + 1` is an identity mapping,
    # so December incorrectly stayed December (of the next year). `% 12 + 1`
    # maps 1..11 -> 2..12 and 12 -> 1.
    month = d.month % 12 + 1
    year = d.year + 1 if d.month == 12 else d.year
    return d.replace(year=year, month=month)
# = add dicts =================================================================
# Field specs for the dict-typed add functions below: key 0 names the type,
# the remaining keys map field name -> type (optionally paired with a default).
ABStringDict = {0: 'ABStringDict', 'a': str, 'b': str}
ABIntegerDict = {0: 'ABIntegerDict', 'a': (int, 0), 'b': (int, 0)}


# PEP 8 (E731): use `def` instead of assigning a lambda to a name.
def ab_string_dict_none():
    """Default factory: an ABStringDict with empty-string fields."""
    return {'a': '', 'b': ''}


def ab_integer_dict_none():
    """Default factory: an ABIntegerDict with zeroed fields."""
    return {'a': 0, 'b': 0}
@register(
    'add_string_dicts',
    return_type=ABStringDict,
    args=(
        (ABStringDict, ab_string_dict_none),
        (ABStringDict, ab_string_dict_none),
    ),
)
@register(
    'add_integer_dicts',
    return_type=ABIntegerDict,
    args=(
        (ABIntegerDict, ab_integer_dict_none),
        (ABIntegerDict, ab_integer_dict_none),
    ),
)
def add_dicts(p, q):
    """Combine two a/b dicts field-wise with `+` (concatenation or addition)."""
    return {key: p[key] + q[key] for key in ('a', 'b')}
# = add lists =================================================================
# List type spec: a homogeneous list of strings.
StringList = [str]


@register(
    'add_string_lists',
    return_type=StringList,
    args=(StringList, StringList),
)
def add_string_lists(p, q):
    """Concatenate two string lists element-wise.

    The shorter list is first padded in place with '' so both have equal
    length (the callers' lists are mutated, as before).
    """
    if len(p) < len(q):
        p.extend([''] * (len(q) - len(p)))
    if len(q) < len(p):
        q.extend([''] * (len(p) - len(q)))
    # zip over the now equal-length lists replaces the Python-2-only
    # `xrange` index loop (same result, also valid on Python 3).
    return [a + b for a, b in zip(p, q)]
# List type spec: a homogeneous list of ints, element default 0.
IntegerList = [int, 0]


@register(
    'add_integer_lists',
    return_type=IntegerList,
    args=(IntegerList, IntegerList),
)
def add_integer_lists(p, q):
    """Add two integer lists element-wise.

    The shorter list is first padded in place with 0 so both have equal
    length (the callers' lists are mutated, as before).
    """
    if len(p) < len(q):
        p.extend([0] * (len(q) - len(p)))
    if len(q) < len(p):
        q.extend([0] * (len(p) - len(q)))
    # zip over the now equal-length lists replaces the Python-2-only
    # `xrange` index loop (same result, also valid on Python 3).
    return [a + b for a, b in zip(p, q)]
# = trees =====================================================================
# Recursive dict type: a Tree carries an int 'value' (default 0) plus
# optional 'left'/'right' subtrees of the same Tree type. The subtree fields
# are added after creation so the type can reference itself.
Tree = DictOf('Tree',
    ('value', int, 0),
)
Tree.add_fields(
    ('left', Tree),
    ('right', Tree),
)
@register(return_type=int, args=(Tree,))
def sum_tree(p):
    """Recursively sum the 'value' fields of tree *p*; an empty tree sums to 0."""
    if not p:
        return 0
    left_sum = sum_tree(p['left']) if p['left'] else 0
    right_sum = sum_tree(p['right']) if p['right'] else 0
    return p['value'] + left_sum + right_sum
@register(return_type=Tree, args=(int,))
def get_tree(p):
    """Return a fixed sample tree whose shape depends on *p* (None if falsy)."""
    if not p:
        return None
    if p == 1:
        return {'value': 1}
    if p == 2:
        return {'value': 2, 'left': None, 'right': None}
    left_leaf = {'value': 4, 'left': None, 'right': None}
    right_leaf = {'value': 5, 'left': None, 'right': None}
    return {
        'value': 3,
        'left': left_leaf,
        'right': right_leaf,
    }
# = raises exception ==========================================================
class HelloError(Exception):
    """Demo error raised by raises_exception; its message is '<name> error'."""

    def __init__(self, name):
        message = u'%s error' % name
        super(HelloError, self).__init__(message)
@register()
def raises_exception(name):
    """Always fail by raising HelloError built from *name*."""
    error = HelloError(name)
    raise error
|
[
"pyws.functions.register.register",
"pyws.functions.args.DictOf"
] |
[((205, 215), 'pyws.functions.register.register', 'register', ([], {}), '()\n', (213, 215), False, 'from pyws.functions.register import register\n'), ((217, 285), 'pyws.functions.register.register', 'register', (['"""add_integers"""'], {'return_type': 'int', 'args': '((int, 0), (int, 0))'}), "('add_integers', return_type=int, args=((int, 0), (int, 0)))\n", (225, 285), False, 'from pyws.functions.register import register\n'), ((287, 359), 'pyws.functions.register.register', 'register', (['"""add_floats"""'], {'return_type': 'float', 'args': '((float, 0), (float, 0))'}), "('add_floats', return_type=float, args=((float, 0), (float, 0)))\n", (295, 359), False, 'from pyws.functions.register import register\n'), ((482, 522), 'pyws.functions.register.register', 'register', ([], {'return_type': 'bool', 'args': '(bool,)'}), '(return_type=bool, args=(bool,))\n', (490, 522), False, 'from pyws.functions.register import register\n'), ((647, 688), 'pyws.functions.register.register', 'register', (['"""say_hello"""'], {'needs_context': '(True)'}), "('say_hello', needs_context=True)\n", (655, 688), False, 'from pyws.functions.register import register\n'), ((832, 872), 'pyws.functions.register.register', 'register', ([], {'return_type': 'date', 'args': '(date,)'}), '(return_type=date, args=(date,))\n', (840, 872), False, 'from pyws.functions.register import register\n'), ((875, 940), 'pyws.functions.register.register', 'register', (['"""next_month_dt"""'], {'return_type': 'datetime', 'args': '(datetime,)'}), "('next_month_dt', return_type=datetime, args=(datetime,))\n", (883, 940), False, 'from pyws.functions.register import register\n'), ((1463, 1602), 'pyws.functions.register.register', 'register', (['"""add_string_dicts"""'], {'return_type': 'ABStringDict', 'args': '((ABStringDict, ab_string_dict_none), (ABStringDict, ab_string_dict_none))'}), "('add_string_dicts', return_type=ABStringDict, args=((ABStringDict,\n ab_string_dict_none), (ABStringDict, ab_string_dict_none)))\n", 
(1471, 1602), False, 'from pyws.functions.register import register\n'), ((1638, 1788), 'pyws.functions.register.register', 'register', (['"""add_integer_dicts"""'], {'return_type': 'ABIntegerDict', 'args': '((ABIntegerDict, ab_integer_dict_none), (ABIntegerDict, ab_integer_dict_none))'}), "('add_integer_dicts', return_type=ABIntegerDict, args=((\n ABIntegerDict, ab_integer_dict_none), (ABIntegerDict,\n ab_integer_dict_none)))\n", (1646, 1788), False, 'from pyws.functions.register import register\n'), ((2022, 2109), 'pyws.functions.register.register', 'register', (['"""add_string_lists"""'], {'return_type': 'StringList', 'args': '(StringList, StringList)'}), "('add_string_lists', return_type=StringList, args=(StringList,\n StringList))\n", (2030, 2109), False, 'from pyws.functions.register import register\n'), ((2362, 2453), 'pyws.functions.register.register', 'register', (['"""add_integer_lists"""'], {'return_type': 'IntegerList', 'args': '(IntegerList, IntegerList)'}), "('add_integer_lists', return_type=IntegerList, args=(IntegerList,\n IntegerList))\n", (2370, 2453), False, 'from pyws.functions.register import register\n'), ((2769, 2802), 'pyws.functions.args.DictOf', 'DictOf', (['"""Tree"""', "('value', int, 0)"], {}), "('Tree', ('value', int, 0))\n", (2775, 2802), False, 'from pyws.functions.args import DictOf\n'), ((2871, 2910), 'pyws.functions.register.register', 'register', ([], {'return_type': 'int', 'args': '(Tree,)'}), '(return_type=int, args=(Tree,))\n', (2879, 2910), False, 'from pyws.functions.register import register\n'), ((3069, 3108), 'pyws.functions.register.register', 'register', ([], {'return_type': 'Tree', 'args': '(int,)'}), '(return_type=Tree, args=(int,))\n', (3077, 3108), False, 'from pyws.functions.register import register\n'), ((3638, 3648), 'pyws.functions.register.register', 'register', ([], {}), '()\n', (3646, 3648), False, 'from pyws.functions.register import register\n')]
|
# -*- coding: utf-8 -*-
import sys
import unittest
import locale
import threading
from datetime import date
from contextlib import contextmanager
from orgmode.py3compat.unicode_compatibility import *
sys.path.append(u'../ftplugin')
from orgmode.liborgmode.orgdate import OrgDate
class OrgDateUtf8TestCase(unittest.TestCase):
    u"""
    Tests OrgDate with utf-8 enabled locales
    """

    # Serializes access to the process-wide locale: setlocale mutates global
    # interpreter state, so concurrent tests must not interleave changes.
    LOCALE_LOCK = threading.Lock()
    # Brazilian Portuguese locale; must be installed on the host for the
    # tests to pass ('Sáb' is the pt_BR weekday abbreviation for Saturday).
    UTF8_LOCALE = "pt_BR.utf-8"

    @contextmanager
    def setlocale(self, name):
        # Temporarily switch LC_ALL to *name*, always restoring the
        # previously-saved locale on exit.
        with self.LOCALE_LOCK:
            saved = locale.setlocale(locale.LC_ALL)
            try:
                yield locale.setlocale(locale.LC_ALL, name)
            finally:
                locale.setlocale(locale.LC_ALL, saved)

    def setUp(self):
        # 2016-05-07 is a Saturday; expected strings use the localized
        # abbreviated weekday name.
        self.year = 2016
        self.month = 5
        self.day = 7
        self.text = u'<2016-05-07 Sáb>'
        self.textinactive = u'[2016-05-07 Sáb]'

    def test_OrdDate_str_unicode_active(self):
        # Active dates render in angle brackets.
        with self.setlocale(self.UTF8_LOCALE):
            od = OrgDate(True, self.year, self.month, self.day)
            self.assertEqual(self.text, unicode(od))

    def test_OrdDate_str_unicode_inactive(self):
        # Inactive dates render in square brackets.
        with self.setlocale(self.UTF8_LOCALE):
            od = OrgDate(False, self.year, self.month, self.day)
            self.assertEqual(self.textinactive, unicode(od))
def suite():
    """Build a TestSuite containing every test of OrgDateUtf8TestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(OrgDateUtf8TestCase)
# vi: noexpandtab
|
[
"sys.path.append",
"orgmode.liborgmode.orgdate.OrgDate",
"threading.Lock",
"unittest.TestLoader",
"locale.setlocale"
] |
[((204, 235), 'sys.path.append', 'sys.path.append', (['u"""../ftplugin"""'], {}), "(u'../ftplugin')\n", (219, 235), False, 'import sys\n'), ((398, 414), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (412, 414), False, 'import threading\n'), ((526, 557), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL'], {}), '(locale.LC_ALL)\n', (542, 557), False, 'import locale\n'), ((909, 955), 'orgmode.liborgmode.orgdate.OrgDate', 'OrgDate', (['(True)', 'self.year', 'self.month', 'self.day'], {}), '(True, self.year, self.month, self.day)\n', (916, 955), False, 'from orgmode.liborgmode.orgdate import OrgDate\n'), ((1096, 1143), 'orgmode.liborgmode.orgdate.OrgDate', 'OrgDate', (['(False)', 'self.year', 'self.month', 'self.day'], {}), '(False, self.year, self.month, self.day)\n', (1103, 1143), False, 'from orgmode.liborgmode.orgdate import OrgDate\n'), ((1218, 1239), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (1237, 1239), False, 'import unittest\n'), ((630, 668), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', 'saved'], {}), '(locale.LC_ALL, saved)\n', (646, 668), False, 'import locale\n'), ((576, 613), 'locale.setlocale', 'locale.setlocale', (['locale.LC_ALL', 'name'], {}), '(locale.LC_ALL, name)\n', (592, 613), False, 'import locale\n')]
|
from django.contrib import messages
from django.core.exceptions import ValidationError
from django.http import HttpResponseRedirect, Http404
from django.shortcuts import render, get_object_or_404
from django_filters.views import FilterView
from django_tables2 import SingleTableMixin
from app.mixins import TabsViewMixin
from app.utils import reverse, hacker_tabs
from app.views import TabsView
from applications import models as app_mod
from applications.emails import send_batch_emails
from reimbursement import forms, models, emails
from reimbursement.tables import ReimbursementTable, ReimbursementFilter, SendReimbursementTable, \
SendReimbursementFilter
from user.mixins import IsOrganizerMixin, IsDirectorMixin, IsHackerMixin
from organizers.views import hacker_tabs as organizer_tabs
class ReimbursementHacker(IsHackerMixin, TabsView):
    """Hacker-facing view to display and submit a reimbursement receipt."""

    template_name = 'reimbursement_hacker.html'

    def get_current_tabs(self):
        return hacker_tabs(self.request.user)

    def get_context_data(self, **kwargs):
        c = super(ReimbursementHacker, self).get_context_data(**kwargs)
        reimb = getattr(self.request.user, 'reimbursement', None)
        if not reimb:
            # User has no reimbursement attached: nothing to show.
            raise Http404
        c.update({'form': forms.ReceiptSubmissionReceipt(instance=self.request.user.reimbursement)})
        return c

    def post(self, request, *args, **kwargs):
        try:
            form = forms.ReceiptSubmissionReceipt(request.POST, request.FILES, instance=request.user.reimbursement)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt. Accessing request.user.reimbursement raises
            # when the user has none yet -- build the form without an instance.
            form = forms.ReceiptSubmissionReceipt(request.POST, request.FILES)
        if form.is_valid():
            reimb = form.save(commit=False)
            reimb.hacker = request.user
            reimb.save()
            messages.success(request,
                             'We have now received your reimbursement. '
                             'Processing will take some time, so please be patient.')
            return HttpResponseRedirect(reverse('reimbursement_dashboard'))
        else:
            # Invalid form: re-render with the bound form so errors display.
            c = self.get_context_data()
            c.update({'form': form})
            return render(request, self.template_name, c)
class ReimbursementDetail(IsOrganizerMixin, TabsView):
    """Organizer view of a single reimbursement with an inline edit form."""

    template_name = 'reimbursement_detail.html'

    def get_current_tabs(self):
        return None

    def get_back_url(self):
        return reverse('reimbursement_list')

    def get_context_data(self, **kwargs):
        c = super(ReimbursementDetail, self).get_context_data(**kwargs)
        id_ = kwargs.get('id', None)
        if not id_:
            return c
        reimb = get_object_or_404(models.Reimbursement, pk=id_)
        c.update({'reimb': reimb, 'edit_form': forms.EditReimbursementForm(instance=reimb)})
        return c

    def post(self, request, *args, **kwargs):
        id_ = kwargs.get('id', None)
        # Fix for consistency with get_context_data: respond 404 on an
        # unknown id instead of letting objects.get raise DoesNotExist (500).
        reimb = get_object_or_404(models.Reimbursement, pk=id_)
        form = forms.EditReimbursementForm(request.POST, instance=reimb)
        if form.is_valid():
            form.save()
            messages.success(self.request, "Changes in reimbursement successfully saved!")
        else:
            return render(request, self.template_name, {'reimb': reimb, 'edit_form': form})
        return HttpResponseRedirect(reverse('reimbursement_detail', kwargs={'id': reimb.pk}))
class ReceiptReview(ReimbursementDetail):
    """Organizer review queue: shows the oldest pending receipt and handles
    the accept/reject decision posted back from the review form."""

    def get_current_tabs(self):
        return organizer_tabs(self.request.user)

    def get_back_url(self):
        # No back link on the review screen.
        return None

    def get_context_data(self, **kwargs):
        c = super(ReceiptReview, self).get_context_data(**kwargs)
        # Review the least-recently-updated reimbursement still pending
        # approval (may be None when the queue is empty).
        reimb = models.Reimbursement.objects.filter(status=models.RE_PEND_APPROVAL).order_by('-update_time').first()
        c.update({'reimb': reimb, 'reject_form': forms.RejectReceiptForm(instance=reimb), 'review': True,
                  'accept_form': forms.AcceptReceiptForm(instance=reimb)})
        return c

    def post(self, request, *args, **kwargs):
        # The reviewed reimbursement's pk comes from the POSTed form, not the URL.
        id_ = request.POST.get('id', None)
        reimb = models.Reimbursement.objects.get(pk=id_)
        a_form = forms.AcceptReceiptForm(instance=reimb)
        r_form = forms.RejectReceiptForm(instance=reimb)
        if request.POST.get('accept', None):
            a_form = forms.AcceptReceiptForm(request.POST, instance=reimb)
            if a_form.is_valid():
                # accept_receipt mutates the instance before the final save.
                a_form.save(commit=False)
                a_form.instance.accept_receipt(request.user)
                a_form.save()
                messages.success(request, 'Receipt accepted')
            else:
                # Re-render the review screen with the bound (invalid) form.
                c = self.get_context_data()
                c.update({'reject_form': r_form, 'accept_form': a_form})
                return render(request, self.template_name, c)
        elif request.POST.get('reject', None):
            r_form = forms.RejectReceiptForm(request.POST, instance=reimb)
            if r_form.is_valid():
                r_form.save(commit=False)
                # reject_receipt returns the rejection email, sent before save.
                m = r_form.instance.reject_receipt(request.user, request)
                m.send()
                r_form.save()
                messages.success(request, 'Receipt rejected')
            else:
                c = self.get_context_data()
                c.update({'reject_form': r_form,
                          'accept_form': a_form})
                return render(request, self.template_name, c)
        return HttpResponseRedirect(reverse('receipt_review'))
class ReimbursementListView(IsOrganizerMixin, TabsViewMixin, SingleTableMixin, FilterView):
    """Organizer-facing filterable table of all reimbursements."""

    template_name = 'reimbursements_table.html'
    table_class = ReimbursementTable
    filterset_class = ReimbursementFilter
    # Page size for the django-tables2 paginator.
    table_pagination = {'per_page': 100}

    def get_current_tabs(self):
        return organizer_tabs(self.request.user)

    def get_queryset(self):
        # Unfiltered base queryset; the FilterView applies filters on top.
        return models.Reimbursement.objects.all()
class SendReimbursementListView(IsDirectorMixin, TabsViewMixin, SingleTableMixin, FilterView):
    """Director-only table of draft reimbursements, with a bulk action that
    assigns money and emails the selected hackers (or sends a
    no-reimbursement notice)."""

    template_name = 'reimbursement_send_table.html'
    table_class = SendReimbursementTable
    filterset_class = SendReimbursementFilter
    # Page size for the django-tables2 paginator.
    table_pagination = {'per_page': 100}

    def get_current_tabs(self):
        return organizer_tabs(self.request.user)

    def get_queryset(self):
        # Only drafts whose hacker's application is invited or further along.
        status = [app_mod.APP_INVITED, app_mod.APP_LAST_REMIDER, app_mod.APP_CONFIRMED, app_mod.APP_ATTENDED]
        return models.Reimbursement.objects.filter(status=models.RE_DRAFT) \
            .filter(hacker__hackerapplication_application__status__in=status).all()

    def post(self, request, *args, **kwargs):
        # Bulk action over the checkbox-selected rows.
        ids = request.POST.getlist('selected')
        no_reimb = request.POST.get('no_reimb', False)
        reimbs = models.Reimbursement.objects.filter(pk__in=ids).all()
        mails = []
        errors = 0
        for reimb in reimbs:
            try:
                if not no_reimb:
                    # Per-row amount comes from the 'am_<pk>' form field.
                    assigned_money = request.POST.get('am_' + str(reimb.pk))
                    reimb.assigned_money = assigned_money
                    reimb.send(request.user)
                    m = emails.create_reimbursement_email(reimb, request)
                else:
                    reimb.no_reimb(request.user)
                    m = emails.create_no_reimbursement_email(reimb, request)
                mails.append(m)
            except ValidationError:
                # Invalid rows are skipped but counted for the error message.
                errors += 1
        if mails:
            # Emails are built first, then dispatched in one batch.
            send_batch_emails(mails)
            if no_reimb:
                msg = "%s no reimbursements message sent"
            else:
                msg = "%s reimbursements sent"
            messages.success(request, msg % len(mails))
        else:
            messages.error(request, "%s reimbursements not sent" % errors)
        return HttpResponseRedirect(reverse('send_reimbursement'))
|
[
"reimbursement.forms.ReceiptSubmissionReceipt",
"reimbursement.emails.create_reimbursement_email",
"reimbursement.models.Reimbursement.objects.filter",
"applications.emails.send_batch_emails",
"reimbursement.forms.RejectReceiptForm",
"django.contrib.messages.error",
"reimbursement.forms.EditReimbursementForm",
"organizers.views.hacker_tabs",
"django.shortcuts.get_object_or_404",
"reimbursement.models.Reimbursement.objects.all",
"app.utils.reverse",
"reimbursement.forms.AcceptReceiptForm",
"reimbursement.emails.create_no_reimbursement_email",
"reimbursement.models.Reimbursement.objects.get",
"django.shortcuts.render",
"django.contrib.messages.success",
"app.utils.hacker_tabs"
] |
[((970, 1000), 'app.utils.hacker_tabs', 'hacker_tabs', (['self.request.user'], {}), '(self.request.user)\n', (981, 1000), False, 'from app.utils import reverse, hacker_tabs\n'), ((2418, 2447), 'app.utils.reverse', 'reverse', (['"""reimbursement_list"""'], {}), "('reimbursement_list')\n", (2425, 2447), False, 'from app.utils import reverse, hacker_tabs\n'), ((2664, 2711), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['models.Reimbursement'], {'pk': 'id_'}), '(models.Reimbursement, pk=id_)\n', (2681, 2711), False, 'from django.shortcuts import render, get_object_or_404\n'), ((2928, 2968), 'reimbursement.models.Reimbursement.objects.get', 'models.Reimbursement.objects.get', ([], {'pk': 'id_'}), '(pk=id_)\n', (2960, 2968), False, 'from reimbursement import forms, models, emails\n'), ((2985, 3042), 'reimbursement.forms.EditReimbursementForm', 'forms.EditReimbursementForm', (['request.POST'], {'instance': 'reimb'}), '(request.POST, instance=reimb)\n', (3012, 3042), False, 'from reimbursement import forms, models, emails\n'), ((3492, 3525), 'organizers.views.hacker_tabs', 'organizer_tabs', (['self.request.user'], {}), '(self.request.user)\n', (3506, 3525), True, 'from organizers.views import hacker_tabs as organizer_tabs\n'), ((4119, 4159), 'reimbursement.models.Reimbursement.objects.get', 'models.Reimbursement.objects.get', ([], {'pk': 'id_'}), '(pk=id_)\n', (4151, 4159), False, 'from reimbursement import forms, models, emails\n'), ((4178, 4217), 'reimbursement.forms.AcceptReceiptForm', 'forms.AcceptReceiptForm', ([], {'instance': 'reimb'}), '(instance=reimb)\n', (4201, 4217), False, 'from reimbursement import forms, models, emails\n'), ((4236, 4275), 'reimbursement.forms.RejectReceiptForm', 'forms.RejectReceiptForm', ([], {'instance': 'reimb'}), '(instance=reimb)\n', (4259, 4275), False, 'from reimbursement import forms, models, emails\n'), ((5848, 5881), 'organizers.views.hacker_tabs', 'organizer_tabs', (['self.request.user'], {}), 
'(self.request.user)\n', (5862, 5881), True, 'from organizers.views import hacker_tabs as organizer_tabs\n'), ((5929, 5963), 'reimbursement.models.Reimbursement.objects.all', 'models.Reimbursement.objects.all', ([], {}), '()\n', (5961, 5963), False, 'from reimbursement import forms, models, emails\n'), ((6299, 6332), 'organizers.views.hacker_tabs', 'organizer_tabs', (['self.request.user'], {}), '(self.request.user)\n', (6313, 6332), True, 'from organizers.views import hacker_tabs as organizer_tabs\n'), ((1439, 1540), 'reimbursement.forms.ReceiptSubmissionReceipt', 'forms.ReceiptSubmissionReceipt', (['request.POST', 'request.FILES'], {'instance': 'request.user.reimbursement'}), '(request.POST, request.FILES, instance=\n request.user.reimbursement)\n', (1469, 1540), False, 'from reimbursement import forms, models, emails\n'), ((1787, 1919), 'django.contrib.messages.success', 'messages.success', (['request', '"""We have now received your reimbursement. Processing will take some time, so please be patient."""'], {}), "(request,\n 'We have now received your reimbursement. 
Processing will take some time, so please be patient.'\n )\n", (1803, 1919), False, 'from django.contrib import messages\n'), ((2167, 2205), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'c'], {}), '(request, self.template_name, c)\n', (2173, 2205), False, 'from django.shortcuts import render, get_object_or_404\n'), ((3112, 3190), 'django.contrib.messages.success', 'messages.success', (['self.request', '"""Changes in reimbursement successfully saved!"""'], {}), "(self.request, 'Changes in reimbursement successfully saved!')\n", (3128, 3190), False, 'from django.contrib import messages\n'), ((3226, 3298), 'django.shortcuts.render', 'render', (['request', 'self.template_name', "{'reimb': reimb, 'edit_form': form}"], {}), "(request, self.template_name, {'reimb': reimb, 'edit_form': form})\n", (3232, 3298), False, 'from django.shortcuts import render, get_object_or_404\n'), ((3338, 3394), 'app.utils.reverse', 'reverse', (['"""reimbursement_detail"""'], {'kwargs': "{'id': reimb.pk}"}), "('reimbursement_detail', kwargs={'id': reimb.pk})\n", (3345, 3394), False, 'from app.utils import reverse, hacker_tabs\n'), ((4346, 4399), 'reimbursement.forms.AcceptReceiptForm', 'forms.AcceptReceiptForm', (['request.POST'], {'instance': 'reimb'}), '(request.POST, instance=reimb)\n', (4369, 4399), False, 'from reimbursement import forms, models, emails\n'), ((5501, 5526), 'app.utils.reverse', 'reverse', (['"""receipt_review"""'], {}), "('receipt_review')\n", (5508, 5526), False, 'from app.utils import reverse, hacker_tabs\n'), ((7529, 7553), 'applications.emails.send_batch_emails', 'send_batch_emails', (['mails'], {}), '(mails)\n', (7546, 7553), False, 'from applications.emails import send_batch_emails\n'), ((7791, 7853), 'django.contrib.messages.error', 'messages.error', (['request', "('%s reimbursements not sent' % errors)"], {}), "(request, '%s reimbursements not sent' % errors)\n", (7805, 7853), False, 'from django.contrib import messages\n'), ((7893, 
7922), 'app.utils.reverse', 'reverse', (['"""send_reimbursement"""'], {}), "('send_reimbursement')\n", (7900, 7922), False, 'from app.utils import reverse, hacker_tabs\n'), ((1263, 1335), 'reimbursement.forms.ReceiptSubmissionReceipt', 'forms.ReceiptSubmissionReceipt', ([], {'instance': 'self.request.user.reimbursement'}), '(instance=self.request.user.reimbursement)\n', (1293, 1335), False, 'from reimbursement import forms, models, emails\n'), ((1573, 1632), 'reimbursement.forms.ReceiptSubmissionReceipt', 'forms.ReceiptSubmissionReceipt', (['request.POST', 'request.FILES'], {}), '(request.POST, request.FILES)\n', (1603, 1632), False, 'from reimbursement import forms, models, emails\n'), ((2017, 2051), 'app.utils.reverse', 'reverse', (['"""reimbursement_dashboard"""'], {}), "('reimbursement_dashboard')\n", (2024, 2051), False, 'from app.utils import reverse, hacker_tabs\n'), ((2760, 2803), 'reimbursement.forms.EditReimbursementForm', 'forms.EditReimbursementForm', ([], {'instance': 'reimb'}), '(instance=reimb)\n', (2787, 2803), False, 'from reimbursement import forms, models, emails\n'), ((3858, 3897), 'reimbursement.forms.RejectReceiptForm', 'forms.RejectReceiptForm', ([], {'instance': 'reimb'}), '(instance=reimb)\n', (3881, 3897), False, 'from reimbursement import forms, models, emails\n'), ((3949, 3988), 'reimbursement.forms.AcceptReceiptForm', 'forms.AcceptReceiptForm', ([], {'instance': 'reimb'}), '(instance=reimb)\n', (3972, 3988), False, 'from reimbursement import forms, models, emails\n'), ((4588, 4633), 'django.contrib.messages.success', 'messages.success', (['request', '"""Receipt accepted"""'], {}), "(request, 'Receipt accepted')\n", (4604, 4633), False, 'from django.contrib import messages\n'), ((4796, 4834), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'c'], {}), '(request, self.template_name, c)\n', (4802, 4834), False, 'from django.shortcuts import render, get_object_or_404\n'), ((4907, 4960), 
'reimbursement.forms.RejectReceiptForm', 'forms.RejectReceiptForm', (['request.POST'], {'instance': 'reimb'}), '(request.POST, instance=reimb)\n', (4930, 4960), False, 'from reimbursement import forms, models, emails\n'), ((6809, 6856), 'reimbursement.models.Reimbursement.objects.filter', 'models.Reimbursement.objects.filter', ([], {'pk__in': 'ids'}), '(pk__in=ids)\n', (6844, 6856), False, 'from reimbursement import forms, models, emails\n'), ((5188, 5233), 'django.contrib.messages.success', 'messages.success', (['request', '"""Receipt rejected"""'], {}), "(request, 'Receipt rejected')\n", (5204, 5233), False, 'from django.contrib import messages\n'), ((5423, 5461), 'django.shortcuts.render', 'render', (['request', 'self.template_name', 'c'], {}), '(request, self.template_name, c)\n', (5429, 5461), False, 'from django.shortcuts import render, get_object_or_404\n'), ((7195, 7244), 'reimbursement.emails.create_reimbursement_email', 'emails.create_reimbursement_email', (['reimb', 'request'], {}), '(reimb, request)\n', (7228, 7244), False, 'from reimbursement import forms, models, emails\n'), ((7343, 7395), 'reimbursement.emails.create_no_reimbursement_email', 'emails.create_no_reimbursement_email', (['reimb', 'request'], {}), '(reimb, request)\n', (7379, 7395), False, 'from reimbursement import forms, models, emails\n'), ((3707, 3774), 'reimbursement.models.Reimbursement.objects.filter', 'models.Reimbursement.objects.filter', ([], {'status': 'models.RE_PEND_APPROVAL'}), '(status=models.RE_PEND_APPROVAL)\n', (3742, 3774), False, 'from reimbursement import forms, models, emails\n'), ((6491, 6550), 'reimbursement.models.Reimbursement.objects.filter', 'models.Reimbursement.objects.filter', ([], {'status': 'models.RE_DRAFT'}), '(status=models.RE_DRAFT)\n', (6526, 6550), False, 'from reimbursement import forms, models, emails\n')]
|
import numpy
from copy import deepcopy
from barracuda.neurontype import neuron_type
import json
class _Neuron(object):
    """Abstract base class for network layers.

    Holds the cached forward-pass input/output; subclasses must
    implement forward, backward and serialize.
    """

    def __init__(self):
        # Filled in by subclasses during the forward pass.
        self.input = None
        self.output = None

    def forward(self, input_data):
        """Compute and return the layer output for ``input_data``."""
        raise NotImplementedError

    def backward(self, output_error, learning_rate):
        """Propagate ``output_error`` backwards; return the input error."""
        raise NotImplementedError

    def serialize(self):
        """Return a JSON string describing this neuron."""
        raise NotImplementedError
class InterNeuron(_Neuron):
    """Fully connected (dense) layer: output = input . weights + bias."""

    def __init__(self, input_size: int, output_size: int):
        super().__init__()
        self.shape = (input_size, output_size)
        # Uniform random initialization in [-1, 1).
        self.weights: numpy.ndarray = numpy.random.uniform(low=-1, high=1, size=(input_size, output_size))
        self.bias: numpy.ndarray = numpy.random.uniform(low=-1, high=1, size=(1, output_size))
        self.neuron_type = neuron_type.InterNeuron

    def forward(self, input_data):
        """Compute the affine transform, caching input and output for backward()."""
        self.input = numpy.array(input_data)
        self.output: numpy.ndarray = numpy.dot(self.input, self.weights) + self.bias
        return self.output

    def backward(self, output_error, learning_rate):
        """Apply one gradient-descent step and return the error w.r.t. the input.

        ``output_error`` may be a single error row or a (batch, output_size)
        matrix; the parameter updates handle both.
        """
        input_error = numpy.dot(output_error, self.weights.T)
        weights_error = numpy.dot(self.input.T, output_error)
        self.weights -= learning_rate * weights_error
        # Sum over the batch axis so the (1, output_size) bias update is
        # well-shaped for any batch size. For a single sample this equals
        # subtracting the raw error (the previous behavior, which raised a
        # broadcasting error for batches).
        bias_error = numpy.atleast_2d(output_error).sum(axis=0, keepdims=True)
        self.bias -= learning_rate * bias_error
        return input_error

    def serialize(self):
        """Return a JSON representation (type, shape, weights, bias)."""
        val = {"neuron_type": "InterNeuron", "shape": self.shape,
               "weights": self.weights.tolist(), "bias": self.bias.tolist()}
        return json.dumps(val, indent=2)
class ActivationNeuron(_Neuron):
    """Activation layer delegating to an activation object.

    The activation object is expected to provide ``normal`` (the
    function), ``derivative`` (its derivative) and a ``func`` attribute
    used by ``serialize``.
    """

    def __init__(self, activation):
        super().__init__()
        self.activation = activation
        self.neuron_type = neuron_type.ActivationNeuron

    def forward(self, input_data):
        """Apply the activation function, caching input and output."""
        self.input = input_data
        self.output = self.activation.normal(self.input)
        return self.output

    def backward(self, output_error, learning_rate):
        """Chain rule: scale the incoming error by the activation derivative."""
        return self.activation.derivative(self.input) * output_error

    def serialize(self):
        """Return a JSON representation naming the activation function."""
        payload = {"neuron_type": "ActivationNeuron", "activation": self.activation.func}
        return json.dumps(payload, indent=2)
|
[
"numpy.dot",
"numpy.random.uniform",
"numpy.array",
"json.dumps"
] |
[((609, 677), 'numpy.random.uniform', 'numpy.random.uniform', ([], {'low': '(-1)', 'high': '(1)', 'size': '(input_size, output_size)'}), '(low=-1, high=1, size=(input_size, output_size))\n', (629, 677), False, 'import numpy\n'), ((709, 768), 'numpy.random.uniform', 'numpy.random.uniform', ([], {'low': '(-1)', 'high': '(1)', 'size': '(1, output_size)'}), '(low=-1, high=1, size=(1, output_size))\n', (729, 768), False, 'import numpy\n'), ((874, 897), 'numpy.array', 'numpy.array', (['input_data'], {}), '(input_data)\n', (885, 897), False, 'import numpy\n'), ((1083, 1122), 'numpy.dot', 'numpy.dot', (['output_error', 'self.weights.T'], {}), '(output_error, self.weights.T)\n', (1092, 1122), False, 'import numpy\n'), ((1147, 1184), 'numpy.dot', 'numpy.dot', (['self.input.T', 'output_error'], {}), '(self.input.T, output_error)\n', (1156, 1184), False, 'import numpy\n'), ((1484, 1509), 'json.dumps', 'json.dumps', (['val'], {'indent': '(2)'}), '(val, indent=2)\n', (1494, 1509), False, 'import json\n'), ((2136, 2161), 'json.dumps', 'json.dumps', (['val'], {'indent': '(2)'}), '(val, indent=2)\n', (2146, 2161), False, 'import json\n'), ((932, 967), 'numpy.dot', 'numpy.dot', (['self.input', 'self.weights'], {}), '(self.input, self.weights)\n', (941, 967), False, 'import numpy\n')]
|
#!/usr/bin/env python
# encoding: utf-8
# <NAME>, 2005-2008 (ita)
"""
The class task_gen encapsulates the creation of task objects (low-level code)
The instances can have various parameters, but the creation of task nodes (Task.py)
is delayed. To achieve this, various methods are called from the method "apply"
The class task_gen contains lots of methods, and a configuration table:
* the methods to call (self.meths) can be specified dynamically (removing, adding, ..)
* the order of the methods (self.prec or by default task_gen.prec) is configurable
* new methods can be inserted dynamically without pasting old code
Additionally, task_gen provides the method apply_core
* file extensions are mapped to methods: def meth(self, name_or_node)
* if a mapping is not found in self.mappings, it is searched in task_gen.mappings
* when called, the functions may modify self.allnodes to re-add source to process
* the mappings can map an extension or a filename (see the code below)
WARNING: subclasses must reimplement the clone method
"""
import os, traceback, copy
import Build, Task, Utils, Logs, Options
from Logs import debug, error, warn
from Constants import *
# Common misspellings of task generator attribute names mapped to the real
# attribute; task_gen.__setattr__ consults this table and warns on a match.
typos = {
'sources':'source',
'targets':'target',
'include':'includes',
'define':'defines',
'importpath':'importpaths',
'install_var':'install_path',
'install_subdir':'install_path',
'inst_var':'install_path',
'inst_dir':'install_path',
'feature':'features',
}
class register_obj(type):
	"""Metaclass standing in for class decorators (not available here).

	Any class created with this metaclass whose name ends in '_taskgen'
	is recorded in task_gen.classes under the name minus that suffix.
	"""
	def __init__(cls, name, bases, namespace):
		super(register_obj, cls).__init__(name, bases, namespace)
		suffix = '_taskgen'
		cls_name = cls.__name__
		if cls_name.endswith(suffix):
			task_gen.classes[cls_name.replace(suffix, '')] = cls
class task_gen(object):
	"""
	Most methods are of the form 'def meth(self):' without any parameters
	there are many of them, and they do many different things:
	* task creation
	* task results installation
	* environment modification
	* attribute addition/removal
	The inheritance approach is complicated
	* mixing several languages at once
	* subclassing is needed even for small changes
	* inserting new methods is complicated
	This new class uses a configuration table:
	* adding new methods easily
	* obtaining the order in which to call the methods
	* postponing the method calls (post() -> apply)
	Additionally, a 'traits' static attribute is provided:
	* this list contains methods
	* the methods can remove or add methods from self.meths
	Example1: the attribute 'staticlib' is set on an instance
	a method set in the list of traits is executed when the
	instance is posted, it finds that flag and adds another method for execution
	Example2: a method set in the list of traits finds the msvc
	compiler (from self.env['MSVC']==1); more methods are added to self.meths
	"""
	__metaclass__ = register_obj
	# class-level registries shared by all generators (instances may shadow some)
	mappings = {}
	mapped = {}
	prec = Utils.DefaultDict(list)
	traits = Utils.DefaultDict(set)
	classes = {}
	def __init__(self, *kw, **kwargs):
		self.prec = Utils.DefaultDict(list)
		"map precedence of function names to call"
		# so we will have to play with directed acyclic graphs
		# detect cycles, etc
		self.source = ''
		self.target = ''
		# list of methods to execute - does not touch it by hand unless you know
		self.meths = []
		# list of mappings extension -> function
		self.mappings = {}
		# list of features (see the documentation on traits)
		self.features = list(kw)
		# not always a good idea
		self.tasks = []
		self.default_chmod = O644
		self.default_install_path = None
		# kind of private, beware of what you put in it, also, the contents are consumed
		self.allnodes = []
		self.bld = kwargs.get('bld', Build.bld)
		self.env = self.bld.env.copy()
		self.path = self.bld.path # emulate chdir when reading scripts
		self.name = '' # give a name to the target (static+shlib with the same targetname ambiguity)
		# provide a unique id
		self.idx = self.bld.idx[self.path.id] = self.bld.idx.get(self.path.id, 0) + 1
		for key, val in kwargs.iteritems():
			setattr(self, key, val)
		self.bld.task_manager.add_task_gen(self)
		self.bld.all_task_gen.append(self)
	def __str__(self):
		return ("<task_gen '%s' of type %s defined in %s>"
			% (self.name or self.target, self.__class__.__name__, str(self.path)))
	def __setattr__(self, name, attr):
		"""Store the attribute, silently redirecting names found in 'typos'."""
		real = typos.get(name, name)
		if real != name:
			warn('typo %s -> %s' % (name, real))
			if Logs.verbose > 0:
				traceback.print_stack()
		object.__setattr__(self, real, attr)
	def to_list(self, value):
		"helper: returns a list"
		if isinstance(value, str): return value.split()
		else: return value
	def apply(self):
		"order the methods to execute using self.prec or task_gen.prec"
		keys = set(self.meths)
		# add the methods listed in the features
		self.features = Utils.to_list(self.features)
		for x in self.features + ['*']:
			st = task_gen.traits[x]
			if not st:
				warn('feature %r does not exist - bind at least one method to it' % x)
			keys.update(st)
		# copy the precedence table
		prec = {}
		prec_tbl = self.prec or task_gen.prec
		for x in prec_tbl:
			if x in keys:
				prec[x] = prec_tbl[x]
		# elements disconnected
		tmp = []
		for a in keys:
			for x in prec.values():
				if a in x: break
			else:
				tmp.append(a)
		# topological sort
		out = []
		while tmp:
			e = tmp.pop()
			if e in keys: out.append(e)
			try:
				nlst = prec[e]
			except KeyError:
				pass
			else:
				del prec[e]
				for x in nlst:
					for y in prec:
						if x in prec[y]:
							break
					else:
						tmp.append(x)
		# anything left in prec was never freed of its predecessors -> cycle
		if prec: raise Utils.WafError("graph has a cycle %s" % str(prec))
		out.reverse()
		self.meths = out
		# then we run the methods in order
		debug('task_gen: posting %s %d' % (self, id(self)))
		for x in out:
			try:
				v = getattr(self, x)
			except AttributeError:
				raise Utils.WafError("tried to retrieve %s which is not a valid method" % x)
			debug('task_gen: -> %s (%d)' % (x, id(self)))
			v()
	def post(self):
		"runs the code to create the tasks, do not subclass"
		if not self.name:
			if isinstance(self.target, list):
				self.name = ' '.join(self.target)
			else:
				self.name = self.target
		# 'posted' guards against running apply() twice for the same generator
		if getattr(self, 'posted', None):
			#error("OBJECT ALREADY POSTED" + str( self))
			return
		self.apply()
		debug('task_gen: posted %s' % self.name)
		self.posted = True
	def get_hook(self, ext):
		"""Return the hook bound to the extension ext, instance mappings first, else None."""
		try: return self.mappings[ext]
		except KeyError:
			try: return task_gen.mappings[ext]
			except KeyError: return None
	# TODO waf 1.6: always set the environment
	# TODO waf 1.6: create_task(self, name, inputs, outputs)
	def create_task(self, name, env=None):
		"""Instantiate the task class 'name' with a copy of env and record it in self.tasks."""
		env = env or self.env
		task = Task.TaskBase.classes[name](env.copy(), generator=self)
		self.tasks.append(task)
		return task
	def name_to_obj(self, name):
		return self.bld.name_to_obj(name, self.env)
	def find_sources_in_dirs(self, dirnames, excludes=[], exts=[]):
		"""
		The attributes "excludes" and "exts" must be lists to avoid the confusion
		find_sources_in_dirs('a', 'b', 'c') <-> find_sources_in_dirs('a b c')
		do not use absolute paths
		do not use paths outside of the source tree
		the files or folder beginning by . are not returned
		# TODO: remove in Waf 1.6
		"""
		err_msg = "'%s' attribute must be a list"
		if not isinstance(excludes, list):
			raise Utils.WscriptError(err_msg % 'excludes')
		if not isinstance(exts, list):
			raise Utils.WscriptError(err_msg % 'exts')
		lst = []
		#make sure dirnames is a list helps with dirnames with spaces
		dirnames = self.to_list(dirnames)
		# NOTE: keys()+keys() relies on dict.keys() returning lists (Python 2)
		ext_lst = exts or self.mappings.keys() + task_gen.mappings.keys()
		for name in dirnames:
			anode = self.path.find_dir(name)
			if not anode or not anode.is_child_of(self.bld.srcnode):
				raise Utils.WscriptError("Unable to use '%s' - either because it's not a relative path" \
					", or it's not child of '%s'." % (name, self.bld.srcnode))
			self.bld.rescan(anode)
			for name in self.bld.cache_dir_contents[anode.id]:
				# ignore hidden files
				if name.startswith('.'):
					continue
				(base, ext) = os.path.splitext(name)
				if ext in ext_lst and not name in lst and not name in excludes:
					lst.append((anode.relpath_gen(self.path) or '.') + os.path.sep + name)
		lst.sort()
		self.source = self.to_list(self.source)
		if not self.source: self.source = lst
		else: self.source += lst
	def clone(self, env):
		"""Duplicate this generator, shallow-copying attributes; env is an env object or env name."""
		newobj = task_gen(bld=self.bld)
		for x in self.__dict__:
			if x in ['env', 'bld']:
				continue
			elif x in ["path", "features"]:
				setattr(newobj, x, getattr(self, x))
			else:
				setattr(newobj, x, copy.copy(getattr(self, x)))
		newobj.__class__ = self.__class__
		if isinstance(env, str):
			newobj.env = self.bld.all_envs[env].copy()
		else:
			newobj.env = env.copy()
		return newobj
	# install_path/chmod fall back to the class defaults set in __init__
	def get_inst_path(self):
		return getattr(self, '_install_path', getattr(self, 'default_install_path', ''))
	def set_inst_path(self, val):
		self._install_path = val
	install_path = property(get_inst_path, set_inst_path)
	def get_chmod(self):
		return getattr(self, '_chmod', getattr(self, 'default_chmod', O644))
	def set_chmod(self, val):
		self._chmod = val
	chmod = property(get_chmod, set_chmod)
def declare_extension(var, func):
	"""Map each extension in var (a string or a list) to the hook func.

	The hook is also recorded in task_gen.mapped under its name.
	Raises Utils.WscriptError when var is neither a list nor a string.
	"""
	try:
		for x in Utils.to_list(var):
			task_gen.mappings[x] = func
	# was a bare 'except:', which also swallowed KeyboardInterrupt/SystemExit
	except Exception:
		raise Utils.WscriptError('declare_extension takes either a list or a string %r' % var)
	task_gen.mapped[func.__name__] = func
def declare_order(*k):
	"""Record that each method name in k must run before the next one."""
	assert(len(k) > 1)
	for f1, f2 in zip(k, k[1:]):
		if f1 not in task_gen.prec[f2]:
			task_gen.prec[f2].append(f1)
def declare_chain(name='', action='', ext_in='', ext_out='', reentrant=1, color='BLUE',
		install=0, before=[], after=[], decider=None, rule=None, scan=None):
	"""
	see Tools/flex.py for an example
	while i do not like such wrappers, some people really do
	"""
	# 'rule' is an alias for 'action'
	action = action or rule
	if isinstance(action, str):
		act = Task.simple_task_type(name, action, color=color)
	else:
		act = Task.task_type_from_func(name, action, color=color)
	act.ext_in = tuple(Utils.to_list(ext_in))
	act.ext_out = tuple(Utils.to_list(ext_out))
	act.before = Utils.to_list(before)
	act.after = Utils.to_list(after)
	act.scan = scan
	# hook executed for each source file matching one of the input extensions
	def x_file(self, node):
		if decider:
			ext = decider(self, node)
		elif isinstance(ext_out, str):
			ext = ext_out
		# NOTE(review): if decider is None and ext_out is not a str, 'ext' is
		# unbound here and the isinstance test below raises NameError
		if isinstance(ext, str):
			out_source = node.change_ext(ext)
			if reentrant:
				self.allnodes.append(out_source)
		elif isinstance(ext, list):
			out_source = [node.change_ext(x) for x in ext]
			if reentrant:
				for i in xrange(reentrant):
					self.allnodes.append(out_source[i])
		else:
			# XXX: useless: it will fail on Utils.to_list above...
			raise Utils.WafError("do not know how to process %s" % str(ext))
		tsk = self.create_task(name)
		tsk.set_inputs(node)
		tsk.set_outputs(out_source)
		if node.__class__.bld.is_install == INSTALL:
			tsk.install = install
	declare_extension(act.ext_in, x_file)
def bind_feature(name, methods):
	"""Attach the given method name(s) (string or list) to the feature name."""
	task_gen.traits[name].update(Utils.to_list(methods))
"""
All the following decorators are registration decorators, i.e add an attribute to current class
(task_gen and its derivatives), with same name as func, which points to func itself.
For example:
@taskgen
def sayHi(self):
print("hi")
Now taskgen.sayHi() may be called
If python were really smart, it could infer itself the order of methods by looking at the
attributes. A prerequisite for execution is to have the attribute set before.
Intelligent compilers binding aspect-oriented programming and parallelization, what a nice topic for studies.
"""
def taskgen(func):
	"""Decorator: bind func as a method on the task_gen class."""
	setattr(task_gen, func.__name__, func)
def feature(*k):
	"""Decorator: bind the function to task_gen and register it under each feature name in k."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for feature_name in k:
			task_gen.traits[feature_name].update([func.__name__])
		return func
	return deco
def before(*k):
	"""Decorator: the function must run before the methods named in k."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		for other in k:
			successors = task_gen.prec[other]
			if func.__name__ not in successors:
				successors.append(func.__name__)
		return func
	return deco
def after(*k):
	"""Decorator: the function must run after the methods named in k."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		predecessors = task_gen.prec[func.__name__]
		for other in k:
			if other not in predecessors:
				predecessors.append(other)
		return func
	return deco
def extension(var):
	"""Decorator: register the function as the hook for the extension(s) in var."""
	def deco(func):
		setattr(task_gen, func.__name__, func)
		try:
			for x in Utils.to_list(var):
				task_gen.mappings[x] = func
		# was a bare 'except:', which also swallowed KeyboardInterrupt/SystemExit
		except Exception:
			raise Utils.WafError('extension takes either a list or a string %r' % var)
		task_gen.mapped[func.__name__] = func
		return func
	return deco
# TODO make certain the decorators may be used here
def apply_core(self):
	"""Process the attribute source
	transform the names into file nodes
	try to process the files by name first, later by extension"""
	# get the list of folders to use by the scanners
	# all our objects share the same include paths anyway
	find_resource = self.path.find_resource
	for filename in self.to_list(self.source):
		# if self.mappings or task_gen.mappings contains a file of the same name
		x = self.get_hook(filename)
		if x:
			x(self, filename)
		else:
			node = find_resource(filename)
			if not node: raise Utils.WafError("source not found: '%s' in '%s'" % (filename, str(self.path)))
			self.allnodes.append(node)
	# hooks may append new nodes to self.allnodes while it is being iterated
	for node in self.allnodes:
		# self.mappings or task_gen.mappings map the file extension to a function
		x = self.get_hook(node.suffix())
		if not x:
			raise Utils.WafError("Cannot guess how to process %s (got mappings %r in %r) -> try conf.check_tool(..)?" % \
				(str(node), self.__class__.mappings.keys(), self.__class__))
		x(self, node)
# run for every task generator, whatever its features
feature('*')(apply_core)
def exec_rule(self):
	"""Process the attribute rule, when provided the method apply_core will be disabled
	"""
	if not getattr(self, 'rule', None):
		return
	# someone may have removed it already
	try:
		self.meths.remove('apply_core')
	except ValueError:
		pass
	# get the function and the variables
	func = self.rule
	vars2 = []
	if isinstance(func, str):
		# use the shell by default for user-defined commands
		(func, vars2) = Task.compile_fun('', self.rule, shell=getattr(self, 'shell', True))
		func.code = self.rule
	vars = getattr(self, 'vars', vars2)
	if not vars:
		if isinstance(self.rule, str):
			vars = self.rule
		else:
			vars = Utils.h_fun(self.rule)
	# create the task class
	name = getattr(self, 'name', None) or self.target or self.rule
	cls = Task.task_type_from_func(name, func, vars)
	# now create one instance
	tsk = self.create_task(name)
	# we assume that the user knows that without inputs or outputs
	#if not getattr(self, 'target', None) and not getattr(self, 'source', None):
	#	cls.quiet = True
	if getattr(self, 'target', None):
		cls.quiet = True
		tsk.outputs=[self.path.find_or_declare(x) for x in self.to_list(self.target)]
	if getattr(self, 'source', None):
		cls.quiet = True
		tsk.inputs = []
		for x in self.to_list(self.source):
			y = self.path.find_resource(x)
			if not y:
				raise Utils.WafError('input file %r could not be found (%r)' % (x, self.path.abspath()))
			tsk.inputs.append(y)
	if getattr(self, 'always', None):
		Task.always_run(cls)
	if getattr(self, 'scan', None):
		cls.scan = self.scan
	if getattr(self, 'install_path', None):
		tsk.install_path = self.install_path
	if getattr(self, 'cwd', None):
		tsk.cwd = self.cwd
	if getattr(self, 'on_results', None):
		Task.update_outputs(cls)
	# copy the 'after'/'before' attributes from the generator onto the task class
	for x in ['after', 'before']:
		setattr(cls, x, getattr(self, x, []))
# run for every generator, and before apply_core so the rule can disable it
feature('*')(exec_rule)
before('apply_core')(exec_rule)
def sequence_order(self):
	"""
	add a strict sequential constraint between the tasks generated by task generators
	it uses the fact that task generators are posted in order
	it will not post objects which belong to other folders
	there is also an awesome trick for executing the method in last position
	to use:
	bld.new_task_gen(features='javac seq')
	bld.new_task_gen(features='jar seq')
	to start a new sequence, set the attribute seq_start, for example:
	obj.seq_start = True
	"""
	# re-append self so this method runs last (the "awesome trick" above)
	if self.meths and self.meths[-1] != 'sequence_order':
		self.meths.append('sequence_order')
		return
	if getattr(self, 'seq_start', None):
		return
	# all the tasks previously declared must be run before these
	if getattr(self.bld, 'prev', None):
		self.bld.prev.post()
		for x in self.bld.prev.tasks:
			for y in self.tasks:
				y.set_run_after(x)
	# remember this generator for the next one in the sequence
	self.bld.prev = self
feature('seq')(sequence_order)
|
[
"Task.task_type_from_func",
"Utils.to_list",
"Logs.warn",
"Utils.WafError",
"Logs.debug",
"Utils.DefaultDict",
"os.path.splitext",
"traceback.print_stack",
"Utils.h_fun",
"Task.update_outputs",
"Task.simple_task_type",
"Task.always_run",
"Utils.WscriptError"
] |
[((2989, 3012), 'Utils.DefaultDict', 'Utils.DefaultDict', (['list'], {}), '(list)\n', (3006, 3012), False, 'import Build, Task, Utils, Logs, Options\n'), ((3023, 3045), 'Utils.DefaultDict', 'Utils.DefaultDict', (['set'], {}), '(set)\n', (3040, 3045), False, 'import Build, Task, Utils, Logs, Options\n'), ((10260, 10281), 'Utils.to_list', 'Utils.to_list', (['before'], {}), '(before)\n', (10273, 10281), False, 'import Build, Task, Utils, Logs, Options\n'), ((10295, 10315), 'Utils.to_list', 'Utils.to_list', (['after'], {}), '(after)\n', (10308, 10315), False, 'import Build, Task, Utils, Logs, Options\n'), ((11114, 11136), 'Utils.to_list', 'Utils.to_list', (['methods'], {}), '(methods)\n', (11127, 11136), False, 'import Build, Task, Utils, Logs, Options\n'), ((14569, 14611), 'Task.task_type_from_func', 'Task.task_type_from_func', (['name', 'func', 'vars'], {}), '(name, func, vars)\n', (14593, 14611), False, 'import Build, Task, Utils, Logs, Options\n'), ((3111, 3134), 'Utils.DefaultDict', 'Utils.DefaultDict', (['list'], {}), '(list)\n', (3128, 3134), False, 'import Build, Task, Utils, Logs, Options\n'), ((4909, 4937), 'Utils.to_list', 'Utils.to_list', (['self.features'], {}), '(self.features)\n', (4922, 4937), False, 'import Build, Task, Utils, Logs, Options\n'), ((6390, 6430), 'Logs.debug', 'debug', (["('task_gen: posted %s' % self.name)"], {}), "('task_gen: posted %s' % self.name)\n", (6395, 6430), False, 'from Logs import debug, error, warn\n'), ((9357, 9375), 'Utils.to_list', 'Utils.to_list', (['var'], {}), '(var)\n', (9370, 9375), False, 'import Build, Task, Utils, Logs, Options\n'), ((10042, 10090), 'Task.simple_task_type', 'Task.simple_task_type', (['name', 'action'], {'color': 'color'}), '(name, action, color=color)\n', (10063, 10090), False, 'import Build, Task, Utils, Logs, Options\n'), ((10106, 10157), 'Task.task_type_from_func', 'Task.task_type_from_func', (['name', 'action'], {'color': 'color'}), '(name, action, color=color)\n', (10130, 10157), False, 
'import Build, Task, Utils, Logs, Options\n'), ((10178, 10199), 'Utils.to_list', 'Utils.to_list', (['ext_in'], {}), '(ext_in)\n', (10191, 10199), False, 'import Build, Task, Utils, Logs, Options\n'), ((10222, 10244), 'Utils.to_list', 'Utils.to_list', (['ext_out'], {}), '(ext_out)\n', (10235, 10244), False, 'import Build, Task, Utils, Logs, Options\n'), ((15281, 15301), 'Task.always_run', 'Task.always_run', (['cls'], {}), '(cls)\n', (15296, 15301), False, 'import Build, Task, Utils, Logs, Options\n'), ((15536, 15560), 'Task.update_outputs', 'Task.update_outputs', (['cls'], {}), '(cls)\n', (15555, 15560), False, 'import Build, Task, Utils, Logs, Options\n'), ((4483, 4519), 'Logs.warn', 'warn', (["('typo %s -> %s' % (name, real))"], {}), "('typo %s -> %s' % (name, real))\n", (4487, 4519), False, 'from Logs import debug, error, warn\n'), ((7425, 7465), 'Utils.WscriptError', 'Utils.WscriptError', (["(err_msg % 'excludes')"], {}), "(err_msg % 'excludes')\n", (7443, 7465), False, 'import Build, Task, Utils, Logs, Options\n'), ((7508, 7544), 'Utils.WscriptError', 'Utils.WscriptError', (["(err_msg % 'exts')"], {}), "(err_msg % 'exts')\n", (7526, 7544), False, 'import Build, Task, Utils, Logs, Options\n'), ((9425, 9510), 'Utils.WscriptError', 'Utils.WscriptError', (["('declare_extension takes either a list or a string %r' % var)"], {}), "('declare_extension takes either a list or a string %r' % var\n )\n", (9443, 9510), False, 'import Build, Task, Utils, Logs, Options\n'), ((12516, 12534), 'Utils.to_list', 'Utils.to_list', (['var'], {}), '(var)\n', (12529, 12534), False, 'import Build, Task, Utils, Logs, Options\n'), ((14449, 14471), 'Utils.h_fun', 'Utils.h_fun', (['self.rule'], {}), '(self.rule)\n', (14460, 14471), False, 'import Build, Task, Utils, Logs, Options\n'), ((4548, 4571), 'traceback.print_stack', 'traceback.print_stack', ([], {}), '()\n', (4569, 4571), False, 'import os, traceback, copy\n'), ((5017, 5087), 'Logs.warn', 'warn', (["('feature %r does not exist - 
bind at least one method to it' % x)"], {}), "('feature %r does not exist - bind at least one method to it' % x)\n", (5021, 5087), False, 'from Logs import debug, error, warn\n'), ((7859, 8006), 'Utils.WscriptError', 'Utils.WscriptError', (['("Unable to use \'%s\' - either because it\'s not a relative path, or it\'s not child of \'%s\'."\n % (name, self.bld.srcnode))'], {}), '(\n "Unable to use \'%s\' - either because it\'s not a relative path, or it\'s not child of \'%s\'."\n % (name, self.bld.srcnode))\n', (7877, 8006), False, 'import Build, Task, Utils, Logs, Options\n'), ((8178, 8200), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (8194, 8200), False, 'import os, traceback, copy\n'), ((12587, 12655), 'Utils.WafError', 'Utils.WafError', (["('extension takes either a list or a string %r' % var)"], {}), "('extension takes either a list or a string %r' % var)\n", (12601, 12655), False, 'import Build, Task, Utils, Logs, Options\n'), ((5946, 6016), 'Utils.WafError', 'Utils.WafError', (["('tried to retrieve %s which is not a valid method' % x)"], {}), "('tried to retrieve %s which is not a valid method' % x)\n", (5960, 6016), False, 'import Build, Task, Utils, Logs, Options\n')]
|
#!/usr/bin/env python3
"""The setup script."""
from setuptools import setup, find_packages

try:
    # pip >= 10 moved these helpers into the private pip._internal package.
    from pip._internal.req import parse_requirements
    from pip._internal.download import PipSession
except ImportError:
    # pip < 10 still exposes them at the top level.
    from pip.req import parse_requirements
    from pip.download import PipSession

# The README doubles as the long description shown on PyPI.
with open('README.rst') as fh:
    long_description = fh.read()

# Pin install_requires to the exact contents of requirements.txt.
parsed_requirements = parse_requirements('requirements.txt', session=PipSession())
requirements = [str(item.req) for item in parsed_requirements]

setup(
    name='neo-python',
    python_requires='>=3.6',
    version='0.8.1',
    description="Python Node and SDK for the NEO blockchain",
    long_description=long_description,
    author="<NAME>",
    author_email='<EMAIL>',
    maintainer="<NAME>",
    maintainer_email='<EMAIL>',
    url='https://github.com/CityOfZion/neo-python',
    packages=find_packages(include=['neo']),
    entry_points={
        'console_scripts': [
            'np-prompt=neo.bin.prompt:main',
            'np-api-server=neo.bin.api_server:main',
            'np-bootstrap=neo.bin.bootstrap:main',
            'np-reencrypt-wallet=neo.bin.reencrypt_wallet:main',
            'np-sign=neo.bin.sign_message:main',
            'np-export=neo.bin.export_blocks:main',
            'np-import=neo.bin.import_blocks:main',
        ],
    },
    include_package_data=True,
    install_requires=requirements,
    license="MIT license",
    zip_safe=False,
    keywords='neo, python, node',
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
    ]
)
|
[
"pip.download.PipSession",
"setuptools.find_packages"
] |
[((530, 542), 'pip.download.PipSession', 'PipSession', ([], {}), '()\n', (540, 542), False, 'from pip.download import PipSession\n'), ((931, 961), 'setuptools.find_packages', 'find_packages', ([], {'include': "['neo']"}), "(include=['neo'])\n", (944, 961), False, 'from setuptools import setup, find_packages\n')]
|
# 🚨 Don't change the code below 👇
age = input("What is your current age?")
# 🚨 Don't change the code above 👆
# Assumes a 90-year life expectancy; goes negative for ages above 90.
expected_remaining_years = 90 - int(age)
# Convert remaining years to days, weeks and months in a single pass
# (365 days, 52 weeks, 12 months per year).
year_days, year_weeks, year_months = [
    conv * expected_remaining_years for conv in [365, 52, 12]]
print(f"You have {year_days} days, {year_weeks} weeks, "
      f"and {year_months} months left.")
#First *fork* your copy. Then copy-paste your code below this line 👇
#Finally click "Run" to execute the tests
#Write your code above this line 👆
# 🚨 Do NOT modify the code below this line 👇
# Self-test harness: copy the first 40 lines of this very script into a
# generated module, wrapped in a function, so the grader can patch input()
# and stdout and run it repeatedly.
with open('testing_copy.py', 'w') as file:
    file.write('def test_func():\n')
    with open('day-2-3.py', 'r') as original:
        f2 = original.readlines()[0:40]
        for x in f2:
            # Indent each copied line so it becomes the function body.
            file.write("    " + x)
import testing_copy
import unittest
from unittest.mock import patch
from io import StringIO
import os
class MyTest(unittest.TestCase):
    # Feed `given_answer` to input() and capture stdout, then compare the
    # captured output to `expected_print` verbatim.
    def run_test(self, given_answer, expected_print):
        with patch('builtins.input', return_value=given_answer), patch('sys.stdout', new=StringIO()) as fake_out:
            testing_copy.test_func()
            self.assertEqual(fake_out.getvalue(), expected_print)
    def test_1(self):
        self.run_test(given_answer='53', expected_print='You have 13505 days, 1924 weeks, and 444 months left.\n')
    def test_2(self):
        self.run_test(given_answer='12', expected_print="You have 28470 days, 4056 weeks, and 936 months left.\n")
    def test_3(self):
        self.run_test(given_answer='90', expected_print='You have 0 days, 0 weeks, and 0 months left.\n')
print("\n\n\n.\n.\n.")
print('Checking what your code prints for several different ages.\nFor an age of 56 it should read this *exactly*:\n')
print('You have 12410 days, 1768 weeks, and 408 months left.')
print('\nRunning some tests on your code:')
print(".\n.\n.")
# exit=False keeps unittest from calling sys.exit(), so cleanup below runs.
unittest.main(verbosity=1, exit=False)
# Remove the generated helper module after the test run.
os.remove("testing_copy.py")
|
[
"unittest.main",
"os.remove",
"io.StringIO",
"unittest.mock.patch",
"testing_copy.test_func"
] |
[((1883, 1921), 'unittest.main', 'unittest.main', ([], {'verbosity': '(1)', 'exit': '(False)'}), '(verbosity=1, exit=False)\n', (1896, 1921), False, 'import unittest\n'), ((1923, 1951), 'os.remove', 'os.remove', (['"""testing_copy.py"""'], {}), "('testing_copy.py')\n", (1932, 1951), False, 'import os\n'), ((1006, 1056), 'unittest.mock.patch', 'patch', (['"""builtins.input"""'], {'return_value': 'given_answer'}), "('builtins.input', return_value=given_answer)\n", (1011, 1056), False, 'from unittest.mock import patch\n'), ((1119, 1143), 'testing_copy.test_func', 'testing_copy.test_func', ([], {}), '()\n', (1141, 1143), False, 'import testing_copy\n'), ((1082, 1092), 'io.StringIO', 'StringIO', ([], {}), '()\n', (1090, 1092), False, 'from io import StringIO\n')]
|
"""
This module hears to estimation messages and response back to them by using
predictive machine/deep learning
"""
from jira_client import JiraClient
from settings import config
import random
class EstimationMessage:
    """Listens for estimation requests and replies with a pseudo-random
    story-point estimate for the Jira issue referenced in the message."""

    def __init__(self, bot):
        self.bot = bot
        self.jira_client = JiraClient()
        # Fibonacci-style story-point scale to pick estimates from.
        self.story_points = [1, 2, 3, 5, 8]
        # Trigger words that make this handler react to a message.
        self.words = [':estima', 'estima']
        self.offenses = ['malparido', 'io pushi', 'cabron', 'patron']

    def listenTo(self, channel, message, event):
        """Return True when the message contains any trigger word (case-insensitive)."""
        # Lower-case once instead of once per trigger word; a generator avoids
        # materializing a throwaway list inside any().
        lowered = message.lower()
        return any(word in lowered for word in self.words)

    def getEstimation(self, jira_issue):
        """Return a pseudo-random story-point estimate (the issue is unused)."""
        return random.choice(self.story_points)

    def getOffenses(self):
        """Return a random 'offense' word when offensive mode is on, else ''.

        NOTE(review): if `config` is a configparser, config.get() returns a
        string, so `== True` would never match -- confirm the option's type.
        """
        return random.choice(self.offenses) if config.get("options", "offensive") == True else ""

    def reactTo(self, channel, message, event):
        """Look up the Jira issue referenced in the message and post an estimate.

        Posts an error message to the channel when no issue id is found or the
        issue cannot be fetched.
        """
        jira_issue_id = self.jira_client.getIssueID(message)
        jira_issue = self.jira_client.getIssue(jira_issue_id)
        if jira_issue_id is None or jira_issue is None:
            return self.bot.sendMessage(channel, 'No jira issue found {0}'.format(self.getOffenses()))
        estimation_points = self.getEstimation(jira_issue)
        response_msg = '{0} story points for {1} {2} ({3})!'.format(str(
            estimation_points), jira_issue_id, self.getOffenses(), jira_issue.fields.summary)
        self.bot.sendMessage(channel, response_msg)
|
[
"random.choice",
"jira_client.JiraClient",
"settings.config.get"
] |
[((301, 313), 'jira_client.JiraClient', 'JiraClient', ([], {}), '()\n', (311, 313), False, 'from jira_client import JiraClient\n'), ((647, 679), 'random.choice', 'random.choice', (['self.story_points'], {}), '(self.story_points)\n', (660, 679), False, 'import random\n'), ((723, 751), 'random.choice', 'random.choice', (['self.offenses'], {}), '(self.offenses)\n', (736, 751), False, 'import random\n'), ((755, 789), 'settings.config.get', 'config.get', (['"""options"""', '"""offensive"""'], {}), "('options', 'offensive')\n", (765, 789), False, 'from settings import config\n')]
|
from datetime import date
import pytest
from envinorma.models import Regime
from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range
from back_office.components.condition_form import _AND_ID
from back_office.components.condition_form.helpers import (
CONDITION_VARIABLES,
ConditionFormValues,
FormHandlingError,
_assert_strictly_below,
_build_parameter_value,
_check_compatibility_and_build_range,
_extract_parameter_to_conditions,
_NotSimplifiableError,
_simplify_condition,
_simplify_mono_conditions,
_try_building_range_condition,
build_condition,
)
def test_simplify_condition():
    """_simplify_condition collapses composite conditions or raises."""
    auth_date = ParameterEnum.DATE_AUTORISATION.value
    regime = ParameterEnum.REGIME.value
    start = date(2010, 1, 1)
    end = date(2020, 1, 1)

    def expect_failure(condition):
        # Non-simplifiable input must raise FormHandlingError.
        with pytest.raises(FormHandlingError):
            _simplify_condition(condition)

    # Empty composites cannot be simplified.
    expect_failure(AndCondition(frozenset()))
    expect_failure(OrCondition(frozenset()))

    # A singleton composite collapses to its only element.
    single = Greater(auth_date, start, False)
    other = Greater(auth_date, end, False)
    assert _simplify_condition(OrCondition(frozenset((single,)))) == single
    assert _simplify_condition(AndCondition(frozenset((single,)))) == single

    # Incompatible pairs on the same parameter are rejected.
    expect_failure(AndCondition(frozenset((single, other))))
    expect_failure(AndCondition(frozenset((Greater(auth_date, start), Littler(auth_date, start)))))
    expect_failure(AndCondition(frozenset((Littler(auth_date, start), Littler(auth_date, end)))))
    expect_failure(AndCondition(frozenset((Littler(auth_date, start), Equal(auth_date, start)))))

    # A lower and an upper bound merge into a Range.
    merged = _simplify_condition(AndCondition(frozenset((Littler(auth_date, end), Greater(auth_date, start)))))
    assert merged == Range(auth_date, start, end)

    # Extra conditions on other parameters are kept alongside the Range.
    mixed = AndCondition(frozenset((Littler(auth_date, end), Greater(auth_date, start), Equal(regime, 'A'))))
    assert _simplify_condition(mixed) == AndCondition(frozenset((Range(auth_date, start, end), Equal(regime, 'A'))))
def test_check_compatibility_and_build_range_try():
    """Compatible bounds produce a Range; incompatible ones raise."""
    auth_date = ParameterEnum.DATE_AUTORISATION.value
    start, end = date(2010, 1, 1), date(2020, 1, 1)

    built = _check_compatibility_and_build_range(auth_date, Littler(auth_date, end), Greater(auth_date, start))
    assert isinstance(built, Range)

    # Lower bound equal to the upper bound is not a valid range.
    with pytest.raises(FormHandlingError):
        _check_compatibility_and_build_range(auth_date, Littler(auth_date, end), Greater(auth_date, end))
def test_building_range_condition():
    """_try_building_range_condition merges bounds and keeps other conditions."""
    lo, hi = date(2010, 1, 1), date(2020, 1, 1)
    auth_date = ParameterEnum.DATE_AUTORISATION.value
    qty = ParameterEnum.RUBRIQUE_QUANTITY.value
    regime = ParameterEnum.REGIME.value

    # Nothing to build from an empty set.
    assert _try_building_range_condition(frozenset()) is None

    # Nested composite conditions are not allowed.
    with pytest.raises(ValueError):
        _try_building_range_condition(frozenset((AndCondition(frozenset()),)))
    with pytest.raises(ValueError):
        _try_building_range_condition(frozenset((OrCondition(frozenset()),)))

    # A single condition is returned unchanged.
    assert _try_building_range_condition(frozenset([Greater(auth_date, lo, False)])) == Greater(auth_date, lo, False)

    # Non-mergeable conditions stay AND-combined.
    result = _try_building_range_condition(frozenset([Equal(regime, 'A'), Greater(auth_date, hi)]))
    assert result == AndCondition(frozenset([Equal(regime, 'A'), Greater(auth_date, hi)]))

    # Contradictory bounds raise.
    with pytest.raises(FormHandlingError):
        _try_building_range_condition(frozenset([Littler(auth_date, hi), Greater(auth_date, hi)]))

    # Compatible bounds merge into a Range.
    result = _try_building_range_condition(frozenset([Littler(auth_date, hi), Greater(auth_date, lo)]))
    assert result == Range(auth_date, lo, hi)

    result = _try_building_range_condition(
        frozenset([Littler(auth_date, hi), Greater(auth_date, lo), Equal(regime, 'E'), Equal(qty, 10)])
    )
    assert result == AndCondition(frozenset([Range(auth_date, lo, hi), Equal(regime, 'E'), Equal(qty, 10)]))

    result = _try_building_range_condition(frozenset([Littler(qty, 20), Greater(qty, 10), Equal(regime, 'D')]))
    assert result == AndCondition(frozenset([Range(qty, 10, 20), Equal(regime, 'D')]))
def test_simplify_mono_conditions():
    """Single-parameter condition lists collapse to one condition or raise."""
    first, second, third = date(2010, 1, 1), date(2020, 1, 1), date(2030, 1, 1)
    auth_date = ParameterEnum.DATE_AUTORISATION.value
    qty = ParameterEnum.RUBRIQUE_QUANTITY.value
    regime = ParameterEnum.REGIME.value

    # An empty list or three equality constraints cannot be simplified.
    with pytest.raises(_NotSimplifiableError):
        _simplify_mono_conditions(auth_date, [])
    with pytest.raises(_NotSimplifiableError):
        _simplify_mono_conditions(auth_date, [Equal(auth_date, first), Equal(auth_date, second), Equal(auth_date, third)])

    # Compatible bounds merge into a Range.
    assert _simplify_mono_conditions(qty, [Littler(qty, 100), Greater(qty, 10)]) == Range(qty, 10, 100)

    # Inverted bounds are rejected.
    with pytest.raises(FormHandlingError):
        _simplify_mono_conditions(regime, [Littler(qty, 10), Greater(qty, 100)])

    # A singleton list simplifies to its only element.
    for condition in (Littler(auth_date, first), Greater(auth_date, first), Equal(auth_date, first)):
        assert _simplify_mono_conditions(auth_date, [condition]) == condition
def test_assert_strictly_below():
    """_assert_strictly_below raises unless left < right, else returns None."""
    lo = date(2010, 1, 1)
    hi = date(2020, 1, 1)

    # Equal or inverted bounds (ints and dates alike) must be rejected.
    for left, right in [(1, 1), (2, 1), (lo, lo), (hi, lo)]:
        with pytest.raises(FormHandlingError):
            _assert_strictly_below(left, right)

    # Strictly increasing bounds pass silently.
    assert _assert_strictly_below(1, 2) is None
    assert _assert_strictly_below(lo, hi) is None
def test_build_parameter_value():
    """_build_parameter_value must know every declared condition variable."""
    for variable in CONDITION_VARIABLES.values():
        try:
            _build_parameter_value(variable.value.type, '')
        except Exception as exc:
            # Parsing '' may legitimately fail, but an 'unknown parameter
            # type' error means a variable type is unsupported -- surface it.
            if 'Ce type de paramètre' in str(exc):
                raise exc
def test_extract_parameter_to_conditions():
    """Conditions are grouped by parameter, preserving their input order."""
    lo, hi = date(2010, 1, 1), date(2020, 1, 1)
    auth_date = ParameterEnum.DATE_AUTORISATION.value
    regime = ParameterEnum.REGIME.value
    below = Littler(auth_date, hi)
    above = Greater(auth_date, lo)
    is_a = Equal(regime, 'A')

    assert _extract_parameter_to_conditions([below, above, is_a]) == {auth_date: [below, above], regime: [is_a]}
    assert _extract_parameter_to_conditions([below, above]) == {auth_date: [below, above]}
    assert _extract_parameter_to_conditions([above, is_a]) == {auth_date: [above], regime: [is_a]}
    assert _extract_parameter_to_conditions([above]) == {auth_date: [above]}
    assert _extract_parameter_to_conditions([]) == {}
def test_build_condition():
    """build_condition turns form values into the matching condition object."""
    declaration_date = ParameterEnum.DATE_DECLARATION.value

    # An empty form cannot yield a condition.
    with pytest.raises(FormHandlingError):
        assert build_condition(ConditionFormValues([], [], [], _AND_ID))

    # A single equality row builds an Equal condition.
    expected = Equal(declaration_date, date(2020, 1, 1))
    actual = build_condition(ConditionFormValues(['Date de déclaration'], ['='], ['01/01/2020'], _AND_ID))
    assert actual == expected

    # Two bounds on the same parameter collapse into a Range.
    expected = Range(declaration_date, date(2020, 1, 1), date(2020, 1, 31))
    actual = build_condition(
        ConditionFormValues(['Date de déclaration'] * 2, ['>=', '<'], ['01/01/2020', '31/01/2020'], _AND_ID)
    )
    assert actual == expected

    # Conditions on distinct parameters are AND-combined.
    expected = AndCondition(
        frozenset([Equal(declaration_date, date(2020, 1, 1)), Equal(ParameterEnum.REGIME.value, Regime.A)])
    )
    actual = build_condition(
        ConditionFormValues(['Date de déclaration', 'Régime'], ['=', '='], ['01/01/2020', 'A'], _AND_ID)
    )
    assert actual == expected
|
[
"envinorma.parametrization.Equal",
"back_office.components.condition_form.helpers.build_condition",
"envinorma.parametrization.Greater",
"back_office.components.condition_form.helpers._assert_strictly_below",
"datetime.date",
"back_office.components.condition_form.helpers.CONDITION_VARIABLES.values",
"envinorma.parametrization.Range",
"back_office.components.condition_form.helpers._simplify_mono_conditions",
"pytest.raises",
"back_office.components.condition_form.helpers._check_compatibility_and_build_range",
"back_office.components.condition_form.helpers._build_parameter_value",
"back_office.components.condition_form.helpers._simplify_condition",
"envinorma.parametrization.Littler",
"back_office.components.condition_form.helpers._extract_parameter_to_conditions",
"back_office.components.condition_form.helpers.ConditionFormValues"
] |
[((790, 806), 'datetime.date', 'date', (['(2010)', '(1)', '(1)'], {}), '(2010, 1, 1)\n', (794, 806), False, 'from datetime import date\n'), ((816, 832), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (820, 832), False, 'from datetime import date\n'), ((1039, 1064), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1', '(False)'], {}), '(_date, d1, False)\n', (1046, 1064), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((1078, 1103), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd2', '(False)'], {}), '(_date, d2, False)\n', (1085, 1103), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((1399, 1417), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (1406, 1417), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((1431, 1449), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd1'], {}), '(_date, d1)\n', (1438, 1449), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((1602, 1620), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd1'], {}), '(_date, d1)\n', (1609, 1620), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((1634, 1652), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (1641, 1652), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((1805, 1823), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd1'], {}), '(_date, d1)\n', (1812, 1823), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, 
Range\n'), ((1837, 1853), 'envinorma.parametrization.Equal', 'Equal', (['_date', 'd1'], {}), '(_date, d1)\n', (1842, 1853), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2006, 2024), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (2013, 2024), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2038, 2056), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (2045, 2056), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2124, 2153), 'back_office.components.condition_form.helpers._simplify_condition', '_simplify_condition', (['and_cond'], {}), '(and_cond)\n', (2143, 2153), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((2306, 2335), 'back_office.components.condition_form.helpers._simplify_condition', '_simplify_condition', (['and_cond'], {}), '(and_cond)\n', (2325, 2335), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((2486, 2502), 'datetime.date', 'date', (['(2010)', '(1)', '(1)'], {}), '(2010, 1, 1)\n', (2490, 2502), False, 'from datetime import date\n'), ((2512, 2528), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), 
'(2020, 1, 1)\n', (2516, 2528), False, 'from datetime import date\n'), ((2593, 2611), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (2600, 2611), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2625, 2643), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (2632, 2643), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2798, 2816), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (2805, 2816), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2830, 2848), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd2'], {}), '(_date, d2)\n', (2837, 2848), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3008, 3024), 'datetime.date', 'date', (['(2010)', '(1)', '(1)'], {}), '(2010, 1, 1)\n', (3012, 3024), False, 'from datetime import date\n'), ((3034, 3050), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (3038, 3050), False, 'from datetime import date\n'), ((4537, 4553), 'datetime.date', 'date', (['(2010)', '(1)', '(1)'], {}), '(2010, 1, 1)\n', (4541, 4553), False, 'from datetime import date\n'), ((4563, 4579), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (4567, 4579), False, 'from datetime import date\n'), ((4589, 4605), 'datetime.date', 'date', (['(2030)', '(1)', '(1)'], {}), '(2030, 1, 1)\n', (4593, 4605), False, 'from datetime import date\n'), ((5561, 5577), 'datetime.date', 'date', (['(2010)', '(1)', '(1)'], {}), '(2010, 1, 1)\n', (5565, 5577), False, 'from datetime import date\n'), ((5587, 5603), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (5591, 5603), 
False, 'from datetime import date\n'), ((6081, 6109), 'back_office.components.condition_form.helpers.CONDITION_VARIABLES.values', 'CONDITION_VARIABLES.values', ([], {}), '()\n', (6107, 6109), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((6346, 6362), 'datetime.date', 'date', (['(2010)', '(1)', '(1)'], {}), '(2010, 1, 1)\n', (6350, 6362), False, 'from datetime import date\n'), ((6372, 6388), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (6376, 6388), False, 'from datetime import date\n'), ((7129, 7165), 'back_office.components.condition_form.helpers._extract_parameter_to_conditions', '_extract_parameter_to_conditions', (['[]'], {}), '([])\n', (7161, 7165), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((7628, 7732), 'back_office.components.condition_form.helpers.ConditionFormValues', 'ConditionFormValues', (["(['Date de déclaration'] * 2)", "['>=', '<']", "['01/01/2020', '31/01/2020']", '_AND_ID'], {}), "(['Date de déclaration'] * 2, ['>=', '<'], ['01/01/2020',\n '31/01/2020'], _AND_ID)\n", (7647, 7732), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, 
_simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((7861, 7904), 'envinorma.parametrization.Equal', 'Equal', (['ParameterEnum.REGIME.value', 'Regime.A'], {}), '(ParameterEnum.REGIME.value, Regime.A)\n', (7866, 7904), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((7971, 8072), 'back_office.components.condition_form.helpers.ConditionFormValues', 'ConditionFormValues', (["['Date de déclaration', 'Régime']", "['=', '=']", "['01/01/2020', 'A']", '_AND_ID'], {}), "(['Date de déclaration', 'Régime'], ['=', '='], [\n '01/01/2020', 'A'], _AND_ID)\n", (7990, 8072), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((842, 874), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (855, 874), False, 'import pytest\n'), ((940, 972), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (953, 972), False, 'import pytest\n'), ((1313, 1345), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (1326, 1345), False, 'import pytest\n'), ((1355, 1384), 'back_office.components.condition_form.helpers._simplify_condition', '_simplify_condition', (['and_cond'], {}), '(and_cond)\n', (1374, 1384), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((1516, 
1548), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (1529, 1548), False, 'import pytest\n'), ((1558, 1587), 'back_office.components.condition_form.helpers._simplify_condition', '_simplify_condition', (['and_cond'], {}), '(and_cond)\n', (1577, 1587), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((1719, 1751), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (1732, 1751), False, 'import pytest\n'), ((1761, 1790), 'back_office.components.condition_form.helpers._simplify_condition', '_simplify_condition', (['and_cond'], {}), '(and_cond)\n', (1780, 1790), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((1920, 1952), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (1933, 1952), False, 'import pytest\n'), ((1962, 1991), 'back_office.components.condition_form.helpers._simplify_condition', '_simplify_condition', (['and_cond'], {}), '(and_cond)\n', (1981, 1991), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, 
build_condition\n'), ((2172, 2192), 'envinorma.parametrization.Range', 'Range', (['_date', 'd1', 'd2'], {}), '(_date, d1, d2)\n', (2177, 2192), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2666, 2725), 'back_office.components.condition_form.helpers._check_compatibility_and_build_range', '_check_compatibility_and_build_range', (['_date', 'cond_1', 'cond_2'], {}), '(_date, cond_1, cond_2)\n', (2702, 2725), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((2858, 2890), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (2871, 2890), False, 'import pytest\n'), ((2900, 2959), 'back_office.components.condition_form.helpers._check_compatibility_and_build_range', '_check_compatibility_and_build_range', (['_date', 'cond_1', 'cond_2'], {}), '(_date, cond_1, cond_2)\n', (2936, 2959), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((3264, 3289), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3277, 3289), False, 'import pytest\n'), ((3380, 3405), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3393, 3405), False, 'import pytest\n'), ((3570, 3595), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd1', '(False)'], {}), '(date_, d1, False)\n', (3577, 3595), False, 
'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3778, 3810), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (3791, 3810), False, 'import pytest\n'), ((4015, 4035), 'envinorma.parametrization.Range', 'Range', (['date_', 'd1', 'd2'], {}), '(date_, d1, d2)\n', (4020, 4035), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4756, 4792), 'pytest.raises', 'pytest.raises', (['_NotSimplifiableError'], {}), '(_NotSimplifiableError)\n', (4769, 4792), False, 'import pytest\n'), ((4802, 4838), 'back_office.components.condition_form.helpers._simplify_mono_conditions', '_simplify_mono_conditions', (['date_', '[]'], {}), '(date_, [])\n', (4827, 4838), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((4849, 4885), 'pytest.raises', 'pytest.raises', (['_NotSimplifiableError'], {}), '(_NotSimplifiableError)\n', (4862, 4885), False, 'import pytest\n'), ((5098, 5122), 'envinorma.parametrization.Range', 'Range', (['quantity', '(10)', '(100)'], {}), '(quantity, 10, 100)\n', (5103, 5122), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5133, 5165), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (5146, 5165), False, 'import pytest\n'), ((5325, 5343), 'envinorma.parametrization.Littler', 'Littler', (['date_', 'd1'], {}), '(date_, d1)\n', (5332, 5343), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), 
((5413, 5431), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd1'], {}), '(date_, d1)\n', (5420, 5431), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5499, 5515), 'envinorma.parametrization.Equal', 'Equal', (['date_', 'd1'], {}), '(date_, d1)\n', (5504, 5515), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5614, 5646), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (5627, 5646), False, 'import pytest\n'), ((5656, 5684), 'back_office.components.condition_form.helpers._assert_strictly_below', '_assert_strictly_below', (['(1)', '(1)'], {}), '(1, 1)\n', (5678, 5684), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((5694, 5726), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (5707, 5726), False, 'import pytest\n'), ((5736, 5764), 'back_office.components.condition_form.helpers._assert_strictly_below', '_assert_strictly_below', (['(2)', '(1)'], {}), '(2, 1)\n', (5758, 5764), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((5776, 5804), 'back_office.components.condition_form.helpers._assert_strictly_below', '_assert_strictly_below', (['(1)', '(2)'], {}), '(1, 2)\n', (5798, 5804), False, 
'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((5823, 5855), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (5836, 5855), False, 'import pytest\n'), ((5865, 5895), 'back_office.components.condition_form.helpers._assert_strictly_below', '_assert_strictly_below', (['d1', 'd1'], {}), '(d1, d1)\n', (5887, 5895), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((5905, 5937), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (5918, 5937), False, 'import pytest\n'), ((5947, 5977), 'back_office.components.condition_form.helpers._assert_strictly_below', '_assert_strictly_below', (['d2', 'd1'], {}), '(d2, d1)\n', (5969, 5977), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((5989, 6019), 'back_office.components.condition_form.helpers._assert_strictly_below', '_assert_strictly_below', (['d1', 'd2'], {}), '(d1, d2)\n', (6011, 6019), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, 
FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((7226, 7258), 'pytest.raises', 'pytest.raises', (['FormHandlingError'], {}), '(FormHandlingError)\n', (7239, 7258), False, 'import pytest\n'), ((7388, 7404), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (7392, 7404), False, 'from datetime import date\n'), ((7573, 7589), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (7577, 7589), False, 'from datetime import date\n'), ((7591, 7608), 'datetime.date', 'date', (['(2020)', '(1)', '(31)'], {}), '(2020, 1, 31)\n', (7595, 7608), False, 'from datetime import date\n'), ((7740, 7768), 'back_office.components.condition_form.helpers.build_condition', 'build_condition', (['form_values'], {}), '(form_values)\n', (7755, 7768), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((7832, 7848), 'datetime.date', 'date', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (7836, 7848), False, 'from datetime import date\n'), ((8079, 8107), 'back_office.components.condition_form.helpers.build_condition', 'build_condition', (['form_values'], {}), '(form_values)\n', (8094, 8107), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, 
_try_building_range_condition, build_condition\n'), ((5032, 5054), 'envinorma.parametrization.Littler', 'Littler', (['quantity', '(100)'], {}), '(quantity, 100)\n', (5039, 5054), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5056, 5077), 'envinorma.parametrization.Greater', 'Greater', (['quantity', '(10)'], {}), '(quantity, 10)\n', (5063, 5077), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6136, 6180), 'back_office.components.condition_form.helpers._build_parameter_value', '_build_parameter_value', (['param.value.type', '""""""'], {}), "(param.value.type, '')\n", (6158, 6180), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((6525, 6543), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (6532, 6543), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6545, 6563), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (6552, 6563), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6565, 6584), 'envinorma.parametrization.Equal', 'Equal', (['_regime', '"""A"""'], {}), "(_regime, 'A')\n", (6570, 6584), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6731, 6749), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (6738, 6749), False, 'from envinorma.parametrization 
import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6751, 6769), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (6758, 6769), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6884, 6902), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (6891, 6902), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6904, 6923), 'envinorma.parametrization.Equal', 'Equal', (['_regime', '"""A"""'], {}), "(_regime, 'A')\n", (6909, 6923), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((7050, 7068), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (7057, 7068), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((7291, 7331), 'back_office.components.condition_form.helpers.ConditionFormValues', 'ConditionFormValues', (['[]', '[]', '[]', '_AND_ID'], {}), '([], [], [], _AND_ID)\n', (7310, 7331), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, _assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((7433, 7509), 'back_office.components.condition_form.helpers.ConditionFormValues', 'ConditionFormValues', (["['Date de déclaration']", "['=']", "['01/01/2020']", '_AND_ID'], {}), "(['Date de déclaration'], ['='], ['01/01/2020'], _AND_ID)\n", (7452, 7509), False, 'from back_office.components.condition_form.helpers import CONDITION_VARIABLES, ConditionFormValues, FormHandlingError, 
_assert_strictly_below, _build_parameter_value, _check_compatibility_and_build_range, _extract_parameter_to_conditions, _NotSimplifiableError, _simplify_condition, _simplify_mono_conditions, _try_building_range_condition, build_condition\n'), ((2233, 2251), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (2240, 2251), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2253, 2271), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (2260, 2271), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2273, 2292), 'envinorma.parametrization.Equal', 'Equal', (['_regime', '"""A"""'], {}), "(_regime, 'A')\n", (2278, 2292), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3648, 3663), 'envinorma.parametrization.Equal', 'Equal', (['reg', '"""A"""'], {}), "(reg, 'A')\n", (3653, 3663), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3665, 3683), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd2'], {}), '(date_, d2)\n', (3672, 3683), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3955, 3973), 'envinorma.parametrization.Littler', 'Littler', (['date_', 'd2'], {}), '(date_, d2)\n', (3962, 3973), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3975, 3993), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd1'], {}), '(date_, d1)\n', (3982, 3993), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4097, 4115), 'envinorma.parametrization.Littler', 'Littler', 
(['date_', 'd2'], {}), '(date_, d2)\n', (4104, 4115), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4117, 4135), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd1'], {}), '(date_, d1)\n', (4124, 4135), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4137, 4152), 'envinorma.parametrization.Equal', 'Equal', (['reg', '"""E"""'], {}), "(reg, 'E')\n", (4142, 4152), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4154, 4173), 'envinorma.parametrization.Equal', 'Equal', (['quantity', '(10)'], {}), '(quantity, 10)\n', (4159, 4173), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4338, 4359), 'envinorma.parametrization.Littler', 'Littler', (['quantity', '(20)'], {}), '(quantity, 20)\n', (4345, 4359), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4361, 4382), 'envinorma.parametrization.Greater', 'Greater', (['quantity', '(10)'], {}), '(quantity, 10)\n', (4368, 4382), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4384, 4399), 'envinorma.parametrization.Equal', 'Equal', (['reg', '"""D"""'], {}), "(reg, 'D')\n", (4389, 4399), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4929, 4945), 'envinorma.parametrization.Equal', 'Equal', (['date_', 'd1'], {}), '(date_, d1)\n', (4934, 4945), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4947, 4963), 'envinorma.parametrization.Equal', 'Equal', (['date_', 'd2'], {}), '(date_, d2)\n', (4952, 4963), False, 
'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4965, 4981), 'envinorma.parametrization.Equal', 'Equal', (['date_', 'd3'], {}), '(date_, d3)\n', (4970, 4981), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5207, 5228), 'envinorma.parametrization.Littler', 'Littler', (['quantity', '(10)'], {}), '(quantity, 10)\n', (5214, 5228), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5230, 5252), 'envinorma.parametrization.Greater', 'Greater', (['quantity', '(100)'], {}), '(quantity, 100)\n', (5237, 5252), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5301, 5319), 'envinorma.parametrization.Littler', 'Littler', (['date_', 'd1'], {}), '(date_, d1)\n', (5308, 5319), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5389, 5407), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd1'], {}), '(date_, d1)\n', (5396, 5407), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((5477, 5493), 'envinorma.parametrization.Equal', 'Equal', (['date_', 'd1'], {}), '(date_, d1)\n', (5482, 5493), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6614, 6632), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (6621, 6632), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6634, 6652), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (6641, 6652), False, 'from envinorma.parametrization import AndCondition, Equal, 
Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6665, 6684), 'envinorma.parametrization.Equal', 'Equal', (['_regime', '"""A"""'], {}), "(_regime, 'A')\n", (6670, 6684), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6799, 6817), 'envinorma.parametrization.Littler', 'Littler', (['_date', 'd2'], {}), '(_date, d2)\n', (6806, 6817), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6819, 6837), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (6826, 6837), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6953, 6971), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (6960, 6971), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((6984, 7003), 'envinorma.parametrization.Equal', 'Equal', (['_regime', '"""A"""'], {}), "(_regime, 'A')\n", (6989, 7003), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((7098, 7116), 'envinorma.parametrization.Greater', 'Greater', (['_date', 'd1'], {}), '(_date, d1)\n', (7105, 7116), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2378, 2398), 'envinorma.parametrization.Range', 'Range', (['_date', 'd1', 'd2'], {}), '(_date, d1, d2)\n', (2383, 2398), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((2400, 2419), 'envinorma.parametrization.Equal', 'Equal', (['_regime', '"""A"""'], {}), "(_regime, 'A')\n", (2405, 2419), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, 
Range\n'), ((3538, 3563), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd1', '(False)'], {}), '(date_, d1, False)\n', (3545, 3563), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3729, 3744), 'envinorma.parametrization.Equal', 'Equal', (['reg', '"""A"""'], {}), "(reg, 'A')\n", (3734, 3744), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3746, 3764), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd2'], {}), '(date_, d2)\n', (3753, 3764), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3861, 3879), 'envinorma.parametrization.Littler', 'Littler', (['date_', 'd2'], {}), '(date_, d2)\n', (3868, 3879), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((3881, 3899), 'envinorma.parametrization.Greater', 'Greater', (['date_', 'd2'], {}), '(date_, d2)\n', (3888, 3899), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4224, 4244), 'envinorma.parametrization.Range', 'Range', (['date_', 'd1', 'd2'], {}), '(date_, d1, d2)\n', (4229, 4244), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4246, 4261), 'envinorma.parametrization.Equal', 'Equal', (['reg', '"""E"""'], {}), "(reg, 'E')\n", (4251, 4261), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4263, 4282), 'envinorma.parametrization.Equal', 'Equal', (['quantity', '(10)'], {}), '(quantity, 10)\n', (4268, 4282), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4445, 4468), 
'envinorma.parametrization.Range', 'Range', (['quantity', '(10)', '(20)'], {}), '(quantity, 10, 20)\n', (4450, 4468), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n'), ((4470, 4485), 'envinorma.parametrization.Equal', 'Equal', (['reg', '"""D"""'], {}), "(reg, 'D')\n", (4475, 4485), False, 'from envinorma.parametrization import AndCondition, Equal, Greater, Littler, OrCondition, ParameterEnum, Range\n')]
|
import random
from collections import defaultdict
# Shared round state, mutated in place by the game functions below.
player_names = []  # display name of each player, in join order
player_scores = []  # running score per player, parallel to player_names
# word_to_guess = None
letters_missed = []  # upper-cased wrong guesses for the current round
def players():
    """Greet the group, collect the player names, then start a round.

    Re-prompts until the head-count parses as an integer, confirms it,
    optionally registers late joiners, and finally hands off to hangman().
    """
    banner = "----------------------"
    print(banner)
    print("Welcome to Hangman!")
    print(banner)
    # Keep asking until the answer parses as an integer.
    while True:
        raw_count = input("\nHow many players are going to play?")
        try:
            player_count = int(raw_count)
        except ValueError:
            print("That's not a number. Please input a number")
        else:
            print()
            break
    add_player_to_list(player_count)
    # Confirm the head-count and allow extra players to join.
    while True:
        answer = input("\nThere are currently " + str(player_count) + " players. Is that correct? (Yes/No) ").lower()
        if answer == "yes":
            break
        if answer == "no":
            extra = int(input("How many more players will join? "))
            add_player_to_list(extra)
            break
        print("Wrong input. Please input yes or no")
    print_player_names(len(player_names))
    hangman()
# print("The current players are: {}".format(player_names))
def hangman():
    """Run one round: pick a secret word, then loop taking guesses.

    The group shares 6 lives; turns rotate through player_names.
    Win/loss detection is delegated to check_win_condition(), which is
    evaluated at the top of each turn.
    """
    word_to_guess = random_words()
    print("Word to guess: {}" .format(word_to_guess))
    word_length = len(word_to_guess)
    guess = ['_' for i in range(word_length)]
    lives = 6
    number_of_players = len(player_names)
    current_player = 0
    print("====================================================")
    print("\nLet us begin the game!")
    print("You will have 6 lives as a group to guess the word.")
    print("The current word is {} letters long\n" .format(word_length))
    while True:
        check_win_condition(lives, guess, word_to_guess)
        print("Word: ", end='')
        for letter in guess:
            print(letter, ' ', end='')
        print("")
        print("Lives: {}" .format(lives))
        print("Misses: ", end='')
        for letter in letters_missed:
            print(letter, ' ', end='')
        input_guess = input("\nWhat is your guess {}? " .format(player_names[current_player]))
        print("")
        if len(input_guess) > 1:
            print("Please only input 1 letter")
            continue
        elif input_guess.isalpha():
            flag = check_letter(input_guess.upper(), word_to_guess.upper())
            if flag == 1:
                for i in range(word_length):
                    # BUG FIX: compare case-insensitively. The word list is
                    # lower case while check_letter() normalises to upper
                    # case, so an upper-case guess used to register as a hit
                    # without ever filling in the blanks.
                    if word_to_guess[i].upper() == input_guess.upper():
                        guess[i] = input_guess.upper()
                        player_scores[current_player] += 1
            elif flag == 0:
                lives = lives - 1
        print("===============")
        print("Current Scores")
        print_player_names(len(player_names))
        print("===============")
        # Advance to the next player, wrapping back to the first.
        if current_player >= (number_of_players - 1):
            current_player = 0
        else:
            current_player += 1
def add_player_to_list(players):
    """Prompt for `players` new names and register each with a score of 0."""
    for _ in range(players):
        new_name = input("What is your name Player: ")
        player_names.append(new_name)
        player_scores.append(0)
def random_words():
    """Return a randomly chosen secret word for a round of Hangman."""
    word_pool = ["headline", "soup", "filter", "command", "mass",
                 "truck", "tumble", "flourish", "squash", "mouth"]
    return random.choice(word_pool)
def check_letter(letter, word_to_guess):
    """Return 1 when `letter` occurs in `word_to_guess`, otherwise 0.

    On a miss the letter is also recorded (upper-cased) in the shared
    `letters_missed` list, unless it is already present there.
    """
    hit = 1 if letter in word_to_guess else 0
    if hit == 0:
        if letter not in letters_missed:
            letters_missed.append(letter.upper())
    return hit
def check_win_condition(lives, guess, word_to_guess):
    """End the round on a loss (lives exhausted) or a win (no blanks left).

    Either outcome reveals the word and offers a rematch via _play_again();
    that helper restarts through game() or exits the process, so on a
    finished round control normally does not return to the caller.
    """
    if (lives == 0):
        print("You have run out of lives. Better luck next time!")
        print("The correct word was {}\n" .format(word_to_guess.upper()))
        _play_again()
    if (not '_' in guess):
        print("")
        print("Congratulations! You figured out the word.")
        print("The word was {}" .format(word_to_guess.upper()))
        check_higher_score()
        print("")
        _play_again()

def _play_again():
    # Shared end-of-round prompt (previously duplicated verbatim in both the
    # win and the loss branch): restart with fresh state, or quit.
    while(True):
        play_again = input("Would you like to play again? (Yes/No) ").lower()
        if (play_again != "yes" and play_again != "no"):
            print("Wrong input. Please input yes or no")
        elif (play_again == "yes"):
            print("\n")
            # Reset the shared round state in place so every module-level
            # reference keeps pointing at the same lists.
            del player_names[:]
            del player_scores[:]
            del letters_missed[:]
            game()
        elif (play_again == "no"):
            print("")
            exit()
def check_higher_score():
    """Announce the round's winner based on player_scores.

    Prints a single-winner message when exactly one player holds the top
    score, otherwise a shared-high-score message. (Previously the shared
    message was triggered whenever *any* two players tied on any score,
    even when the leader was unique.)
    """
    max_score = max(player_scores)
    if player_scores.count(max_score) == 1:
        max_score_index = player_scores.index(max_score)
        print("{} got the highest score with {} points!" .format(player_names[max_score_index], max_score))
    else:
        print("Multiple players achieved a high score of {}!" .format(max_score))
def print_player_names(players):
    """Print the first `players` entries of the roster as 'name: score'."""
    print("\nThe current players are: ")
    for idx in range(players):
        print("{}: {}".format(player_names[idx], player_scores[idx]))
def game():
    """Entry point: run the player-setup flow, which then starts the round."""
    players()
game()
|
[
"collections.defaultdict",
"random.choice"
] |
[((3346, 3366), 'random.choice', 'random.choice', (['words'], {}), '(words)\n', (3359, 3366), False, 'import random\n'), ((5450, 5467), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (5461, 5467), False, 'from collections import defaultdict\n')]
|
# Generated by Django 3.1.12 on 2021-06-23 04:42
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
import uuid
class Migration(migrations.Migration):
    """Initial migration for the bus app: creates the `bus` table (BusModel).

    Auto-generated by Django 3.1.12; an applied migration must not be edited
    without a matching change to the database state.
    """
    initial = True
    dependencies = [
        ('bus_driver', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='BusModel',
            fields=[
                # created/modified timestamps are auto-managed by django-model-utils.
                ('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
                ('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
                # presumably a soft-delete flag (model_utils convention) -- TODO confirm
                ('is_removed', models.BooleanField(default=False)),
                ('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                # NOTE(review): a licence plate stored as BooleanField looks wrong;
                # verify against BusModel before relying on this column.
                ('plate', models.BooleanField()),
                ('is_available', models.BooleanField()),
                ('capacity', models.PositiveSmallIntegerField(default=10)),
                # Nullable FK: deleting the driver keeps the bus row (SET_NULL).
                ('driver', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='bus_driver.busdrivermodel')),
            ],
            options={
                'db_table': 'bus',
            },
        ),
    ]
|
[
"django.db.models.ForeignKey",
"django.db.models.UUIDField",
"django.db.models.BooleanField",
"django.db.models.PositiveSmallIntegerField"
] |
[((742, 776), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (761, 776), False, 'from django.db import migrations, models\n'), ((802, 893), 'django.db.models.UUIDField', 'models.UUIDField', ([], {'default': 'uuid.uuid4', 'editable': '(False)', 'primary_key': '(True)', 'serialize': '(False)'}), '(default=uuid.uuid4, editable=False, primary_key=True,\n serialize=False)\n', (818, 893), False, 'from django.db import migrations, models\n'), ((918, 939), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (937, 939), False, 'from django.db import migrations, models\n'), ((975, 996), 'django.db.models.BooleanField', 'models.BooleanField', ([], {}), '()\n', (994, 996), False, 'from django.db import migrations, models\n'), ((1028, 1072), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'default': '(10)'}), '(default=10)\n', (1060, 1072), False, 'from django.db import migrations, models\n'), ((1102, 1212), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""bus_driver.busdrivermodel"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='bus_driver.busdrivermodel')\n", (1119, 1212), False, 'from django.db import migrations, models\n')]
|
from plivo import exceptions
from tests.base import PlivoResourceTestCase
from tests.decorators import with_response
class LookupTest(PlivoResourceTestCase):

    @with_response(200)
    def test_get(self):
        """A number lookup should issue a GET and match the canned response."""
        phone_number = '+14154305555'
        lookup_response = self.client.lookup.get(phone_number)
        self.assertResponseMatches(lookup_response)
        self.assertEqual(self.client.current_request.method, 'GET')
|
[
"tests.decorators.with_response"
] |
[((165, 183), 'tests.decorators.with_response', 'with_response', (['(200)'], {}), '(200)\n', (178, 183), False, 'from tests.decorators import with_response\n')]
|
#!/usr/bin/env python
##############################################################################
# Copyright (c) 2015 Ericsson AB and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
# Unittest for yardstick.benchmark.scenarios.networking.iperf3.Iperf
from __future__ import absolute_import
import unittest
from yardstick.benchmark.scenarios.networking import \
vtc_instantiation_validation
class VtcInstantiationValidationTestCase(unittest.TestCase):

    def setUp(self):
        """Build a scenario whose options are all blank strings."""
        option_keys = (
            'default_net_name',
            'default_subnet_name',
            'vlan_net_1_name',
            'vlan_subnet_1_name',
            'vlan_net_2_name',
            'vlan_subnet_2_name',
            'vnic_type',
            'vtc_flavor',
            'packet_size',
            'vlan_sender',
            'vlan_receiver',
        )
        scenario = {'options': {key: '' for key in option_keys}}
        self.vt = vtc_instantiation_validation.VtcInstantiationValidation(
            scenario, '')

    def test_run_for_success(self):
        """run() should complete without raising for an empty result dict."""
        result = {}
        self.vt.run(result)
def main():
    """Run this module's tests with the standard unittest runner."""
    unittest.main()
# Standard script guard: run the tests when executed directly.
if __name__ == '__main__':
    main()
|
[
"unittest.main",
"yardstick.benchmark.scenarios.networking.vtc_instantiation_validation.VtcInstantiationValidation"
] |
[((1601, 1616), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1614, 1616), False, 'import unittest\n'), ((1415, 1484), 'yardstick.benchmark.scenarios.networking.vtc_instantiation_validation.VtcInstantiationValidation', 'vtc_instantiation_validation.VtcInstantiationValidation', (['scenario', '""""""'], {}), "(scenario, '')\n", (1470, 1484), False, 'from yardstick.benchmark.scenarios.networking import vtc_instantiation_validation\n')]
|
import glob
import imp
import numpy
import shutil
import sys
imp.load_source('common_functions','common_functions.py')
import common_functions as cf
def dirty_cont_image(config,config_raw,config_file,logger):
    """
    Generates a dirty image (niter=0 tclean) of each science target including
    the continuum emission.

    Validates that the per-target pixel-size and image-size lists in the
    'clean' section match the target list; in interactive mode the user is
    prompted to fix or revise them (and the config file is rewritten),
    otherwise a mismatch aborts the script.

    Relies on module-level state: `interactive` (set at the bottom of this
    script) and, presumably, the CASA runtime for `tclean`, `casalog` and
    `raw_input` (Python 2) -- TODO confirm this runs inside CASA.

    Input:
    config = The parameters read from the configuration file. (Ordered dictionary)
    config_raw = The instance of the parser.
    config_file = Path to configuration file. (String)
    logger = The logging object.
    """
    logger.info('Starting making dirty continuum image.')
    calib = config['calibration']
    rest_freq = config['global']['rest_freq']
    # Work on copies so the config lists are not mutated by the spw handling.
    targets = calib['target_names'][:]
    fields = calib['targets'][:]
    # Targets split per spectral window ('<name>.spw<n>') share the field of
    # their base target: insert a duplicate so `fields` stays aligned with
    # `targets` index-for-index.
    for i in range(len(targets)):
        target = targets[i]
        if 'spw' in target:
            inx = target.index('.spw')
            target_name = target[:inx]
            if target_name in calib['target_names'][i-1]:
                fields.insert(i,fields[i-1])
    if calib['mosaic']:
        targets = list(set(calib['target_names']))
    cln_param = config['clean']
    src_dir = config['global']['src_dir']+'/'
    img_dir = config['global']['img_dir']+'/'
    # NOTE(review): the '/.' prefix looks suspicious -- confirm against
    # cf.makedir's contract that this creates the intended directory.
    cf.makedir('/.'+img_dir,logger)
    logger.info('Removing any existing dirty continuum images.')
    # Clear out any products from a previous run so tclean starts fresh.
    del_list = glob.glob(img_dir+'*cont.dirty*')
    for file_path in del_list:
        logger.info('Deleting: '+file_path)
        shutil.rmtree(file_path)
    logger.info('Checking clean parameters for dirty image (inc. continuum).')
    # --- Validate/align the per-target pixel-size list. Non-interactive runs
    # abort on any length mismatch; interactive runs pad/truncate and prompt.
    reset_cln = False
    if (len(cln_param['pix_size']) == 0) or (len(cln_param['pix_size']) != len(targets)):
        if not interactive:
            logger.critical('The number of pixel sizes provided does not match the number of targets.')
            logger.info('Pixel sizes: {}'.format(cln_param['pix_size']))
            logger.info('Targets: {}'.format(targets))
            sys.exit(-1)
        reset_cln = True
        if len(cln_param['pix_size']) < len(targets):
            logger.warning('There are more target fields than pixel sizes. Appending blanks.')
            while len(cln_param['pix_size']) < len(targets):
                cln_param['pix_size'].append('')
        elif len(cln_param['pix_size']) > len(targets):
            logger.warning('There are more pixel sizes than target fields.')
            logger.info('Current pixel sizes: {}'.format(cln_param['pix_size']))
            logger.warning('The pixel size list will now be truncated to match the number of targets.')
            cln_param['pix_size'] = cln_param['pix_size'][:len(targets)]
    elif interactive:
        print('Current pixel sizes set as:')
        for i in range(len(cln_param['pix_size'])):
            print('{0}: {1}'.format(targets[i],cln_param['pix_size'][i]))
        resp = str(raw_input('Do you want revise the pixel sizes (y/n): '))
        if resp.lower() in ['yes','ye','y']:
            reset_cln = True
    # Prompt for new pixel sizes and persist them back to the config file.
    if reset_cln and interactive:
        print('For each target enter the desired pixel size:')
        for i in range(len(targets)):
            cln_param['pix_size'][i] = cf.uinput('Pixel size for {}: '.format(targets[i]), cln_param['pix_size'][i])
            logger.info('Setting pixel size for {0} as: {1}.'.format(targets[i], cln_param['pix_size'][i]))
        logger.info('Updating config file to set pixel sizes.')
        config_raw.set('clean','pix_size',cln_param['pix_size'])
        configfile = open(config_file,'w')
        config_raw.write(configfile)
        configfile.close()
    logger.info('Pixel sizes set as: {}.'.format(cln_param['pix_size']))
    logger.info('For the targets: {}.'.format(targets))
    # --- Same validation as above, now for the per-target image sizes.
    reset_cln = False
    if len(cln_param['im_size']) == 0 or len(cln_param['im_size']) != len(targets):
        if not interactive:
            logger.critical('The number of image sizes provided does not match the number of targets.')
            logger.info('Image sizes: {}'.format(cln_param['im_size']))
            logger.info('Targets: {}'.format(targets))
            sys.exit(-1)
        reset_cln = True
        if len(cln_param['im_size']) < len(targets):
            logger.warning('There are more target fields than image sizes. Appending blanks.')
            while len(cln_param['im_size']) < len(targets):
                cln_param['im_size'].append('')
        elif len(cln_param['im_size']) > len(targets):
            logger.warning('There are more image sizes than target fields.')
            logger.info('Current image sizes: {} pixels.'.format(cln_param['im_size']))
            logger.warning('The image size list will now be truncated to match the number of targets.')
            cln_param['im_size'] = cln_param['im_size'][:len(targets)]
    elif interactive:
        print('Current images sizes set as:')
        for i in range(len(cln_param['im_size'])):
            print('{0}: {1}'.format(targets[i],cln_param['im_size'][i]))
        resp = str(raw_input('Do you want revise the image sizes (y/n): '))
        if resp.lower() in ['yes','ye','y']:
            reset_cln = True
    if reset_cln and interactive:
        print('For each target enter the desired image size:')
        for i in range(len(targets)):
            print('Note: The pixel size for this target was set to: {}'.format(cln_param['pix_size'][i]))
            cln_param['im_size'][i] = cf.uinput('Image size for {}: '.format(targets[i]), cln_param['im_size'][i])
            logger.info('Setting image size for {0} as: {1} x {2}.'.format(targets[i], cln_param['im_size'][i],cln_param['pix_size'][i]))
        logger.info('Updating config file to set image sizes.')
        config_raw.set('clean','im_size',cln_param['im_size'])
        configfile = open(config_file,'w')
        config_raw.write(configfile)
        configfile.close()
    logger.info('Image sizes set as: {} pixels.'.format(cln_param['im_size']))
    logger.info('For the targets: {}.'.format(targets))
    # --- Build and run one dirty (niter=0) tclean call per target.
    for i in range(len(targets)):
        target = targets[i]
        field = fields[i]
        gridder = 'wproject'
        if calib['mosaic']:
            # For mosaics, image every field whose name contains the target
            # name together, with the mosaic gridder.
            for target_name in targets:
                inx = [j for j in range(len(calib['target_names'])) if target_name in calib['target_names'][j]]
                fields = numpy.array(calib['targets'],dtype='str')[inx]
            field = ','.join(fields)
            gridder = 'mosaic'
        logger.info('Making dirty image of {} (inc. continuum).'.format(target))
        command = "tclean(vis='{0}{1}.split', field='{2}', imagename='{3}{1}.cont.dirty', cell='{4}', imsize=[{5},{5}], specmode='cube', outframe='bary', veltype='radio', restfreq='{6}', gridder='{7}', wprojplanes=-1, pblimit=0.1, normtype='flatnoise', deconvolver='hogbom', weighting='briggs', robust={8}, niter=0, phasecenter='{9}', interactive=False)".format(src_dir,target,field,img_dir,cln_param['pix_size'][i],cln_param['im_size'][i],rest_freq,gridder,cln_param['robust'],cln_param['phasecenter'])
        logger.info('Executing command: '+command)
        # exec() of a built string: the CASA task call is logged verbatim above.
        exec(command)
        cf.check_casalog(config,config_raw,logger,casalog)
    logger.info('Completed making dirty continuum image.')
logger.info('Completed making dirty continuum image.')
# Read configuration file with parameters (path is the last CLI argument).
config_file = sys.argv[-1]
config,config_raw = cf.read_config(config_file)
interactive = config['global']['interactive']
# Set up your logger
logger = cf.get_logger(LOG_FILE_INFO = '{}.log'.format(config['global']['project_name']),
                    LOG_FILE_ERROR = '{}_errors.log'.format(config['global']['project_name'])) # Set up your logger
# Define MS file name
msfile = '{0}.ms'.format(config['global']['project_name'])  # NOTE(review): msfile appears unused in this script -- confirm before removing
#Make dirty continuum image
cf.check_casaversion(logger)
# Start from an empty image directory, then image every target.
cf.rmdir(config['global']['img_dir'],logger)
dirty_cont_image(config,config_raw,config_file,logger)
#Review and backup parameters file
cf.diff_pipeline_params(config_file,logger)
cf.backup_pipeline_params(config_file,logger)
|
[
"common_functions.read_config",
"common_functions.diff_pipeline_params",
"common_functions.makedir",
"imp.load_source",
"numpy.array",
"common_functions.check_casaversion",
"glob.glob",
"common_functions.check_casalog",
"common_functions.rmdir",
"common_functions.backup_pipeline_params"
] |
[((24, 82), 'imp.load_source', 'imp.load_source', (['"""common_functions"""', '"""common_functions.py"""'], {}), "('common_functions', 'common_functions.py')\n", (39, 82), False, 'import imp, glob, numpy\n'), ((7308, 7335), 'common_functions.read_config', 'cf.read_config', (['config_file'], {}), '(config_file)\n', (7322, 7335), True, 'import common_functions as cf\n'), ((7722, 7750), 'common_functions.check_casaversion', 'cf.check_casaversion', (['logger'], {}), '(logger)\n', (7742, 7750), True, 'import common_functions as cf\n'), ((7751, 7796), 'common_functions.rmdir', 'cf.rmdir', (["config['global']['img_dir']", 'logger'], {}), "(config['global']['img_dir'], logger)\n", (7759, 7796), True, 'import common_functions as cf\n'), ((7887, 7931), 'common_functions.diff_pipeline_params', 'cf.diff_pipeline_params', (['config_file', 'logger'], {}), '(config_file, logger)\n', (7910, 7931), True, 'import common_functions as cf\n'), ((7931, 7977), 'common_functions.backup_pipeline_params', 'cf.backup_pipeline_params', (['config_file', 'logger'], {}), '(config_file, logger)\n', (7956, 7977), True, 'import common_functions as cf\n'), ((1254, 1288), 'common_functions.makedir', 'cf.makedir', (["('/.' + img_dir)", 'logger'], {}), "('/.' + img_dir, logger)\n", (1264, 1288), True, 'import common_functions as cf\n'), ((1366, 1401), 'glob.glob', 'glob.glob', (["(img_dir + '*cont.dirty*')"], {}), "(img_dir + '*cont.dirty*')\n", (1375, 1401), False, 'import imp, glob, numpy\n'), ((7103, 7156), 'common_functions.check_casalog', 'cf.check_casalog', (['config', 'config_raw', 'logger', 'casalog'], {}), '(config, config_raw, logger, casalog)\n', (7119, 7156), True, 'import common_functions as cf\n'), ((6320, 6362), 'numpy.array', 'numpy.array', (["calib['targets']"], {'dtype': '"""str"""'}), "(calib['targets'], dtype='str')\n", (6331, 6362), False, 'import imp, glob, numpy\n')]
|
import logging
from pathlib import Path
import numpy as np
import pandas as pd
import plotly.express as px
import pyarrow.parquet as pq
from scipy.stats import betabinom as sp_betabinom
# import dashboard
from remade import dashboard as dashboard
def clip_df(df, column):
    """Clip negative values of ``column`` to zero, in place.

    The untouched values are first copied into a new column named
    ``"_<column>"`` so the original data stays available after clipping.
    A missing column is silently ignored.
    """
    if column not in df.columns:
        return
    df["_" + column] = df[column]  # keep a pristine copy under a prefixed name
    df[column] = np.clip(df[column], a_min=0, a_max=None)
def pd_wide_to_long_forward_reverse(group_wide, sep, direction):
    """Reshape one strand's wide columns into long format.

    Columns named ``k<sep>z``, ``N<sep>z`` and ``f<sep>z`` are stacked
    into rows keyed by ``tax_id`` and position ``z``; every resulting row
    is tagged with ``direction``.
    """
    stubs = ["k", "N", "f"]
    long_df = pd.wide_to_long(group_wide, stubnames=stubs, i="tax_id", j="z", sep=sep)
    long_df = long_df[stubs]
    long_df["direction"] = direction
    return long_df.reset_index()
def wide_to_long_df(group_wide):
    """Stack both strands of a wide per-taxon table into one long frame.

    Forward-strand columns use the ``"+"`` separator, reverse-strand
    columns the ``"-"`` separator; rows are tagged via the ``direction``
    column by the per-strand helper.
    """
    pieces = [
        pd_wide_to_long_forward_reverse(group_wide, sep="+", direction="Forward"),
        pd_wide_to_long_forward_reverse(group_wide, sep="-", direction="Reverse"),
    ]
    return pd.concat(pieces)
class Results:
    """In-memory view of a parquet results table for the dashboard.

    Loads the table from ``results_dir``, derives plotting columns
    (clipped LR columns, log columns, marker sizes), builds a per-sample
    color/symbol map and the plotly hover templates, and exposes query
    helpers used by the dashboard callbacks.
    """
    def __init__(self, results_dir):
        # Load the data and precompute everything the dashboard needs.
        self.results_dir = Path(results_dir)
        self._load_df_results()
        self._set_cmap()
        self._set_hover_info()
    def _load_parquet_file(self, results_dir):
        """Read a parquet file (or dataset directory) into a DataFrame."""
        df = pq.read_table(results_dir).to_pandas()
        return df
    def _load_df_results(self):
        """Load the results table and derive helper columns.

        Adds clipped LR columns, ``D_max_significance``, ``rho_Ac_abs``
        and ``log_*`` columns, caches the unique tax ids/names/ranks and
        sample shortnames, and sets the default marker size.
        """
        df = self._load_parquet_file(self.results_dir)
        # Clip negative likelihood ratios at zero; clip_df preserves the
        # originals in "_"-prefixed columns.
        for column in ["lambda_LR", "forward_lambda_LR", "reverse_lambda_LR"]:
            clip_df(df, column)
        df["D_max_significance"] = df["D_max"] / df["D_max_std"]
        df["rho_Ac_abs"] = np.abs(df["rho_Ac"])
        log_columns = [
            "N_reads",
            "N_alignments",
            "lambda_LR",
            "phi",
            "k_sum_total",
            "N_sum_total",
        ]
        for column in log_columns:
            log_column = "log_" + column
            # log10(1 + x) keeps zero counts finite.
            df.loc[:, log_column] = np.log10(1 + df[column])
        self.df = df
        self.all_tax_ids = set(self.df.tax_id.unique())
        self.all_tax_names = set(self.df.tax_name.unique())
        self.all_tax_ranks = set(self.df.tax_rank.unique())
        self.shortnames = list(self.df.shortname.unique())
        self.columns = list(self.df.columns)
        self.set_marker_size(variable="N_reads", function="sqrt", slider=30)
    def set_marker_size(self, variable="N_reads", function="sqrt", slider=30):
        """Recompute the per-row marker "size" column.

        ``function`` selects the transform ("constant", "identity",
        "sqrt" or "log10") applied to ``df[variable]``; ``slider`` is the
        global marker scale kept alongside the transformed sizes.
        """
        d_functions = {
            "constant": np.ones_like,
            "identity": lambda x: x,
            "sqrt": np.sqrt,
            "log10": np.log10,
        }
        self.df.loc[:, "size"] = d_functions[function](self.df[variable])
        self.max_of_size = np.max(self.df["size"])
        self.marker_size = slider
    def filter(self, filters):
        """Filter the results with a mapping of column -> constraint.

        Known categorical keys (shortname(s), tax_id(s), tax_rank(s),
        tax_name(s)) become equality/membership clauses; any other key is
        treated as a (low, high) range, un-log-transformed first when the
        column is slider-log-scaled. ``None`` constraints are skipped.
        Returns the matching rows of ``self.df``.
        """
        query = ""
        for column, filter in filters.items():
            if filter is None:
                continue
            elif column == "shortnames":
                query += f"(shortname in {filter}) & "
            elif column == "shortname":
                query += f"(shortname == '{filter}') & "
            elif column == "tax_id":
                query += f"(tax_id == {filter}) & "
            elif column == "tax_ids":
                query += f"(tax_id in {filter}) & "
            elif column == "tax_rank":
                query += f"(tax_rank == {filter}) & "
            elif column == "tax_ranks":
                query += f"(tax_rank in {filter}) & "
            elif column == "tax_name":
                query += f"(tax_name == {filter}) & "
            elif column == "tax_names":
                query += f"(tax_name in {filter}) & "
            else:
                low, high = filter
                if dashboard.utils.is_log_transform_column(column):
                    low = dashboard.utils.log_transform_slider(low)
                    high = dashboard.utils.log_transform_slider(high)
                query += f"({low} <= {column} <= {high}) & "
        # Drop the trailing " & " separator before querying.
        query = query[:-2]
        # print(query)
        return self.df.query(query)
    def _set_cmap(self):
        """Assign a color and marker symbol to every sample shortname.

        Colors cycle through the D3 qualitative palette; once the palette
        is exhausted the symbol index is bumped so samples stay
        distinguishable.
        """
        # https://plotly.com/python/discrete-color/#color-sequences-in-plotly-express
        # blue, orange, green, red, purple, brown, pink, grey, camouflage, turquoise
        cmap = px.colors.qualitative.D3
        N_cmap = len(cmap)
        groupby = self.df.groupby("shortname", sort=False)
        symbol_counter = 0
        d_cmap = {}
        d_symbols = {}
        for i, (name, _) in enumerate(groupby):
            if (i % N_cmap) == 0 and i != 0:
                symbol_counter += 1
            d_cmap[name] = cmap[i % N_cmap]
            d_symbols[name] = symbol_counter
        self.cmap = cmap
        self.d_cmap = d_cmap
        self.d_symbols = d_symbols
        self.d_cmap_fit = {"Forward": cmap[0], "Reverse": cmap[3], "Fit": cmap[2]}
    def _set_hover_info(self):
        """Build the plotly customdata column list and hover templates.

        Two variants exist depending on whether any Bayesian_* columns
        are present. The templates are authored with "_XXX_" placeholders
        which are then replaced, in order, by sequential customdata
        indices so the template and column list stay in sync.
        """
        columns = list(self.df.columns)
        placeholder = "_XXX_"
        contains_Bayesian = any(["Bayesian" in column for column in columns])
        if contains_Bayesian:
            self.custom_data_columns = [
                "shortname",
                "tax_name",
                "tax_rank",
                "tax_id",
                # Frequentist fits
                "lambda_LR",
                "D_max",
                "D_max_std",
                "q",
                "q_std",
                "phi",
                "phi_std",
                "asymmetry",
                "rho_Ac",
                # Bayesian Fits
                "Bayesian_n_sigma",
                "Bayesian_D_max",
                "Bayesian_D_max_std",
                "Bayesian_q",
                "Bayesian_phi",
                # Counts
                "N_reads",
                "N_alignments",
                "N_sum_total",
                "k_sum_total",
            ]
            self.hovertemplate = (
                "<b>%{customdata[_XXX_]}</b><br><br>"
                "<b>Tax</b>: <br>"
                "    Name: %{customdata[_XXX_]} <br>"
                "    Rank: %{customdata[_XXX_]} <br>"
                "    ID:   %{customdata[_XXX_]} <br><br>"
                "<b>Fit Results</b>: <br>"
                "    LR:       %{customdata[_XXX_]:9.2f} <br>"
                "    D max:    %{customdata[_XXX_]:9.2f} ± %{customdata[_XXX_]:.2f} <br>"
                "    q:        %{customdata[_XXX_]:9.2f} ± %{customdata[_XXX_]:.2f} <br>"
                "    phi:      %{customdata[_XXX_]:9.3s} ± %{customdata[_XXX_]:.3s} <br>"
                "    asymmetry:%{customdata[_XXX_]:9.3f} <br>"
                "    rho_Ac:   %{customdata[_XXX_]:9.3f} <br><br>"
                "<b>Bayesian Fit Results</b>: <br>"
                "    n sigma:  %{customdata[_XXX_]:9.2f} <br>"
                "    D max:    %{customdata[_XXX_]:9.2f} <br>"
                "    q:        %{customdata[_XXX_]:9.2f} <br>"
                "    phi:      %{customdata[_XXX_]:9.3s} <br><br>"
                "<b>Counts</b>: <br>"
                "    N reads:      %{customdata[_XXX_]:6.3s} <br>"
                "    N alignments:%{customdata[_XXX_]:6.3s} <br>"
                "    N sum total: %{customdata[_XXX_]:6.3s} <br>"
                "    k sum total: %{customdata[_XXX_]:6.3s} <br>"
                "<extra></extra>"
            )
        else:
            self.custom_data_columns = [
                "shortname",
                "tax_name",
                "tax_rank",
                "tax_id",
                # Frequentist fits
                "lambda_LR",
                "D_max",
                "D_max_std",
                "q",
                "q_std",
                "phi",
                "phi_std",
                "asymmetry",
                "rho_Ac",
                # Counts
                "N_reads",
                "N_alignments",
                "N_sum_total",
                "k_sum_total",
            ]
            self.hovertemplate = (
                "<b>%{customdata[_XXX_]}</b><br><br>"
                "<b>Tax</b>: <br>"
                "    Name: %{customdata[_XXX_]} <br>"
                "    Rank: %{customdata[_XXX_]} <br>"
                "    ID:   %{customdata[_XXX_]} <br><br>"
                "<b>Fit Results</b>: <br>"
                "    LR:       %{customdata[_XXX_]:9.2f} <br>"
                "    D max:    %{customdata[_XXX_]:9.2f} ± %{customdata[_XXX_]:.2f} <br>"
                "    q:        %{customdata[_XXX_]:9.2f} ± %{customdata[_XXX_]:.2f} <br>"
                "    phi:      %{customdata[_XXX_]:9.3s} ± %{customdata[_XXX_]:.3s} <br>"
                "    asymmetry:%{customdata[_XXX_]:9.3f} <br>"
                "    rho_Ac:   %{customdata[_XXX_]:9.3f} <br><br>"
                "<b>Counts</b>: <br>"
                "    N reads:      %{customdata[_XXX_]:6.3s} <br>"
                "    N alignments:%{customdata[_XXX_]:6.3s} <br>"
                "    N sum total: %{customdata[_XXX_]:6.3s} <br>"
                "    k sum total: %{customdata[_XXX_]:6.3s} <br>"
                "<extra></extra>"
            )
        # Replace each "_XXX_" placeholder with the next customdata index.
        # The scan restarts after every substitution because the string
        # length changes; i advances one character at a time.
        data_counter = 0
        i = 0
        while True:
            if self.hovertemplate[i : i + len(placeholder)] == placeholder:
                # break
                s_new = self.hovertemplate[:i]
                s_new += str(data_counter)
                s_new += self.hovertemplate[i + len(placeholder) :]
                self.hovertemplate = s_new
                data_counter += 1
            i += 1
            if i >= len(self.hovertemplate):
                break
        self.customdata = self.df[self.custom_data_columns]
        self.hovertemplate_fit = (
            "Fit: <br>D(z) = %{y:.3f} ± %{error_y.array:.3f}<br>" "<extra></extra>"
        )
    def parse_click_data(self, click_data, column):
        """Extract a single value from a plotly click event.

        Looks up ``column`` in the customdata column order and returns
        the corresponding value of the first clicked point; re-raises on
        any lookup failure.
        """
        try:
            index = self.custom_data_columns.index(column)
            value = click_data["points"][0]["customdata"][index]
            return value
        except Exception as e:
            raise e
    def get_single_count_group(self, shortname, tax_id, forward_reverse=""):
        """Return the long-format counts for one (sample, tax_id) pair.

        ``forward_reverse`` may be "forward" or "reverse" (case
        insensitive) to restrict to a single strand; anything else
        returns both strands.
        """
        query = f"shortname == '{shortname}' & tax_id == {tax_id}"
        group_wide = self.df.query(query)
        group = wide_to_long_df(group_wide)
        if forward_reverse.lower() == "forward":
            return group.query(f"direction=='Forward'")
        elif forward_reverse.lower() == "reverse":
            return group.query(f"direction=='Reverse'")
        else:
            return group
    def get_single_fit_prediction(self, shortname, tax_id, forward_reverse=""):
        """Evaluate the fitted damage model for one (sample, tax_id) pair.

        Uses the stored fit parameters (A, q, c, phi — optionally the
        strand-specific "forward_"/"reverse_" variants) to compute the
        damage curve D(z) over the first 15 positions and its
        beta-binomial standard deviation. Returns a dict with keys
        "mu", "std", "Dz" and "z". Raises AssertionError if the query
        does not match exactly one row.
        """
        query = f"shortname == '{shortname}' & tax_id == {tax_id}"
        ds = self.df.query(query)
        if len(ds) != 1:
            raise AssertionError(f"Something wrong here, got: {ds}")
        group = self.get_single_count_group(shortname, tax_id, forward_reverse)
        if forward_reverse.lower() == "forward":
            prefix = "forward_"
        elif forward_reverse.lower() == "reverse":
            prefix = "reverse_"
        else:
            prefix = ""
        A = getattr(ds, f"{prefix}A").values
        q = getattr(ds, f"{prefix}q").values
        c = getattr(ds, f"{prefix}c").values
        phi = getattr(ds, f"{prefix}phi").values
        z = group.z.values[:15]
        N = group.N.values[:15]
        # Geometric damage decay plus constant background.
        Dz = A * (1 - q) ** (np.abs(z) - 1) + c
        alpha = Dz * phi
        beta = (1 - Dz) * phi
        dist = sp_betabinom(n=N, a=alpha, b=beta)
        std = np.sqrt(dist.var()) / N
        d_out = {"mu": Dz, "std": std, "Dz": Dz, "z": z}
        return d_out
def load(results_dir=Path("./data/results")):
    """Build a :class:`Results` from *results_dir* (default ./data/results)."""
    results = Results(results_dir)
    return results
|
[
"pandas.wide_to_long",
"numpy.abs",
"remade.dashboard.utils.log_transform_slider",
"numpy.clip",
"pathlib.Path",
"numpy.max",
"remade.dashboard.utils.is_log_transform_column",
"pyarrow.parquet.read_table",
"numpy.log10",
"pandas.concat",
"scipy.stats.betabinom"
] |
[((1080, 1131), 'pandas.concat', 'pd.concat', (['[group_long_forward, group_long_reverse]'], {}), '([group_long_forward, group_long_reverse])\n', (1089, 1131), True, 'import pandas as pd\n'), ((11801, 11823), 'pathlib.Path', 'Path', (['"""./data/results"""'], {}), "('./data/results')\n", (11805, 11823), False, 'from pathlib import Path\n'), ((394, 434), 'numpy.clip', 'np.clip', (['df[column]'], {'a_min': '(0)', 'a_max': 'None'}), '(df[column], a_min=0, a_max=None)\n', (401, 434), True, 'import numpy as np\n'), ((552, 629), 'pandas.wide_to_long', 'pd.wide_to_long', (['group_wide'], {'stubnames': 'stub_names', 'i': '"""tax_id"""', 'j': '"""z"""', 'sep': 'sep'}), "(group_wide, stubnames=stub_names, i='tax_id', j='z', sep=sep)\n", (567, 629), True, 'import pandas as pd\n'), ((1317, 1334), 'pathlib.Path', 'Path', (['results_dir'], {}), '(results_dir)\n', (1321, 1334), False, 'from pathlib import Path\n'), ((1834, 1854), 'numpy.abs', 'np.abs', (["df['rho_Ac']"], {}), "(df['rho_Ac'])\n", (1840, 1854), True, 'import numpy as np\n'), ((2909, 2932), 'numpy.max', 'np.max', (["self.df['size']"], {}), "(self.df['size'])\n", (2915, 2932), True, 'import numpy as np\n'), ((11625, 11659), 'scipy.stats.betabinom', 'sp_betabinom', ([], {'n': 'N', 'a': 'alpha', 'b': 'beta'}), '(n=N, a=alpha, b=beta)\n', (11637, 11659), True, 'from scipy.stats import betabinom as sp_betabinom\n'), ((2151, 2175), 'numpy.log10', 'np.log10', (['(1 + df[column])'], {}), '(1 + df[column])\n', (2159, 2175), True, 'import numpy as np\n'), ((1484, 1510), 'pyarrow.parquet.read_table', 'pq.read_table', (['results_dir'], {}), '(results_dir)\n', (1497, 1510), True, 'import pyarrow.parquet as pq\n'), ((11534, 11543), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (11540, 11543), True, 'import numpy as np\n'), ((3949, 3996), 'remade.dashboard.utils.is_log_transform_column', 'dashboard.utils.is_log_transform_column', (['column'], {}), '(column)\n', (3988, 3996), True, 'from remade import dashboard as dashboard\n'), 
((4024, 4065), 'remade.dashboard.utils.log_transform_slider', 'dashboard.utils.log_transform_slider', (['low'], {}), '(low)\n', (4060, 4065), True, 'from remade import dashboard as dashboard\n'), ((4093, 4135), 'remade.dashboard.utils.log_transform_slider', 'dashboard.utils.log_transform_slider', (['high'], {}), '(high)\n', (4129, 4135), True, 'from remade import dashboard as dashboard\n')]
|
import sys
from pyspark import SparkConf, SparkContext
def save_in_local_file(rdd_of_pairs, output_file_name):
    """Write the collected (airport_type, count) pairs to a local text file.

    The file starts with a fixed header line, followed by one
    ``str(pair)`` per line.

    :param rdd_of_pairs: iterable of (airport_type, count) tuples.
    :param output_file_name: path of the file to (over)write.
    """
    # Context manager guarantees the file is closed even if a write raises,
    # unlike the original explicit open()/close() pair.
    with open(output_file_name, "w") as file:
        file.write("Number of Spanish airports by type: " + "\n")
        for line in rdd_of_pairs:
            file.write(str(line) + "\n")
def main(file_name: str) -> None:
    """Count Spanish airports by type with Spark and persist the result.

    Reads the airports CSV, keeps rows whose country code (column 8) is
    ``"ES"``, counts occurrences per airport type (column 2), prints the
    counts and writes them to ``output.txt``.

    :param file_name: path to the airports CSV file.
    """
    spark_conf = SparkConf()
    spark_context = SparkContext(conf=spark_conf)

    logger = spark_context._jvm.org.apache.log4j
    logger.LogManager.getLogger("org").setLevel(logger.Level.WARN)

    # Bug fix: the original ignored the `file_name` parameter and re-read
    # sys.argv[1] directly; use the argument the caller passed in.
    airports_file = spark_context.textFile(file_name)
    header = airports_file.first()

    spanish_airports_by_type = airports_file \
        .filter(lambda line: header not in line) \
        .map(lambda line: line.split(',')) \
        .filter(lambda line: line[8] == '"ES"') \
        .map(lambda line: (line[2], 1)) \
        .reduceByKey(lambda a, b: a + b) \
        .collect()

    for (airport_type, count) in spanish_airports_by_type:
        print("%s: %i" % (airport_type, count))

    save_in_local_file(spanish_airports_by_type, "output.txt")
    spark_context.stop()
if __name__ == "__main__":
    # Python program that uses Apache Spark to count Spanish airports by type.
    if len(sys.argv) != 2:
        print("Usage: spark-submit SpanishAirports.py <file>", file=sys.stderr)
        # sys.exit is reliable even when the `site` builtins (exit/quit)
        # are unavailable; the original used the interactive-only exit().
        sys.exit(-1)
    main(sys.argv[1])
|
[
"pyspark.SparkContext",
"pyspark.SparkConf"
] |
[((356, 367), 'pyspark.SparkConf', 'SparkConf', ([], {}), '()\n', (365, 367), False, 'from pyspark import SparkConf, SparkContext\n'), ((388, 417), 'pyspark.SparkContext', 'SparkContext', ([], {'conf': 'spark_conf'}), '(conf=spark_conf)\n', (400, 417), False, 'from pyspark import SparkConf, SparkContext\n')]
|
from gae_handlers import RestHandler
from model import Classroom
from permission import owns
def RelatedQuery(model, relationship_property):
    """Generates handlers for relationship-based queries.

    Factory returning a RestHandler subclass that lists `model` entities
    filtered by `relationship_property` against a parent object's id,
    e.g. all classrooms belonging to one team. Only GET/HEAD are served;
    POST/PUT/DELETE respond 405.
    """
    # NOTE(review): this local is never attached to the class; the
    # handlers read `self.allow`, which resolves on RestHandler — confirm
    # whether this was meant to be a class attribute.
    allow = 'GET, HEAD'
    class RelatedQueryHandler(RestHandler):
        """Dynamically generated handler for listing a resource by their
        relationship to another."""
        requires_auth = True
        def get(self, parent_type, rel_id):
            # You must be a super admin or own the related object.
            user = self.get_current_user()
            rel_id = self.get_long_uid(parent_type, rel_id)
            if not rel_id:
                return self.http_not_found()
            if not owns(user, rel_id):
                return self.http_forbidden()
            # Simulate a query string parameter so existing handler code
            # can run the query.
            self.request.GET[relationship_property] = rel_id
            # Of all the kinds we can query with RelatedQuery (Classroom,
            # Surveys, Reports) only classrooms have a name for ordering.
            ordered_types = (Classroom,)
            if model in ordered_types and 'order' not in self.request.GET:
                self.request.GET['order'] = 'name'
            # There is no id-based GET for these RelatedQuery endpoints,
            # e.g. we don't support /api/projects/X/users/Y.
            # Skip right to the inherited query() method.
            return super(RelatedQueryHandler, self).query(
                override_permissions=True)
        def post(self):
            # Read-only endpoint: reject all mutating verbs with 405.
            return self.http_method_not_allowed(self.allow)
        def put(self):
            return self.http_method_not_allowed(self.allow)
        def delete(self):
            return self.http_method_not_allowed(self.allow)
    # Attach the queried model and a descriptive name for introspection,
    # e.g. "Classrooms by team_id".
    RelatedQueryHandler.model = model
    RelatedQueryHandler.__name__ = '{}s by {}'.format(
        model.__name__, relationship_property)
    return RelatedQueryHandler
|
[
"permission.owns"
] |
[((720, 738), 'permission.owns', 'owns', (['user', 'rel_id'], {}), '(user, rel_id)\n', (724, 738), False, 'from permission import owns\n')]
|
# -*- coding: utf-8 -*-
import json
from .base import BaseApi
from collections import OrderedDict
class PackageApi(BaseApi):
    """Client for the ``/api/v1/package/*`` endpoints.

    Every method falls back to the client's default ``account_name`` /
    ``instance_name`` when the corresponding argument is omitted.
    Complex values are serialized with ``json.dumps`` before sending;
    responses that carry JSON are decoded with ``OrderedDict`` so the
    server's key order is preserved.
    """
    def list(self, account=None, instance=None, in_memory=None):
        """ Get all packages."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/list', raw=True, account=account, instance=instance, in_memory=json.dumps(in_memory))
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def get(self, package_name, detail=False, account=None, instance=None):
        """ Get package information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get', raw=True, package_name=package_name, detail=json.dumps(detail), account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def new(self, package_name, info, account=None, instance=None):
        """ Create a new package with the given info."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/new', package_name=package_name, info=json.dumps(info), account=account, instance=instance)
    def install(self, package_name, upgrade=True, is_temp=False, account=None, instance=None):
        """ Install a package."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/install', package_name=package_name, upgrade=json.dumps(upgrade),
                          is_temp=json.dumps(is_temp), account=account, instance=instance)
    def set(self, package_name, info, account=None, instance=None):
        """ Update package information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/set', package_name=package_name, info=json.dumps(info), account=account, instance=instance)
    def remove(self, package_name, account=None, instance=None):
        """ Remove a package."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove', package_name=package_name, account=account, instance=instance)
    def register_form(self, name, form_def, overwrite=False, account=None, instance=None):
        """ Register a form."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/register_form', name=name, form_def=json.dumps(form_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_forms(self, package_name, account=None, instance=None):
        """ List all forms."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_forms', package_name=package_name, account=account, instance=instance)
    def get_form(self, name, account=None, instance=None):
        """ Get form information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_form', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_form(self, name, account=None, instance=None):
        """ Remove a form."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_form', name=name, account=account, instance=instance)
    def register_script(self, name, script_def, overwrite=False, account=None, instance=None):
        """ Register a script."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        # NOTE(review): the payload key is 'code_def' while other
        # register_* methods use '<thing>_def' — confirm the server
        # expects this key before renaming.
        return self._post('/api/v1/package/register_script', name=name, code_def=json.dumps(script_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_scripts(self, package_name, account=None, instance=None):
        """ List all scripts."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_scripts', package_name=package_name, account=account, instance=instance)
    def get_script(self, name, account=None, instance=None):
        """ Get script information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_script', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_script(self, name, account=None, instance=None):
        """ Remove a script."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_script', name=name, account=account, instance=instance)
    def register_rule(self, name, rule_def, overwrite=False, account=None, instance=None):
        """ Register a rule."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/register_rule', name=name, rule_def=json.dumps(rule_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_rules(self, package_name, account=None, instance=None):
        """ List all rules."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_rules', package_name=package_name, account=account, instance=instance)
    def get_rule(self, name, account=None, instance=None):
        """ Get rule information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_rule', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_rule(self, name, account=None, instance=None):
        """ Remove a rule."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_rule', name=name, account=account, instance=instance)
    def register_template(self, name, template_def, overwrite=False, account=None, instance=None):
        """ Register a template."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/register_template', name=name, template_def=json.dumps(template_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_templates(self, package_name, account=None, instance=None):
        """ List all templates."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_templates', package_name=package_name, account=account, instance=instance)
    def get_template(self, name, account=None, instance=None):
        """ Get template information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_template', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_template(self, name, account=None, instance=None):
        """ Remove a template."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_template', name=name, account=account, instance=instance)
    def register_mdset(self, name, mdset_def, overwrite=False, account=None, instance=None):
        """ Register a metadata set."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/register_mdset', name=name, mdset_def=json.dumps(mdset_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_mdsets(self, package_name, account=None, instance=None):
        """ List all metadata sets."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_mdsets', package_name=package_name, account=account, instance=instance)
    def get_mdset(self, name, account=None, instance=None):
        """ Get metadata set information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_mdset', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_mdset(self, name, account=None, instance=None):
        """ Remove a metadata set."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_mdset', name=name, account=account, instance=instance)
    def register_stage(self, name, stage_def, overwrite=False, account=None, instance=None):
        """ Register a stage."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/register_stage', name=name, stage_def=json.dumps(stage_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_stages(self, package_name, account=None, instance=None):
        """ List all stages."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_stages', package_name=package_name, account=account, instance=instance)
    def get_stage(self, name, account=None, instance=None):
        """ Get stage information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_stage', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_stage(self, name, account=None, instance=None):
        """ Remove a stage."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_stage', name=name, account=account, instance=instance)
    def register_workflow(self, name, workflow_def, overwrite=False, account=None, instance=None):
        """ Register a workflow."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/register_workflow', name=name, workflow_def=json.dumps(workflow_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_workflows(self, package_name, account=None, instance=None):
        """ List all workflows."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_workflows', package_name=package_name, account=account, instance=instance)
    def get_workflow(self, name, account=None, instance=None):
        """ Get workflow information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_workflow', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_workflow(self, name, account=None, instance=None):
        """ Remove a workflow."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_workflow', name=name, account=account, instance=instance)
    def register_skin(self, name, skin_def, overwrite=False, account=None, instance=None):
        """ Register a skin."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/register_skin', name=name, skin_def=json.dumps(skin_def), overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_skins(self, package_name, account=None, instance=None):
        """ List all skins."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_skins', package_name=package_name, account=account, instance=instance)
    def get_skin(self, name, account=None, instance=None):
        """ Get skin information."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_skin', raw=True, name=name, account=account, instance=instance)
        return json.loads(resp.text, object_pairs_hook=OrderedDict)
    def remove_skin(self, name, account=None, instance=None):
        """ Remove a skin."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_skin', name=name, account=account, instance=instance)
    def add_resource(self, package_name, res_path, stream, overwrite=False, account=None, instance=None):
        """ Add a resource (upload *stream* to *res_path*)."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._post('/api/v1/package/add_resource', package_name=package_name, res_path=res_path, files={'stream':('resource', stream)}, overwrite=json.dumps(overwrite), account=account, instance=instance)
    def list_resources(self, package_name, account=None, instance=None):
        """ List all resources."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/list_resources', package_name=package_name, account=account, instance=instance)
    def get_resource(self, package_name, res_path='/', account=None, instance=None):
        """ Get resource content (returns the raw HTTP response)."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        resp = self._get('/api/v1/package/get_resource', raw=True, package_name=package_name, res_path=res_path, account=account, instance=instance)
        return resp
    def remove_resource(self, package_name, res_path, account=None, instance=None):
        """ Remove a resource."""
        if not account: account = self.account_name
        if not instance: instance = self.instance_name
        return self._get('/api/v1/package/remove_resource', package_name=package_name, res_path=res_path, account=account, instance=instance)
|
[
"json.loads",
"json.dumps"
] |
[((468, 520), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (478, 520), False, 'import json\n'), ((894, 946), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (904, 946), False, 'import json\n'), ((3386, 3438), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (3396, 3438), False, 'import json\n'), ((4780, 4832), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (4790, 4832), False, 'import json\n'), ((6161, 6213), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (6171, 6213), False, 'import json\n'), ((7574, 7626), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (7584, 7626), False, 'import json\n'), ((8971, 9023), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (8981, 9023), False, 'import json\n'), ((10361, 10413), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (10371, 10413), False, 'import json\n'), ((11777, 11829), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (11787, 11829), False, 'import json\n'), ((13162, 13214), 'json.loads', 'json.loads', (['resp.text'], {'object_pairs_hook': 'OrderedDict'}), '(resp.text, object_pairs_hook=OrderedDict)\n', (13172, 13214), False, 'import json\n'), ((430, 451), 'json.dumps', 'json.dumps', (['in_memory'], {}), '(in_memory)\n', (440, 451), False, 'import json\n'), ((823, 841), 'json.dumps', 
'json.dumps', (['detail'], {}), '(detail)\n', (833, 841), False, 'import json\n'), ((1230, 1246), 'json.dumps', 'json.dumps', (['info'], {}), '(info)\n', (1240, 1246), False, 'import json\n'), ((1596, 1615), 'json.dumps', 'json.dumps', (['upgrade'], {}), '(upgrade)\n', (1606, 1615), False, 'import json\n'), ((1650, 1669), 'json.dumps', 'json.dumps', (['is_temp'], {}), '(is_temp)\n', (1660, 1669), False, 'import json\n'), ((1990, 2006), 'json.dumps', 'json.dumps', (['info'], {}), '(info)\n', (2000, 2006), False, 'import json\n'), ((2659, 2679), 'json.dumps', 'json.dumps', (['form_def'], {}), '(form_def)\n', (2669, 2679), False, 'import json\n'), ((2691, 2712), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (2701, 2712), False, 'import json\n'), ((4043, 4065), 'json.dumps', 'json.dumps', (['script_def'], {}), '(script_def)\n', (4053, 4065), False, 'import json\n'), ((4077, 4098), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (4087, 4098), False, 'import json\n'), ((5434, 5454), 'json.dumps', 'json.dumps', (['rule_def'], {}), '(rule_def)\n', (5444, 5454), False, 'import json\n'), ((5466, 5487), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (5476, 5487), False, 'import json\n'), ((6827, 6851), 'json.dumps', 'json.dumps', (['template_def'], {}), '(template_def)\n', (6837, 6851), False, 'import json\n'), ((6863, 6884), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (6873, 6884), False, 'import json\n'), ((8237, 8258), 'json.dumps', 'json.dumps', (['mdset_def'], {}), '(mdset_def)\n', (8247, 8258), False, 'import json\n'), ((8270, 8291), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (8280, 8291), False, 'import json\n'), ((9629, 9650), 'json.dumps', 'json.dumps', (['stage_def'], {}), '(stage_def)\n', (9639, 9650), False, 'import json\n'), ((9662, 9683), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (9672, 9683), False, 'import json\n'), ((11030, 11054), 
'json.dumps', 'json.dumps', (['workflow_def'], {}), '(workflow_def)\n', (11040, 11054), False, 'import json\n'), ((11066, 11087), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (11076, 11087), False, 'import json\n'), ((12435, 12455), 'json.dumps', 'json.dumps', (['skin_def'], {}), '(skin_def)\n', (12445, 12455), False, 'import json\n'), ((12467, 12488), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (12477, 12488), False, 'import json\n'), ((13901, 13922), 'json.dumps', 'json.dumps', (['overwrite'], {}), '(overwrite)\n', (13911, 13922), False, 'import json\n')]
|
# Copyright (c) 2008-2011 testtools developers. See LICENSE for details.
import os
import tempfile
import unittest
from testtools import TestCase
from testtools.compat import (
_b,
_u,
StringIO,
)
from testtools.content import (
attach_file,
Content,
content_from_file,
content_from_stream,
TracebackContent,
text_content,
)
from testtools.content_type import (
ContentType,
UTF8_TEXT,
)
from testtools.matchers import (
Equals,
MatchesException,
Raises,
raises,
)
from testtools.tests.helpers import an_exc_info
# Shared matcher: asserts that the callable under test raises ValueError.
raises_value_error = Raises(MatchesException(ValueError))
class TestContent(TestCase):
    """Tests for ``Content`` and the ``content_from_*`` factory functions."""

    def test___init___None_errors(self):
        # Both the content type and the chunk callable are mandatory.
        self.assertThat(lambda: Content(None, None), raises_value_error)
        self.assertThat(
            lambda: Content(None, lambda: ["traceback"]), raises_value_error)
        self.assertThat(
            lambda: Content(ContentType("text", "traceback"), None),
            raises_value_error)

    def test___init___sets_ivars(self):
        content_type = ContentType("foo", "bar")
        content = Content(content_type, lambda: ["bytes"])
        self.assertEqual(content_type, content.content_type)
        self.assertEqual(["bytes"], list(content.iter_bytes()))

    def test___eq__(self):
        # Equality is by *produced bytes*, not by chunking or identity.
        content_type = ContentType("foo", "bar")
        one_chunk = lambda: [_b("bytes")]
        two_chunk = lambda: [_b("by"), _b("tes")]
        content1 = Content(content_type, one_chunk)
        content2 = Content(content_type, one_chunk)
        content3 = Content(content_type, two_chunk)
        content4 = Content(content_type, lambda: [_b("by"), _b("te")])
        content5 = Content(ContentType("f", "b"), two_chunk)
        self.assertEqual(content1, content2)
        self.assertEqual(content1, content3)
        self.assertNotEqual(content1, content4)
        self.assertNotEqual(content1, content5)

    def test___repr__(self):
        content = Content(
            ContentType("application", "octet-stream"),
            lambda: [_b("\x00bin"), _b("ary\xff")])
        self.assertIn("\\x00binary\\xff", repr(content))

    def test_iter_text_not_text_errors(self):
        # iter_text is only defined for text/* content types.
        content_type = ContentType("foo", "bar")
        content = Content(content_type, lambda: ["bytes"])
        self.assertThat(content.iter_text, raises_value_error)

    def test_iter_text_decodes(self):
        content_type = ContentType("text", "strange", {"charset": "utf8"})
        content = Content(
            content_type, lambda: [_u("bytes\xea").encode("utf8")])
        self.assertEqual([_u("bytes\xea")], list(content.iter_text()))

    def test_iter_text_default_charset_iso_8859_1(self):
        content_type = ContentType("text", "strange")
        text = _u("bytes\xea")
        iso_version = text.encode("ISO-8859-1")
        content = Content(content_type, lambda: [iso_version])
        self.assertEqual([text], list(content.iter_text()))

    def test_from_file(self):
        fd, path = tempfile.mkstemp()
        self.addCleanup(os.remove, path)
        # os.write requires bytes on Python 3; use the _b compat helper as
        # the rest of this module does (no-op on Python 2).
        os.write(fd, _b('some data'))
        os.close(fd)
        content = content_from_file(path, UTF8_TEXT, chunk_size=2)
        self.assertThat(
            list(content.iter_bytes()),
            Equals([_b('so'), _b('me'), _b(' d'), _b('at'), _b('a')]))

    def test_from_nonexistent_file(self):
        directory = tempfile.mkdtemp()
        nonexistent = os.path.join(directory, 'nonexistent-file')
        content = content_from_file(nonexistent)
        self.assertThat(content.iter_bytes, raises(IOError))

    def test_from_file_default_type(self):
        content = content_from_file('/nonexistent/path')
        self.assertThat(content.content_type, Equals(UTF8_TEXT))

    def test_from_file_eager_loading(self):
        fd, path = tempfile.mkstemp()
        os.write(fd, _b('some data'))
        os.close(fd)
        content = content_from_file(path, UTF8_TEXT, buffer_now=True)
        # The file is gone, but the eagerly buffered data must survive.
        os.remove(path)
        self.assertThat(
            _b('').join(content.iter_bytes()), Equals(_b('some data')))

    def test_from_stream(self):
        data = StringIO('some data')
        content = content_from_stream(data, UTF8_TEXT, chunk_size=2)
        self.assertThat(
            list(content.iter_bytes()), Equals(['so', 'me', ' d', 'at', 'a']))

    def test_from_stream_default_type(self):
        data = StringIO('some data')
        content = content_from_stream(data)
        self.assertThat(content.content_type, Equals(UTF8_TEXT))

    def test_from_stream_eager_loading(self):
        fd, path = tempfile.mkstemp()
        self.addCleanup(os.remove, path)
        os.write(fd, _b('some data'))
        stream = open(path, 'rb')
        content = content_from_stream(stream, UTF8_TEXT, buffer_now=True)
        # Data written after buffering must not show up in the content.
        os.write(fd, _b('more data'))
        os.close(fd)
        self.assertThat(
            _b('').join(content.iter_bytes()), Equals(_b('some data')))

    def test_from_text(self):
        data = _u("some data")
        expected = Content(UTF8_TEXT, lambda: [data.encode('utf8')])
        self.assertEqual(expected, text_content(data))
class TestTracebackContent(TestCase):
    """Tests for ``TracebackContent``."""

    def test___init___None_errors(self):
        # A missing exc_info tuple is rejected with ValueError.
        make_bad = lambda: TracebackContent(None, None)
        self.assertThat(make_bad, raises_value_error)

    def test___init___sets_ivars(self):
        expected_type = ContentType(
            "text", "x-traceback",
            {"language": "python", "charset": "utf8"})
        content = TracebackContent(an_exc_info, self)
        self.assertEqual(expected_type, content.content_type)
        # The rendered text must match unittest's own traceback formatting.
        result = unittest.TestResult()
        expected_text = result._exc_info_to_string(an_exc_info, self)
        self.assertEqual(expected_text, ''.join(content.iter_text()))
class TestAttachFile(TestCase):
    """Tests for the ``attach_file`` convenience helper."""

    def make_file(self, data):
        # Create a temporary file containing ``data`` and schedule its
        # removal after the test.
        fd, path = tempfile.mkstemp()
        self.addCleanup(os.remove, path)
        # os.write requires bytes on Python 3; _b is a no-op on Python 2.
        os.write(fd, _b(data))
        os.close(fd)
        return path

    def test_simple(self):
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        data = 'some data'
        path = self.make_file(data)
        my_content = text_content(data)
        attach_file(test, path, name='foo')
        self.assertEqual({'foo': my_content}, test.getDetails())

    def test_optional_name(self):
        # If no name is provided, attach_file just uses the base name of the
        # file.
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        path = self.make_file('some data')
        base_path = os.path.basename(path)
        attach_file(test, path)
        self.assertEqual([base_path], list(test.getDetails()))

    def test_lazy_read(self):
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        path = self.make_file('some data')
        attach_file(test, path, name='foo', buffer_now=False)
        content = test.getDetails()['foo']
        content_file = open(path, 'w')
        content_file.write('new data')
        content_file.close()
        # iter_bytes yields bytes chunks; join with _b('') so the comparison
        # works on Python 3 as well (matches TestContent's idiom).
        self.assertEqual(_b('').join(content.iter_bytes()), _b('new data'))

    def test_eager_read_by_default(self):
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        path = self.make_file('some data')
        attach_file(test, path, name='foo')
        content = test.getDetails()['foo']
        content_file = open(path, 'w')
        content_file.write('new data')
        content_file.close()
        # Eager (default) reading means later writes are not reflected.
        self.assertEqual(_b('').join(content.iter_bytes()), _b('some data'))
def test_suite():
    """Return a suite containing every test in this module."""
    from unittest import TestLoader
    loader = TestLoader()
    return loader.loadTestsFromName(__name__)
|
[
"os.remove",
"os.close",
"unittest.TestLoader",
"testtools.matchers.Equals",
"os.path.join",
"testtools.content.content_from_file",
"testtools.content.Content",
"tempfile.mkdtemp",
"testtools.content.TracebackContent",
"testtools.compat._b",
"testtools.matchers.MatchesException",
"os.path.basename",
"testtools.content_type.ContentType",
"testtools.compat.StringIO",
"os.write",
"unittest.TestResult",
"testtools.compat._u",
"testtools.content.content_from_stream",
"tempfile.mkstemp",
"testtools.content.text_content",
"testtools.content.attach_file",
"testtools.matchers.raises"
] |
[((621, 649), 'testtools.matchers.MatchesException', 'MatchesException', (['ValueError'], {}), '(ValueError)\n', (637, 649), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((1090, 1115), 'testtools.content_type.ContentType', 'ContentType', (['"""foo"""', '"""bar"""'], {}), "('foo', 'bar')\n", (1101, 1115), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((1134, 1175), 'testtools.content.Content', 'Content', (['content_type', "(lambda : ['bytes'])"], {}), "(content_type, lambda : ['bytes'])\n", (1141, 1175), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((1351, 1376), 'testtools.content_type.ContentType', 'ContentType', (['"""foo"""', '"""bar"""'], {}), "('foo', 'bar')\n", (1362, 1376), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((1488, 1520), 'testtools.content.Content', 'Content', (['content_type', 'one_chunk'], {}), '(content_type, one_chunk)\n', (1495, 1520), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((1540, 1572), 'testtools.content.Content', 'Content', (['content_type', 'one_chunk'], {}), '(content_type, one_chunk)\n', (1547, 1572), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((1592, 1624), 'testtools.content.Content', 'Content', (['content_type', 'two_chunk'], {}), '(content_type, two_chunk)\n', (1599, 1624), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((2222, 2247), 'testtools.content_type.ContentType', 'ContentType', (['"""foo"""', '"""bar"""'], {}), "('foo', 'bar')\n", (2233, 2247), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((2266, 2307), 'testtools.content.Content', 'Content', 
(['content_type', "(lambda : ['bytes'])"], {}), "(content_type, lambda : ['bytes'])\n", (2273, 2307), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((2432, 2483), 'testtools.content_type.ContentType', 'ContentType', (['"""text"""', '"""strange"""', "{'charset': 'utf8'}"], {}), "('text', 'strange', {'charset': 'utf8'})\n", (2443, 2483), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((2731, 2761), 'testtools.content_type.ContentType', 'ContentType', (['"""text"""', '"""strange"""'], {}), "('text', 'strange')\n", (2742, 2761), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((2777, 2789), 'testtools.compat._u', '_u', (['"""bytesê"""'], {}), "('bytesê')\n", (2779, 2789), False, 'from testtools.compat import _b, _u, StringIO\n'), ((2859, 2904), 'testtools.content.Content', 'Content', (['content_type', '(lambda : [iso_version])'], {}), '(content_type, lambda : [iso_version])\n', (2866, 2904), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((3014, 3032), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (3030, 3032), False, 'import tempfile\n'), ((3082, 3107), 'os.write', 'os.write', (['fd', '"""some data"""'], {}), "(fd, 'some data')\n", (3090, 3107), False, 'import os\n'), ((3116, 3128), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (3124, 3128), False, 'import os\n'), ((3147, 3195), 'testtools.content.content_from_file', 'content_from_file', (['path', 'UTF8_TEXT'], {'chunk_size': '(2)'}), '(path, UTF8_TEXT, chunk_size=2)\n', (3164, 3195), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((3363, 3381), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (3379, 3381), False, 'import tempfile\n'), ((3404, 3447), 'os.path.join', 'os.path.join', 
(['directory', '"""nonexistent-file"""'], {}), "(directory, 'nonexistent-file')\n", (3416, 3447), False, 'import os\n'), ((3466, 3496), 'testtools.content.content_from_file', 'content_from_file', (['nonexistent'], {}), '(nonexistent)\n', (3483, 3496), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((3620, 3658), 'testtools.content.content_from_file', 'content_from_file', (['"""/nonexistent/path"""'], {}), "('/nonexistent/path')\n", (3637, 3658), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((3788, 3806), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (3804, 3806), False, 'import tempfile\n'), ((3815, 3840), 'os.write', 'os.write', (['fd', '"""some data"""'], {}), "(fd, 'some data')\n", (3823, 3840), False, 'import os\n'), ((3849, 3861), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (3857, 3861), False, 'import os\n'), ((3880, 3931), 'testtools.content.content_from_file', 'content_from_file', (['path', 'UTF8_TEXT'], {'buffer_now': '(True)'}), '(path, UTF8_TEXT, buffer_now=True)\n', (3897, 3931), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((3940, 3955), 'os.remove', 'os.remove', (['path'], {}), '(path)\n', (3949, 3955), False, 'import os\n'), ((4097, 4118), 'testtools.compat.StringIO', 'StringIO', (['"""some data"""'], {}), "('some data')\n", (4105, 4118), False, 'from testtools.compat import _b, _u, StringIO\n'), ((4137, 4187), 'testtools.content.content_from_stream', 'content_from_stream', (['data', 'UTF8_TEXT'], {'chunk_size': '(2)'}), '(data, UTF8_TEXT, chunk_size=2)\n', (4156, 4187), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((4353, 4374), 'testtools.compat.StringIO', 'StringIO', 
(['"""some data"""'], {}), "('some data')\n", (4361, 4374), False, 'from testtools.compat import _b, _u, StringIO\n'), ((4393, 4418), 'testtools.content.content_from_stream', 'content_from_stream', (['data'], {}), '(data)\n', (4412, 4418), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((4550, 4568), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (4566, 4568), False, 'import tempfile\n'), ((4618, 4643), 'os.write', 'os.write', (['fd', '"""some data"""'], {}), "(fd, 'some data')\n", (4626, 4643), False, 'import os\n'), ((4696, 4751), 'testtools.content.content_from_stream', 'content_from_stream', (['stream', 'UTF8_TEXT'], {'buffer_now': '(True)'}), '(stream, UTF8_TEXT, buffer_now=True)\n', (4715, 4751), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((4760, 4785), 'os.write', 'os.write', (['fd', '"""more data"""'], {}), "(fd, 'more data')\n", (4768, 4785), False, 'import os\n'), ((4794, 4806), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (4802, 4806), False, 'import os\n'), ((4946, 4961), 'testtools.compat._u', '_u', (['"""some data"""'], {}), "('some data')\n", (4948, 4961), False, 'from testtools.compat import _b, _u, StringIO\n'), ((5322, 5357), 'testtools.content.TracebackContent', 'TracebackContent', (['an_exc_info', 'self'], {}), '(an_exc_info, self)\n', (5338, 5357), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((5381, 5458), 'testtools.content_type.ContentType', 'ContentType', (['"""text"""', '"""x-traceback"""', "{'language': 'python', 'charset': 'utf8'}"], {}), "('text', 'x-traceback', {'language': 'python', 'charset': 'utf8'})\n", (5392, 5458), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((5549, 5570), 'unittest.TestResult', 'unittest.TestResult', 
([], {}), '()\n', (5568, 5570), False, 'import unittest\n'), ((5792, 5810), 'tempfile.mkstemp', 'tempfile.mkstemp', ([], {}), '()\n', (5808, 5810), False, 'import tempfile\n'), ((5860, 5878), 'os.write', 'os.write', (['fd', 'data'], {}), '(fd, data)\n', (5868, 5878), False, 'import os\n'), ((5887, 5899), 'os.close', 'os.close', (['fd'], {}), '(fd)\n', (5895, 5899), False, 'import os\n'), ((6155, 6173), 'testtools.content.text_content', 'text_content', (['data'], {}), '(data)\n', (6167, 6173), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((6182, 6217), 'testtools.content.attach_file', 'attach_file', (['test', 'path'], {'name': '"""foo"""'}), "(test, path, name='foo')\n", (6193, 6217), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((6597, 6619), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (6613, 6619), False, 'import os\n'), ((6628, 6651), 'testtools.content.attach_file', 'attach_file', (['test', 'path'], {}), '(test, path)\n', (6639, 6651), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((6920, 6973), 'testtools.content.attach_file', 'attach_file', (['test', 'path'], {'name': '"""foo"""', 'buffer_now': '(False)'}), "(test, path, name='foo', buffer_now=False)\n", (6931, 6973), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((7409, 7444), 'testtools.content.attach_file', 'attach_file', (['test', 'path'], {'name': '"""foo"""'}), "(test, path, name='foo')\n", (7420, 7444), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((1723, 1744), 'testtools.content_type.ContentType', 'ContentType', (['"""f"""', 
'"""b"""'], {}), "('f', 'b')\n", (1734, 1744), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((1999, 2041), 'testtools.content_type.ContentType', 'ContentType', (['"""application"""', '"""octet-stream"""'], {}), "('application', 'octet-stream')\n", (2010, 2041), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((3261, 3298), 'testtools.matchers.Equals', 'Equals', (["['so', 'me', ' d', 'at', 'a']"], {}), "(['so', 'me', ' d', 'at', 'a'])\n", (3267, 3298), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((3541, 3556), 'testtools.matchers.raises', 'raises', (['IOError'], {}), '(IOError)\n', (3547, 3556), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((3705, 3722), 'testtools.matchers.Equals', 'Equals', (['UTF8_TEXT'], {}), '(UTF8_TEXT)\n', (3711, 3722), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((4028, 4047), 'testtools.matchers.Equals', 'Equals', (['"""some data"""'], {}), "('some data')\n", (4034, 4047), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((4253, 4290), 'testtools.matchers.Equals', 'Equals', (["['so', 'me', ' d', 'at', 'a']"], {}), "(['so', 'me', ' d', 'at', 'a'])\n", (4259, 4290), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((4465, 4482), 'testtools.matchers.Equals', 'Equals', (['UTF8_TEXT'], {}), '(UTF8_TEXT)\n', (4471, 4482), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((4879, 4898), 'testtools.matchers.Equals', 'Equals', (['"""some data"""'], {}), "('some data')\n", (4885, 4898), False, 'from testtools.matchers import Equals, MatchesException, Raises, raises\n'), ((5066, 5084), 'testtools.content.text_content', 'text_content', (['data'], {}), '(data)\n', (5078, 5084), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, 
TracebackContent, text_content\n'), ((7731, 7743), 'unittest.TestLoader', 'TestLoader', ([], {}), '()\n', (7741, 7743), False, 'from unittest import TestLoader\n'), ((756, 775), 'testtools.content.Content', 'Content', (['None', 'None'], {}), '(None, None)\n', (763, 775), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((842, 879), 'testtools.content.Content', 'Content', (['None', "(lambda : ['traceback'])"], {}), "(None, lambda : ['traceback'])\n", (849, 879), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((1406, 1417), 'testtools.compat._b', '_b', (['"""bytes"""'], {}), "('bytes')\n", (1408, 1417), False, 'from testtools.compat import _b, _u, StringIO\n'), ((1448, 1456), 'testtools.compat._b', '_b', (['"""by"""'], {}), "('by')\n", (1450, 1456), False, 'from testtools.compat import _b, _u, StringIO\n'), ((1458, 1467), 'testtools.compat._b', '_b', (['"""tes"""'], {}), "('tes')\n", (1460, 1467), False, 'from testtools.compat import _b, _u, StringIO\n'), ((2605, 2617), 'testtools.compat._u', '_u', (['"""bytesê"""'], {}), "('bytesê')\n", (2607, 2617), False, 'from testtools.compat import _b, _u, StringIO\n'), ((5213, 5241), 'testtools.content.TracebackContent', 'TracebackContent', (['None', 'None'], {}), '(None, None)\n', (5229, 5241), False, 'from testtools.content import attach_file, Content, content_from_file, content_from_stream, TracebackContent, text_content\n'), ((953, 985), 'testtools.content_type.ContentType', 'ContentType', (['"""text"""', '"""traceback"""'], {}), "('text', 'traceback')\n", (964, 985), False, 'from testtools.content_type import ContentType, UTF8_TEXT\n'), ((1675, 1683), 'testtools.compat._b', '_b', (['"""by"""'], {}), "('by')\n", (1677, 1683), False, 'from testtools.compat import _b, _u, StringIO\n'), ((1685, 1693), 'testtools.compat._b', '_b', (['"""te"""'], {}), 
"('te')\n", (1687, 1693), False, 'from testtools.compat import _b, _u, StringIO\n'), ((2064, 2077), 'testtools.compat._b', '_b', (["'\\x00bin'"], {}), "('\\x00bin')\n", (2066, 2077), False, 'from testtools.compat import _b, _u, StringIO\n'), ((2079, 2089), 'testtools.compat._b', '_b', (['"""aryÿ"""'], {}), "('aryÿ')\n", (2081, 2089), False, 'from testtools.compat import _b, _u, StringIO\n'), ((3993, 3999), 'testtools.compat._b', '_b', (['""""""'], {}), "('')\n", (3995, 3999), False, 'from testtools.compat import _b, _u, StringIO\n'), ((4844, 4850), 'testtools.compat._b', '_b', (['""""""'], {}), "('')\n", (4846, 4850), False, 'from testtools.compat import _b, _u, StringIO\n'), ((2546, 2558), 'testtools.compat._u', '_u', (['"""bytesê"""'], {}), "('bytesê')\n", (2548, 2558), False, 'from testtools.compat import _b, _u, StringIO\n')]
|
from padertorch.data.segment import Segmenter
import numpy as np
import torch
def test_simple_case():
    """Plain overlapping segmentation of two keys with shift < length."""
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    segmenter = Segmenter(length=32000, include_keys=('x', 'y'), shift=16000)
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        expected = np.arange(i * 16000, 16000 + (i + 1) * 16000)
        np.testing.assert_equal(seg['x'], expected)
        np.testing.assert_equal(seg['x'], seg['y'])
def test_fixed_anchor():
    """An integer anchor offsets the whole segment grid by that amount."""
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
                          shift=16000, anchor=10)
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        expected = 10 + np.arange(i * 16000, 16000 + (i + 1) * 16000)
        np.testing.assert_equal(seg['x'], expected)
        np.testing.assert_equal(seg['x'], seg['y'])
def test_random_anchor():
    """
    Checks fix for random anchor in https://github.com/fgnt/padertorch/pull/91
    """
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    for anchor in ['random', 'random_max_segments']:
        segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
                              shift=32000, anchor=anchor)
        segments = segmenter(ex)
        assert type(segments) == list, segments
    # 65000 samples with length == shift == 32000 always yield two segments
    # when the anchor maximizes the segment count.
    assert len(segments) == 2
def test_copy_keys():
    """Only keys listed in copy_keys are carried over next to the segments."""
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    segmenter = Segmenter(length=32000, include_keys=('x', 'y'),
                          shift=16000, copy_keys='gender')
    segments = segmenter(ex)
    assert type(segments) == list, segments
    # 'num_samples' is neither segmented nor copied, so it must be dropped.
    expected_keys = [key for key in ex.keys() if not key == 'num_samples']
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in expected_keys)
        np.testing.assert_equal(
            seg['x'], np.arange(i * 16000, 16000 + (i + 1) * 16000))
        np.testing.assert_equal(seg['x'], seg['y'])
def test_include_none():
    """Without include_keys every array-valued key gets segmented."""
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    segments = Segmenter(length=32000, shift=16000)(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(
            seg['x'], np.arange(i * 16000, 16000 + (i + 1) * 16000))
        np.testing.assert_equal(seg['x'], seg['y'])
def test_include_to_larger():
    """Requesting a key that is missing from the example must fail."""
    segmenter = Segmenter(length=32000, shift=16000,
                          include_keys=['x', 'y', 'z'])
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    raised = False
    try:
        segmenter(ex)
    except AssertionError:
        raised = True
    assert raised, segmenter
def test_include_none_with_torch():
    """Numpy arrays and torch tensors are segmented consistently."""
    segmenter = Segmenter(length=32000, shift=16000)
    array = np.random.randn(5, 10, 64000)
    ex = {'x': array.copy(), 'y': array.copy(),
          'z': torch.tensor(array),
          'num_samples': 65000, 'gender': 'm'}
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for seg in segments:
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(seg['x'], seg['z'].numpy())
        np.testing.assert_equal(seg['x'], seg['y'])
def test_error_include_list():
    """A plain python list among the included keys raises ValueError."""
    segmenter = Segmenter(length=32000, shift=16000,
                          include_keys=['x', 'y', 'z'])
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'z': np.arange(65000).tolist(),
          'num_samples': 65000, 'gender': 'm'}
    raised = False
    try:
        segmenter(ex)
    except ValueError:
        raised = True
    assert raised, segmenter
def test_include_none_ignore_list():
    """Without include_keys, list-valued entries are silently ignored."""
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'z': np.arange(65000).tolist(),
          'num_samples': 65000, 'gender': 'm'}
    segments = Segmenter(length=32000, shift=16000)(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(
            seg['x'], np.arange(i * 16000, 16000 + (i + 1) * 16000))
    # With explicit copy_keys only those keys survive next to the segments.
    segments = Segmenter(length=32000, shift=16000,
                        copy_keys=['num_samples', 'gender'])(ex)
    assert type(segments) == list, segments
    expected_keys = ['x', 'y', 'num_samples', 'gender']
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in expected_keys)
        np.testing.assert_equal(
            seg['x'], np.arange(i * 16000, 16000 + (i + 1) * 16000))
        np.testing.assert_equal(seg['x'], seg['y'])
def test_include_exclude():
    """Excluded keys are copied as-is instead of being segmented."""
    segmenter = Segmenter(length=32000, shift=16000, exclude_keys='y')
    ex = {'x': np.arange(65000), 'y': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(
            seg['x'], np.arange(i * 16000, 16000 + (i + 1) * 16000))
        # 'y' stays untouched in every segment.
        np.testing.assert_equal(seg['y'], np.arange(65000))
def test_axis():
    """Per-key segmentation axes, given as a list and then as a dict."""
    # List form: one axis per include key, in order.
    segmenter = Segmenter(length=32000, shift=16000, include_keys=['x', 'y'],
                          axis=[-1, 0])
    ex = {'x': np.arange(65000), 'y': np.arange(65000)[:, None],
          'num_samples': 65000, 'gender': 'm'}
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(
            seg['x'], np.arange(i * 16000, 16000 + (i + 1) * 16000))
        np.testing.assert_equal(seg['x'], seg['y'][:, 0])
    # Dict form: the axis differs per key, including a torch tensor.
    segmenter = Segmenter(length=32000, shift=16000,
                          include_keys=['x', 'y', 'z'],
                          axis={'x': 0, 'y': 1, 'z': -1})
    array = np.random.randn(65000, 5, 10)
    ex = {'x': array.copy(), 'y': array.copy().transpose(1, 0, 2),
          'z': torch.tensor(array.transpose(1, 2, 0)),
          'num_samples': 65000, 'gender': 'm'}
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for seg in segments:
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(seg['x'], seg['z'].numpy().transpose(2, 0, 1))
        np.testing.assert_equal(seg['x'], seg['y'].transpose(1, 0, 2))
def test_axis_dict():
    """The segmentation axis can be selected per key via a dict."""
    segmenter = Segmenter(length=32000, shift=16000, include_keys=['x', 'y'],
                          axis={'x': -1, 'y': 0})
    ex = {'x': np.arange(65000), 'y': np.arange(65000)[:, None],
          'num_samples': 65000, 'gender': 'm'}
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(
            seg['x'], np.arange(i * 16000, 16000 + (i + 1) * 16000))
        np.testing.assert_equal(seg['x'], seg['y'][:, 0])
def test_axis_dict_wildcard():
    """An axis given for a nested dict applies to all entries below it."""
    segmenter = Segmenter(length=32000, shift=16000,
                          include_keys=['audio_data'],
                          axis={'audio_data': -1})
    ex = {'audio_data': {'x': np.arange(65000), 'y': np.arange(65000)},
          'z': np.arange(65000),
          'num_samples': 65000, 'gender': 'm'}
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        expected = np.arange(i * 16000, 16000 + (i + 1) * 16000)
        np.testing.assert_equal(seg['audio_data']['x'], expected)
        np.testing.assert_equal(seg['audio_data']['x'],
                                seg['audio_data']['y'])
        # 'z' is not included, hence copied unsegmented.
        np.testing.assert_equal(seg['z'], np.arange(65000))
def test_wildcard():
    """Including a nested dict segments every array inside it."""
    segmenter = Segmenter(length=32000, shift=16000,
                          include_keys=['audio_data'])
    ex = {'audio_data': {'x': np.arange(65000), 'y': np.arange(65000)},
          'num_samples': 65000, 'gender': 'm'}
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        expected = np.arange(i * 16000, 16000 + (i + 1) * 16000)
        np.testing.assert_equal(seg['audio_data']['x'], expected)
        np.testing.assert_equal(seg['audio_data']['x'],
                                seg['audio_data']['y'])
def test_wildcard_exclude():
    """Nested keys can be excluded with dotted paths like 'audio_data.y'."""
    ex = {
        'audio_data': {'x': np.arange(65000), 'y': np.arange(65000)[:, None]},
        'z': np.arange(65000)[:, None],
        'num_samples': 65000, 'gender': 'm'
    }
    segmenter = Segmenter(length=32000, shift=16000,
                          include_keys=['audio_data'],
                          exclude_keys=['audio_data.y'],
                          axis={'audio_data': -1})
    segments = segmenter(ex)
    assert type(segments) == list, segments
    for i, seg in enumerate(segments):
        assert all(key in seg.keys() for key in ex.keys())
        np.testing.assert_equal(
            seg['audio_data']['x'],
            np.arange(i * 16000, 16000 + (i + 1) * 16000))
        # The excluded nested key keeps its original, unsegmented shape.
        np.testing.assert_equal(seg['audio_data']['y'],
                                np.arange(65000)[:, None])
def test_length_mode():
    """The 'constant'/'max'/'min' modes adjust the effective segment length.

    Checks the first segment's length for two example lengths, with the
    default shift and with shift=250, with padding enabled.
    """
    examples = [{'x': np.arange(16000), 'y': np.arange(16000),
                 'num_samples': 16000, 'gender': 'm'},
                {'x': np.arange(15900), 'y': np.arange(15900),
                 'num_samples': 15900, 'gender': 'm'}]
    # Expected first-segment lengths per mode for each example.
    new_length = [{'constant': 950, 'max': 942, 'min': 1000},
                  {'constant': 950, 'max': 936, 'min': 994}]
    for mode in ['constant', 'max', 'min']:
        for idx, ex in enumerate(examples):
            # ('x',): a one-element tuple needs the trailing comma; the
            # previous ('x') was just the string 'x'.
            segmenter = Segmenter(length=950, include_keys=('x',),
                                  mode=mode, padding=True)
            segmented = segmenter(ex)
            np.testing.assert_equal(segmented[0]['x'],
                                    np.arange(0, new_length[idx][mode]))
    new_length = [{'constant': 950, 'max': 947, 'min': 951},
                  {'constant': 950, 'max': 950, 'min': 954}]
    for mode in ['constant', 'max', 'min']:
        for idx, ex in enumerate(examples):
            segmenter = Segmenter(length=950, shift=250, include_keys=('x',),
                                  mode=mode, padding=True)
            segmented = segmenter(ex)
            np.testing.assert_equal(segmented[0]['x'],
                                    np.arange(0, new_length[idx][mode]))
|
[
"numpy.random.randn",
"padertorch.data.segment.Segmenter",
"numpy.arange",
"numpy.testing.assert_equal",
"torch.tensor"
] |
[((120, 181), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'include_keys': "('x', 'y')", 'shift': '(16000)'}), "(length=32000, include_keys=('x', 'y'), shift=16000)\n", (129, 181), False, 'from padertorch.data.segment import Segmenter\n'), ((701, 773), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'include_keys': "('x', 'y')", 'shift': '(16000)', 'anchor': '(10)'}), "(length=32000, include_keys=('x', 'y'), shift=16000, anchor=10)\n", (710, 773), False, 'from padertorch.data.segment import Segmenter\n'), ((1498, 1576), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'include_keys': "('x', 'y')", 'shift': '(32000)', 'anchor': '"""random"""'}), "(length=32000, include_keys=('x', 'y'), shift=32000, anchor='random')\n", (1507, 1576), False, 'from padertorch.data.segment import Segmenter\n'), ((1696, 1792), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'include_keys': "('x', 'y')", 'shift': '(32000)', 'anchor': '"""random_max_segments"""'}), "(length=32000, include_keys=('x', 'y'), shift=32000, anchor=\n 'random_max_segments')\n", (1705, 1792), False, 'from padertorch.data.segment import Segmenter\n'), ((1961, 2047), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'include_keys': "('x', 'y')", 'shift': '(16000)', 'copy_keys': '"""gender"""'}), "(length=32000, include_keys=('x', 'y'), shift=16000, copy_keys=\n 'gender')\n", (1970, 2047), False, 'from padertorch.data.segment import Segmenter\n'), ((2641, 2677), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)'}), '(length=32000, shift=16000)\n', (2650, 2677), False, 'from padertorch.data.segment import Segmenter\n'), ((3176, 3242), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['x', 'y', 'z']"}), "(length=32000, shift=16000, include_keys=['x', 'y', 'z'])\n", 
(3185, 3242), False, 'from padertorch.data.segment import Segmenter\n'), ((3551, 3587), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)'}), '(length=32000, shift=16000)\n', (3560, 3587), False, 'from padertorch.data.segment import Segmenter\n'), ((3600, 3629), 'numpy.random.randn', 'np.random.randn', (['(5)', '(10)', '(64000)'], {}), '(5, 10, 64000)\n', (3615, 3629), True, 'import numpy as np\n'), ((4111, 4177), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['x', 'y', 'z']"}), "(length=32000, shift=16000, include_keys=['x', 'y', 'z'])\n", (4120, 4177), False, 'from padertorch.data.segment import Segmenter\n'), ((4525, 4561), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)'}), '(length=32000, shift=16000)\n', (4534, 4561), False, 'from padertorch.data.segment import Segmenter\n'), ((5015, 5088), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'copy_keys': "['num_samples', 'gender']"}), "(length=32000, shift=16000, copy_keys=['num_samples', 'gender'])\n", (5024, 5088), False, 'from padertorch.data.segment import Segmenter\n'), ((5568, 5622), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'exclude_keys': '"""y"""'}), "(length=32000, shift=16000, exclude_keys='y')\n", (5577, 5622), False, 'from padertorch.data.segment import Segmenter\n'), ((6114, 6189), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['x', 'y']", 'axis': '[-1, 0]'}), "(length=32000, shift=16000, include_keys=['x', 'y'], axis=[-1, 0])\n", (6123, 6189), False, 'from padertorch.data.segment import Segmenter\n'), ((6698, 6801), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['x', 'y', 'z']", 'axis': "{'x': 0, 'y': 1, 
'z': -1}"}), "(length=32000, shift=16000, include_keys=['x', 'y', 'z'], axis={\n 'x': 0, 'y': 1, 'z': -1})\n", (6707, 6801), False, 'from padertorch.data.segment import Segmenter\n'), ((6861, 6890), 'numpy.random.randn', 'np.random.randn', (['(65000)', '(5)', '(10)'], {}), '(65000, 5, 10)\n', (6876, 6890), True, 'import numpy as np\n'), ((7433, 7522), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['x', 'y']", 'axis': "{'x': -1, 'y': 0}"}), "(length=32000, shift=16000, include_keys=['x', 'y'], axis={'x': -1,\n 'y': 0})\n", (7442, 7522), False, 'from padertorch.data.segment import Segmenter\n'), ((8059, 8154), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['audio_data']", 'axis': "{'audio_data': -1}"}), "(length=32000, shift=16000, include_keys=['audio_data'], axis={\n 'audio_data': -1})\n", (8068, 8154), False, 'from padertorch.data.segment import Segmenter\n'), ((8929, 8994), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['audio_data']"}), "(length=32000, shift=16000, include_keys=['audio_data'])\n", (8938, 8994), False, 'from padertorch.data.segment import Segmenter\n'), ((9815, 9940), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(32000)', 'shift': '(16000)', 'include_keys': "['audio_data']", 'exclude_keys': "['audio_data.y']", 'axis': "{'audio_data': -1}"}), "(length=32000, shift=16000, include_keys=['audio_data'],\n exclude_keys=['audio_data.y'], axis={'audio_data': -1})\n", (9824, 9940), False, 'from padertorch.data.segment import Segmenter\n'), ((223, 239), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (232, 239), True, 'import numpy as np\n'), ((246, 262), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (255, 262), True, 'import numpy as np\n'), ((610, 657), 'numpy.testing.assert_equal', 
'np.testing.assert_equal', (["entry['x']", "entry['y']"], {}), "(entry['x'], entry['y'])\n", (633, 657), True, 'import numpy as np\n'), ((815, 831), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (824, 831), True, 'import numpy as np\n'), ((838, 854), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (847, 854), True, 'import numpy as np\n'), ((1207, 1254), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['x']", "entry['y']"], {}), "(entry['x'], entry['y'])\n", (1230, 1254), True, 'import numpy as np\n'), ((1393, 1409), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (1402, 1409), True, 'import numpy as np\n'), ((1416, 1432), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (1425, 1432), True, 'import numpy as np\n'), ((2084, 2100), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (2093, 2100), True, 'import numpy as np\n'), ((2107, 2123), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (2116, 2123), True, 'import numpy as np\n'), ((2550, 2597), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['x']", "entry['y']"], {}), "(entry['x'], entry['y'])\n", (2573, 2597), True, 'import numpy as np\n'), ((2693, 2709), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (2702, 2709), True, 'import numpy as np\n'), ((2716, 2732), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (2725, 2732), True, 'import numpy as np\n'), ((3080, 3127), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['x']", "entry['y']"], {}), "(entry['x'], entry['y'])\n", (3103, 3127), True, 'import numpy as np\n'), ((3284, 3300), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (3293, 3300), True, 'import numpy as np\n'), ((3307, 3323), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (3316, 3323), True, 'import numpy as np\n'), ((3691, 3710), 'torch.tensor', 'torch.tensor', (['array'], {}), '(array)\n', (3703, 3710), False, 'import 
torch\n'), ((4014, 4061), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['x']", "entry['y']"], {}), "(entry['x'], entry['y'])\n", (4037, 4061), True, 'import numpy as np\n'), ((4219, 4235), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (4228, 4235), True, 'import numpy as np\n'), ((4242, 4258), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (4251, 4258), True, 'import numpy as np\n'), ((4577, 4593), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (4586, 4593), True, 'import numpy as np\n'), ((4600, 4616), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (4609, 4616), True, 'import numpy as np\n'), ((5474, 5521), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['x']", "entry['y']"], {}), "(entry['x'], entry['y'])\n", (5497, 5521), True, 'import numpy as np\n'), ((5638, 5654), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (5647, 5654), True, 'import numpy as np\n'), ((5661, 5677), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (5670, 5677), True, 'import numpy as np\n'), ((6231, 6247), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (6240, 6247), True, 'import numpy as np\n'), ((6627, 6680), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['x']", "entry['y'][:, 0]"], {}), "(entry['x'], entry['y'][:, 0])\n", (6650, 6680), True, 'import numpy as np\n'), ((7560, 7576), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (7569, 7576), True, 'import numpy as np\n'), ((7956, 8009), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['x']", "entry['y'][:, 0]"], {}), "(entry['x'], entry['y'][:, 0])\n", (7979, 8009), True, 'import numpy as np\n'), ((8289, 8305), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (8298, 8305), True, 'import numpy as np\n'), ((8688, 8763), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['audio_data']['x']", "entry['audio_data']['y']"], {}), 
"(entry['audio_data']['x'], entry['audio_data']['y'])\n", (8711, 8763), True, 'import numpy as np\n'), ((9479, 9554), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (["entry['audio_data']['x']", "entry['audio_data']['y']"], {}), "(entry['audio_data']['x'], entry['audio_data']['y'])\n", (9502, 9554), True, 'import numpy as np\n'), ((551, 600), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (560, 600), True, 'import numpy as np\n'), ((2491, 2540), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (2500, 2540), True, 'import numpy as np\n'), ((3021, 3070), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (3030, 3070), True, 'import numpy as np\n'), ((4947, 4996), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (4956, 4996), True, 'import numpy as np\n'), ((5415, 5464), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (5424, 5464), True, 'import numpy as np\n'), ((5966, 6015), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (5975, 6015), True, 'import numpy as np\n'), ((6061, 6077), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (6070, 6077), True, 'import numpy as np\n'), ((6254, 6270), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (6263, 6270), True, 'import numpy as np\n'), ((6568, 6617), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (6577, 6617), True, 'import numpy as np\n'), ((7583, 7599), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (7592, 
7599), True, 'import numpy as np\n'), ((7897, 7946), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (7906, 7946), True, 'import numpy as np\n'), ((8232, 8248), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (8241, 8248), True, 'import numpy as np\n'), ((8255, 8271), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (8264, 8271), True, 'import numpy as np\n'), ((8620, 8669), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (8629, 8669), True, 'import numpy as np\n'), ((8872, 8888), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (8881, 8888), True, 'import numpy as np\n'), ((9051, 9067), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (9060, 9067), True, 'import numpy as np\n'), ((9074, 9090), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (9083, 9090), True, 'import numpy as np\n'), ((9394, 9443), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (9403, 9443), True, 'import numpy as np\n'), ((9657, 9673), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (9666, 9673), True, 'import numpy as np\n'), ((9721, 9737), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (9730, 9737), True, 'import numpy as np\n'), ((10281, 10330), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (10290, 10330), True, 'import numpy as np\n'), ((10497, 10513), 'numpy.arange', 'np.arange', (['(16000)'], {}), '(16000)\n', (10506, 10513), True, 'import numpy as np\n'), ((10520, 10536), 'numpy.arange', 'np.arange', (['(16000)'], {}), '(16000)\n', (10529, 10536), True, 'import numpy as np\n'), ((10615, 10631), 'numpy.arange', 'np.arange', (['(15900)'], {}), '(15900)\n', (10624, 10631), 
True, 'import numpy as np\n'), ((10638, 10654), 'numpy.arange', 'np.arange', (['(15900)'], {}), '(15900)\n', (10647, 10654), True, 'import numpy as np\n'), ((10946, 11010), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(950)', 'include_keys': '"""x"""', 'mode': 'mode', 'padding': '(True)'}), "(length=950, include_keys='x', mode=mode, padding=True)\n", (10955, 11010), False, 'from padertorch.data.segment import Segmenter\n'), ((11447, 11522), 'padertorch.data.segment.Segmenter', 'Segmenter', ([], {'length': '(950)', 'shift': '(250)', 'include_keys': '"""x"""', 'mode': 'mode', 'padding': '(True)'}), "(length=950, shift=250, include_keys='x', mode=mode, padding=True)\n", (11456, 11522), False, 'from padertorch.data.segment import Segmenter\n'), ((1148, 1197), 'numpy.arange', 'np.arange', (['(idx * 16000)', '(16000 + (idx + 1) * 16000)'], {}), '(idx * 16000, 16000 + (idx + 1) * 16000)\n', (1157, 1197), True, 'import numpy as np\n'), ((4275, 4291), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (4284, 4291), True, 'import numpy as np\n'), ((4633, 4649), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (4642, 4649), True, 'import numpy as np\n'), ((9680, 9696), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (9689, 9696), True, 'import numpy as np\n'), ((10422, 10438), 'numpy.arange', 'np.arange', (['(65000)'], {}), '(65000)\n', (10431, 10438), True, 'import numpy as np\n'), ((11176, 11211), 'numpy.arange', 'np.arange', (['(0)', 'new_length[idx][mode]'], {}), '(0, new_length[idx][mode])\n', (11185, 11211), True, 'import numpy as np\n'), ((11688, 11723), 'numpy.arange', 'np.arange', (['(0)', 'new_length[idx][mode]'], {}), '(0, new_length[idx][mode])\n', (11697, 11723), True, 'import numpy as np\n')]
|
#python3
#steven 05/04/2020 Sierpiński triangle
#random start random points polygon
#the ratio argument of getRatioPoint() indicates where each line segment is divided
import matplotlib.pyplot as plt
import numpy as np
import math
def plotXY(x, y, color='k', ax=None):
    """Plot the x/y data, either on the supplied axes or via pyplot directly."""
    target = ax if ax else plt
    target.plot(x, y, color=color)
def DrawTriangleLineByPt(startPt, stopPt, color='k', ax=None):
    """Draw the straight segment between startPt and stopPt.

    Points are 2-element numpy arrays.  Drawing is delegated to plotXY().

    Fixes: the original computed slope = dy/dx unconditionally, which divided
    by zero for vertical segments (startPt[0] == stopPt[0]); it also swapped
    the endpoints with an obscure add/subtract trick.
    """
    if startPt[0] > stopPt[0]:
        # ensure left-to-right order before interpolating
        startPt, stopPt = stopPt, startPt
    if startPt[0] == stopPt[0]:
        # vertical segment: interpolate along y instead of computing a slope
        x = np.full(30, startPt[0])
        y = np.linspace(startPt[1], stopPt[1], 30)
    else:
        x = np.linspace(startPt[0], stopPt[0], 30)
        slope = (stopPt[1] - startPt[1]) / (stopPt[0] - startPt[0])
        intercept = startPt[1] - slope * startPt[0]
        y = slope * x + intercept
    plotXY(x, y, color, ax)
def drawPolygon(points):
    """Draw a closed polygon through the given point sequence."""
    for prev_pt, cur_pt in zip(points, points[1:]):
        DrawTriangleLineByPt(prev_pt, cur_pt)
    # close the polygon: last point back to the first
    DrawTriangleLineByPt(points[-1], points[0])
def trianglePolygon(points, N):
    """Recursively draw a Sierpinski-style subdivision of a polygon.

    Each level draws the polygon, then the polygon through the division
    point of every edge, then recurses into each corner triangle.
    """
    if N <= 0:
        return
    # outer polygon
    drawPolygon(points)
    # polygon through the division points of every edge
    n = len(points)
    division = [getRatioPoint(points[i - 1], points[i]) for i in range(1, n)]
    division.append(getRatioPoint(points[n - 1], points[0]))
    drawPolygon(division)
    # recurse into each corner triangle; the wrap-around corner (i == n)
    # is folded into the loop via modulo indexing
    for i in range(1, n + 1):
        corner = [division[i % n], points[i % n], division[i - 1]]
        trianglePolygon(corner, N - 1)
def getRatioPoint(pt1, pt2, ratio=0.35):
    """Return the point dividing the segment pt1-pt2 according to ratio.

    ratio=0.5 yields the midpoint; ratio=1.0 yields pt1; ratio=0.0 yields
    pt2.  Works for scalars and numpy arrays alike.
    """
    weight2 = 1 - ratio
    return ratio * pt1 + weight2 * pt2
def getRandomPoint(min=0, max=5):
    """Return a random 2-D point with both coordinates in [min, max)."""
    span = max - min
    return np.random.random((2,)) * span + min
def getRandom(min=0, max=5):
    """Return a single random float drawn uniformly from [min, max)."""
    return min + (max - min) * np.random.random()
def circle(x, r=1):
    """Return the positive y on the circle x**2 + y**2 = r**2 for abscissa x."""
    y_squared = r ** 2 - x ** 2
    return np.sqrt(y_squared)
def getRandomCirclePoint(r=1, positive=True):
    """Return a random point on the circle of radius r.

    positive=True samples the upper semicircle, otherwise the lower one.
    Sampling on the circle keeps a polygon built from such points convex.
    """
    x = getRandom(min=-1 * r, max=r)
    y = circle(x, r=r)
    if not positive:
        y = -1 * y
    return np.array([x, y], dtype=np.float64)
def getSequenceCirclePoints(r=1, Num=5, offset=0):
    """Return Num points evenly spaced on the circle of radius r.

    offset rotates the whole point set by the given angle (radians).
    """
    def _vertex(k):
        angle = (k + 1) * math.pi * 2 / Num + offset
        return np.array([r * math.cos(angle), r * math.sin(angle)],
                        dtype=np.float64)

    return [_vertex(k) for k in range(Num)]
def triangleStart(N=3, numPoints=5):
    """Kick off the recursive polygon drawing.

    :param N: recursion depth of the subdivision.
    :param numPoints: number of vertices of the starting polygon
        (previously hard-coded to 5; the default preserves old behavior).
    """
    pts = getSequenceCirclePoints(Num=numPoints)  # starting vertex list
    trianglePolygon(pts, N)
def main():
    """Draw the fractal at a fixed recursion depth and display the figure."""
    depth = 4  # recursion depth of the subdivision
    triangleStart(depth)
    axes = plt.axes()
    axes.set_aspect('equal')
    plt.show()


if __name__ == "__main__":
    main()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.axes",
"math.sin",
"numpy.random.random",
"numpy.array",
"math.cos",
"numpy.linspace",
"numpy.sqrt"
] |
[((598, 636), 'numpy.linspace', 'np.linspace', (['startPt[0]', 'stopPt[0]', '(30)'], {}), '(startPt[0], stopPt[0], 30)\n', (609, 636), True, 'import numpy as np\n'), ((2171, 2195), 'numpy.sqrt', 'np.sqrt', (['(r ** 2 - x ** 2)'], {}), '(r ** 2 - x ** 2)\n', (2178, 2195), True, 'import numpy as np\n'), ((2334, 2368), 'numpy.array', 'np.array', (['[0, 0]'], {'dtype': 'np.float64'}), '([0, 0], dtype=np.float64)\n', (2342, 2368), True, 'import numpy as np\n'), ((3067, 3077), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3075, 3077), True, 'import matplotlib.pyplot as plt\n'), ((309, 332), 'matplotlib.pyplot.plot', 'plt.plot', (['x', 'y'], {'color': 'c'}), '(x, y, color=c)\n', (317, 332), True, 'import matplotlib.pyplot as plt\n'), ((2655, 2689), 'numpy.array', 'np.array', (['[0, 0]'], {'dtype': 'np.float64'}), '([0, 0], dtype=np.float64)\n', (2663, 2689), True, 'import numpy as np\n'), ((2015, 2037), 'numpy.random.random', 'np.random.random', (['(2,)'], {}), '((2,))\n', (2031, 2037), True, 'import numpy as np\n'), ((2105, 2123), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (2121, 2123), True, 'import numpy as np\n'), ((2751, 2766), 'math.cos', 'math.cos', (['angle'], {}), '(angle)\n', (2759, 2766), False, 'import math\n'), ((2785, 2800), 'math.sin', 'math.sin', (['angle'], {}), '(angle)\n', (2793, 2800), False, 'import math\n'), ((3032, 3042), 'matplotlib.pyplot.axes', 'plt.axes', ([], {}), '()\n', (3040, 3042), True, 'import matplotlib.pyplot as plt\n')]
|
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()  # run under the TF1 graph/session execution model
class PostProcessor(tf.keras.layers.Layer):
    """Keras layer that post-processes a raw detector output grid.

    The last axis of the grid is split as [first 5 channels, xy (2), wh
    (rest)]; `call` squashes the xy channels through a sigmoid and leaves
    the other channels untouched.
    """

    def __init__(self, config):
        self.config = config
        # Fix: the original called super().__init__(config), which passed the
        # config positionally into Layer.__init__, where it would be consumed
        # as the `trainable` argument.
        super(PostProcessor, self).__init__()

    def call(self, model_output):
        """
        Apply a sigmoid to the x & y channels of the grid.

        :param model_output: prediction grid; last axis is
            [5 leading channels, xy (2), wh (remaining)]
        :return: grid of the same shape with xy passed through a sigmoid
        """
        leading = model_output[:, :, :, :5]
        xy = model_output[:, :, :, 5:7]
        wh = model_output[:, :, :, 7:]
        xy = tf.sigmoid(xy)
        return tf.concat((leading, xy, wh), axis=-1)

    def output_ops(self, feature_mapper_output):
        """
        Build mask placeholders and return the sigmoid of the raw grid.

        s: sigmoid mask
        x: (pixels_per_box_x * box_index_x) + (pixels_per_box_x * sigmoid(x_output)
        y: (pixels_per_box_y * box_index_y) + (pixels_per_box_y * sigmoid(y_output)
        w: (anchor_w * exp(w_output) * 32)
        h: (anchor_h * exp(h_output) * 32)
        :param feature_mapper_output: output grid
        :return: element-wise sigmoid of the grid
        """
        model_output_depth = feature_mapper_output.shape[-1]
        self.bounding_box_xy_mask_placeholder = tf.placeholder(tf.float32, shape=[model_output_depth])
        # attribute name kept as-is (including the odd capital B) so existing
        # feed_dict keys elsewhere keep working
        self.bounding_Box_wh_mask_placeholder = tf.placeholder(tf.float32, shape=[model_output_depth])
        # l2 mask selects the xy/wh channels; its complement is the sigmoid mask
        self.l2_mask = self.bounding_box_xy_mask_placeholder + self.bounding_Box_wh_mask_placeholder
        self.sigmoid_mask = 1 - self.l2_mask
        return tf.sigmoid(feature_mapper_output)
|
[
"tensorflow.compat.v1.sigmoid",
"tensorflow.compat.v1.concat",
"tensorflow.compat.v1.disable_v2_behavior",
"tensorflow.compat.v1.placeholder"
] |
[((34, 58), 'tensorflow.compat.v1.disable_v2_behavior', 'tf.disable_v2_behavior', ([], {}), '()\n', (56, 58), True, 'import tensorflow.compat.v1 as tf\n'), ((516, 530), 'tensorflow.compat.v1.sigmoid', 'tf.sigmoid', (['xy'], {}), '(xy)\n', (526, 530), True, 'import tensorflow.compat.v1 as tf\n'), ((546, 597), 'tensorflow.compat.v1.concat', 'tf.concat', (['(sigmoid_cross_entropy, xy, wh)'], {'axis': '(-1)'}), '((sigmoid_cross_entropy, xy, wh), axis=-1)\n', (555, 597), True, 'import tensorflow.compat.v1 as tf\n'), ((1126, 1180), 'tensorflow.compat.v1.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[model_output_depth]'}), '(tf.float32, shape=[model_output_depth])\n', (1140, 1180), True, 'import tensorflow.compat.v1 as tf\n'), ((1229, 1283), 'tensorflow.compat.v1.placeholder', 'tf.placeholder', (['tf.float32'], {'shape': '[model_output_depth]'}), '(tf.float32, shape=[model_output_depth])\n', (1243, 1283), True, 'import tensorflow.compat.v1 as tf\n'), ((1445, 1478), 'tensorflow.compat.v1.sigmoid', 'tf.sigmoid', (['feature_mapper_output'], {}), '(feature_mapper_output)\n', (1455, 1478), True, 'import tensorflow.compat.v1 as tf\n')]
|
"""Authors: <NAME>
Copyright (C) 2021 Onedata.org
This software is released under the MIT license cited in 'LICENSE.txt'
"""
import os
import random
import string
import pytest
from botocore.config import Config
import boto3
import uuid
#
# Uncomment to trace boto3 requests
#
# import logging
# boto3.set_stream_logger('', logging.DEBUG)
def random_int(lower_bound=1, upper_bound=100):
    """Return a random integer from [lower_bound, upper_bound], inclusive."""
    return random.randrange(lower_bound, upper_bound + 1)
def random_str(size=None,
               characters=string.ascii_uppercase + string.digits):
    """Return a random string of the given length built from `characters`.

    Fix: the original default `size=random_int()` was evaluated once at
    import time, so every no-argument call produced the same length.
    Drawing the size per call restores the intended randomness.

    :param size: length of the string; a fresh random length when None
    :param characters: alphabet to draw from (uppercase letters + digits)
    """
    if size is None:
        size = random_int()
    return ''.join(random.choice(characters) for _ in range(size))
def random_bytes(size=None):
    """Return `size` random bytes (UTF-8 encoding of a random string).

    Fix: the original default `size=random_int()` was frozen at import
    time, so every no-argument call produced the same length; passing
    None now draws a fresh random size on every call.
    """
    if size is None:
        size = random_int()
    return random_str(size).encode('utf-8')
def clean_bucket(s3_client, name, prefix=''):
    """Recursively delete every object under `prefix` in bucket `name`.

    Listing uses Delimiter='/', so "directories" appear as CommonPrefixes
    and are cleaned by recursing into each sub-prefix before the objects
    directly under this prefix are deleted.

    Fix: the original sent ContinuationToken='' on the first request; the
    parameter should be absent until S3 returns NextContinuationToken.
    """
    list_kwargs = dict(Bucket=name, Delimiter='/', EncodingType='url',
                       MaxKeys=1000, Prefix=prefix)
    while True:
        res = s3_client.list_objects_v2(**list_kwargs)
        # recurse into sub-"directories" first
        for common_prefix in res.get('CommonPrefixes', []):
            clean_bucket(s3_client, name, common_prefix['Prefix'])
        # delete the objects directly under this prefix
        for obj in res.get('Contents', []):
            key = obj['Key']
            print(f'Removing object {key}')
            s3_client.delete_object(Bucket=name, Key=key)
        if not res['IsTruncated']:
            break
        list_kwargs['ContinuationToken'] = res['NextContinuationToken']
@pytest.fixture
def uuid_str():
    """Fixture: a freshly generated random UUID rendered as a string."""
    fresh = uuid.uuid4()
    return str(fresh)
def s3_client_create():
    """Build a boto3 S3 client pointed at the AWS_S3_ENDPOINT endpoint."""
    client_config = Config(
        region_name='us-east-1',
        signature_version='s3v4',
        retries={'max_attempts': 1, 'mode': 'standard'},
    )
    endpoint = os.getenv('AWS_S3_ENDPOINT', 'http://0.0.0.0:8080')
    return boto3.client(
        service_name='s3',
        endpoint_url=endpoint,
        verify=False,  # endpoint may use a self-signed certificate
        config=client_config,
    )
@pytest.fixture
def s3_client():
    # Fresh S3 client per test; configuration comes from s3_client_create().
    return s3_client_create()
@pytest.fixture
def bucket(s3_client, uuid_str):
    # Create a uniquely named bucket for the test and hand its name to it.
    s3_client.create_bucket(Bucket=uuid_str)
    yield uuid_str
    # Teardown: remove every object, then the (now empty) bucket itself.
    clean_bucket(s3_client, uuid_str)
    s3_client.delete_bucket(Bucket=uuid_str)
|
[
"uuid.uuid4",
"random.randint",
"random.choice",
"botocore.config.Config",
"os.getenv"
] |
[((402, 442), 'random.randint', 'random.randint', (['lower_bound', 'upper_bound'], {}), '(lower_bound, upper_bound)\n', (416, 442), False, 'import random\n'), ((1587, 1698), 'botocore.config.Config', 'Config', ([], {'region_name': '"""us-east-1"""', 'signature_version': '"""s3v4"""', 'retries': "{'max_attempts': 1, 'mode': 'standard'}"}), "(region_name='us-east-1', signature_version='s3v4', retries={\n 'max_attempts': 1, 'mode': 'standard'})\n", (1593, 1698), False, 'from botocore.config import Config\n'), ((1531, 1543), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1541, 1543), False, 'import uuid\n'), ((565, 590), 'random.choice', 'random.choice', (['characters'], {}), '(characters)\n', (578, 590), False, 'import random\n'), ((1824, 1875), 'os.getenv', 'os.getenv', (['"""AWS_S3_ENDPOINT"""', '"""http://0.0.0.0:8080"""'], {}), "('AWS_S3_ENDPOINT', 'http://0.0.0.0:8080')\n", (1833, 1875), False, 'import os\n')]
|
"""
Will open a port in your router for Home Assistant and provide statistics.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/upnp/
"""
import asyncio
from ipaddress import ip_address
import aiohttp
import voluptuous as vol
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import dispatcher
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.typing import HomeAssistantType
from homeassistant.components.discovery import DOMAIN as DISCOVERY_DOMAIN
from .const import (
CONF_ENABLE_PORT_MAPPING, CONF_ENABLE_SENSORS,
CONF_HASS, CONF_LOCAL_IP, CONF_PORTS,
CONF_UDN, CONF_SSDP_DESCRIPTION,
SIGNAL_REMOVE_SENSOR,
)
from .const import DOMAIN
from .const import LOGGER as _LOGGER
from .config_flow import ensure_domain_data
from .device import Device
REQUIREMENTS = ['async-upnp-client==0.12.7']
DEPENDENCIES = ['http']
NOTIFICATION_ID = 'upnp_notification'
NOTIFICATION_TITLE = 'UPnP/IGD Setup'
CONFIG_SCHEMA = vol.Schema({
    DOMAIN: vol.Schema({
        # port mapping is opt-in, sensors are enabled by default
        vol.Optional(CONF_ENABLE_PORT_MAPPING, default=False): cv.boolean,
        vol.Optional(CONF_ENABLE_SENSORS, default=True): cv.boolean,
        # optional override for the local IP used toward the IGD device
        vol.Optional(CONF_LOCAL_IP): vol.All(ip_address, cv.string),
        # mapping of ports; the sentinel CONF_HASS ('hass') is replaced by
        # the actual Home Assistant HTTP port at setup time
        vol.Optional(CONF_PORTS):
            vol.Schema({
                vol.Any(CONF_HASS, cv.port):
                    vol.Any(CONF_HASS, cv.port)
            })
    }),
}, extra=vol.ALLOW_EXTRA)
def _substitute_hass_ports(ports, hass_port):
    """Substitute 'hass' for the hass_port."""
    substituted = dict(ports)
    # replace the sentinel key with the real Home Assistant HTTP port
    if CONF_HASS in substituted:
        substituted[hass_port] = substituted.pop(CONF_HASS)
    # replace sentinel values as well
    return {key: (hass_port if value == CONF_HASS else value)
            for key, value in substituted.items()}
# config
async def async_setup(hass: HomeAssistantType, config: ConfigType):
    """Register a port mapping for Home Assistant via UPnP."""
    ensure_domain_data(hass)

    # nothing to do when the integration is not configured at all
    if DOMAIN not in config:
        return True

    # discovery is required to actually find an IGD device
    if DISCOVERY_DOMAIN not in config:
        _LOGGER.warning('UPNP needs discovery, please enable it')
        return False

    upnp_config = config[DOMAIN]

    # honour an explicitly configured local IP
    if CONF_LOCAL_IP in upnp_config:
        hass.data[DOMAIN]['local_ip'] = upnp_config[CONF_LOCAL_IP]

    # default carries only the sentinel entry (port mapping disabled);
    # an explicit CONF_PORTS section from the config takes precedence
    ports = upnp_config.get(CONF_PORTS, {CONF_HASS: CONF_HASS})

    hass.data[DOMAIN]['auto_config'] = {
        'active': True,
        'enable_sensors': upnp_config[CONF_ENABLE_SENSORS],
        'enable_port_mapping': upnp_config[CONF_ENABLE_PORT_MAPPING],
        'ports': ports,
    }

    return True
# config flow
async def async_setup_entry(hass: HomeAssistantType,
                            config_entry: ConfigEntry):
    """Set up UPnP/IGD-device from a config entry.

    Creates the device from its SSDP description, optionally installs port
    mappings and sensors, and arranges for cleanup on Home Assistant stop.
    Returns False when the device cannot be reached.
    """
    ensure_domain_data(hass)
    data = config_entry.data

    # build UPnP/IGD device
    ssdp_description = data[CONF_SSDP_DESCRIPTION]
    try:
        device = await Device.async_create_device(hass, ssdp_description)
    except (asyncio.TimeoutError, aiohttp.ClientError):
        # device unreachable or description fetch failed
        _LOGGER.error('Unable to create upnp-device')
        return False

    # register the device so sensors and unload can find it by UDN
    hass.data[DOMAIN]['devices'][device.udn] = device

    # port mapping
    if data.get(CONF_ENABLE_PORT_MAPPING):
        local_ip = hass.data[DOMAIN].get('local_ip')
        ports = hass.data[DOMAIN]['auto_config']['ports']
        _LOGGER.debug('Enabling port mappings: %s', ports)

        # resolve the 'hass' sentinel to the actual HTTP server port
        hass_port = hass.http.server_port
        ports = _substitute_hass_ports(ports, hass_port)
        await device.async_add_port_mappings(ports, local_ip=local_ip)

    # sensors
    if data.get(CONF_ENABLE_SENSORS):
        _LOGGER.debug('Enabling sensors')

        # register sensor setup handlers
        hass.async_create_task(hass.config_entries.async_forward_entry_setup(
            config_entry, 'sensor'))

    async def unload_entry(event):
        """Unload entry on quit."""
        await async_unload_entry(hass, config_entry)
    # ensure port mappings/sensors are torn down when HA shuts down
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, unload_entry)

    return True
async def async_unload_entry(hass: HomeAssistantType,
                             config_entry: ConfigEntry):
    """Unload a config entry."""
    data = config_entry.data
    udn = data[CONF_UDN]
    devices = hass.data[DOMAIN]['devices']

    # nothing to clean up when the device was never registered
    if udn not in devices:
        return True
    device = devices[udn]

    # remove the port mappings installed during setup
    if data.get(CONF_ENABLE_PORT_MAPPING):
        _LOGGER.debug('Deleting port mappings')
        await device.async_delete_port_mappings()

    # tell the sensor platform to drop this device's sensors
    if data.get(CONF_ENABLE_SENSORS):
        _LOGGER.debug('Deleting sensors')
        dispatcher.async_dispatcher_send(hass, SIGNAL_REMOVE_SENSOR, device)

    # forget the stored device
    del devices[udn]

    return True
|
[
"voluptuous.Any",
"voluptuous.Optional",
"homeassistant.helpers.dispatcher.async_dispatcher_send",
"voluptuous.All"
] |
[((5037, 5105), 'homeassistant.helpers.dispatcher.async_dispatcher_send', 'dispatcher.async_dispatcher_send', (['hass', 'SIGNAL_REMOVE_SENSOR', 'device'], {}), '(hass, SIGNAL_REMOVE_SENSOR, device)\n', (5069, 5105), False, 'from homeassistant.helpers import dispatcher\n'), ((1216, 1269), 'voluptuous.Optional', 'vol.Optional', (['CONF_ENABLE_PORT_MAPPING'], {'default': '(False)'}), '(CONF_ENABLE_PORT_MAPPING, default=False)\n', (1228, 1269), True, 'import voluptuous as vol\n'), ((1291, 1338), 'voluptuous.Optional', 'vol.Optional', (['CONF_ENABLE_SENSORS'], {'default': '(True)'}), '(CONF_ENABLE_SENSORS, default=True)\n', (1303, 1338), True, 'import voluptuous as vol\n'), ((1360, 1387), 'voluptuous.Optional', 'vol.Optional', (['CONF_LOCAL_IP'], {}), '(CONF_LOCAL_IP)\n', (1372, 1387), True, 'import voluptuous as vol\n'), ((1429, 1453), 'voluptuous.Optional', 'vol.Optional', (['CONF_PORTS'], {}), '(CONF_PORTS)\n', (1441, 1453), True, 'import voluptuous as vol\n'), ((1389, 1419), 'voluptuous.All', 'vol.All', (['ip_address', 'cv.string'], {}), '(ip_address, cv.string)\n', (1396, 1419), True, 'import voluptuous as vol\n'), ((1496, 1523), 'voluptuous.Any', 'vol.Any', (['CONF_HASS', 'cv.port'], {}), '(CONF_HASS, cv.port)\n', (1503, 1523), True, 'import voluptuous as vol\n'), ((1545, 1572), 'voluptuous.Any', 'vol.Any', (['CONF_HASS', 'cv.port'], {}), '(CONF_HASS, cv.port)\n', (1552, 1572), True, 'import voluptuous as vol\n')]
|
import os
import sys
import time
import pickle
import scipy.io as sio
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation
from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam, Adamax
from keras.constraints import maxnorm, nonneg
import numpy
import scipy
from scipy import io
from itertools import product
import pickle
import time
import uuid
import os
import h5py
import sys
from keras.models import model_from_json
from keras.models import model_from_yaml
#accuracies is a list of (rmse_i, correlation_i) for each of your epochs - we want to pick the model from somewhere among those epochs
def dump(keras_weight_file, output_filename, rmse, corr):
    """Export the Dense-layer weights of the model to a MATLAB .mat file.

    :param keras_weight_file: path of the keras weight file to load
    :param output_filename: destination .mat file
    :param rmse: rmse value stored alongside the weights
    :param corr: correlation value stored alongside the weights
    """
    # build the model and load the trained weights into it
    model = build_model()
    model.load_weights(keras_weight_file)
    # isinstance (not `type(...) is`) so Dense subclasses are exported too
    denses = [layer for layer in model.layers
              if isinstance(layer, keras.layers.Dense)]
    # one (weights, bias) pair per Dense layer, flattened into a cell array;
    # `numpy.object` was removed in numpy >= 1.24 — use the builtin `object`
    final = numpy.zeros(len(denses) * 2, dtype=object)
    for i, layer in enumerate(denses):
        w, b = layer.get_weights()
        final[i * 2] = w
        final[i * 2 + 1] = b
    # write weights, rmse and correlation in one .mat file
    sio.savemat(output_filename,
                {'rmse': rmse, 'correlation_2': corr, 'weights': final})
#this is the arch4 - #TODO BY YOU: replace with your architecture - just make sure the loop inside keras2matlab.py is working correctly with your architecture
def build_model():
    """Build and compile the fixed 122-input regression network (arch4).

    Layers: 500-relu -> 200-relu -> 100-sigmoid -> 1-sigmoid with a
    non-negative weight constraint on the output layer; each hidden layer
    is followed by 30% dropout.  Compiled with MSE loss and Adam.
    """
    model = Sequential()
    model.add(Dense(500, input_dim=122, activation='relu'))
    model.add(Dropout(0.3))
    model.add(Dense(200, activation='relu'))
    model.add(Dropout(0.3))
    model.add(Dense(100, activation='sigmoid'))
    model.add(Dropout(0.3))
    model.add(Dense(1, W_constraint=nonneg(), activation='sigmoid'))
    # The original constructed six optimizers (SGD, RMSprop, Adagrad,
    # Adadelta, Adam, Adamax) and used only index 4; build just the one
    # that is actually used.
    optimizer = Adam(lr=0.0001 / 2, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
    model.compile(loss='mean_squared_error', optimizer=optimizer)
    return model
|
[
"keras.optimizers.Adadelta",
"keras.optimizers.Adamax",
"keras.optimizers.SGD",
"keras.layers.Dropout",
"keras.optimizers.Adagrad",
"keras.optimizers.Adam",
"scipy.io.savemat",
"keras.layers.Dense",
"keras.models.Sequential",
"keras.optimizers.RMSprop",
"keras.constraints.nonneg"
] |
[((1279, 1368), 'scipy.io.savemat', 'sio.savemat', (['output_filename', "{'rmse': rmse, 'correlation_2': corr, 'weights': final}"], {}), "(output_filename, {'rmse': rmse, 'correlation_2': corr,\n 'weights': final})\n", (1290, 1368), True, 'import scipy.io as sio\n'), ((1558, 1570), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (1568, 1570), False, 'from keras.models import Sequential\n'), ((1589, 1633), 'keras.layers.Dense', 'Dense', (['(500)'], {'input_dim': '(122)', 'activation': '"""relu"""'}), "(500, input_dim=122, activation='relu')\n", (1594, 1633), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((1653, 1665), 'keras.layers.Dropout', 'Dropout', (['(0.3)'], {}), '(0.3)\n', (1660, 1665), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((1685, 1714), 'keras.layers.Dense', 'Dense', (['(200)'], {'activation': '"""relu"""'}), "(200, activation='relu')\n", (1690, 1714), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((1734, 1746), 'keras.layers.Dropout', 'Dropout', (['(0.3)'], {}), '(0.3)\n', (1741, 1746), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((1766, 1798), 'keras.layers.Dense', 'Dense', (['(100)'], {'activation': '"""sigmoid"""'}), "(100, activation='sigmoid')\n", (1771, 1798), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((1818, 1830), 'keras.layers.Dropout', 'Dropout', (['(0.3)'], {}), '(0.3)\n', (1825, 1830), False, 'from keras.layers import Dense, Dropout, Activation\n'), ((1957, 1993), 'keras.optimizers.SGD', 'SGD', ([], {'lr': '(0.1)', 'momentum': '(0.1)', 'decay': '(0.0)'}), '(lr=0.1, momentum=0.1, decay=0.0)\n', (1960, 1993), False, 'from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam, Adamax\n'), ((2020, 2061), 'keras.optimizers.RMSprop', 'RMSprop', ([], {'lr': '(0.001)', 'rho': '(0.9)', 'epsilon': '(1e-06)'}), '(lr=0.001, rho=0.9, epsilon=1e-06)\n', (2027, 2061), False, 'from keras.optimizers import SGD, RMSprop, Adagrad, 
Adadelta, Adam, Adamax\n'), ((2088, 2119), 'keras.optimizers.Adagrad', 'Adagrad', ([], {'lr': '(0.01)', 'epsilon': '(1e-06)'}), '(lr=0.01, epsilon=1e-06)\n', (2095, 2119), False, 'from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam, Adamax\n'), ((2147, 2188), 'keras.optimizers.Adadelta', 'Adadelta', ([], {'lr': '(1.0)', 'rho': '(0.95)', 'epsilon': '(1e-06)'}), '(lr=1.0, rho=0.95, epsilon=1e-06)\n', (2155, 2188), False, 'from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam, Adamax\n'), ((2216, 2276), 'keras.optimizers.Adam', 'Adam', ([], {'lr': '(0.0001 / 2)', 'beta_1': '(0.9)', 'beta_2': '(0.999)', 'epsilon': '(1e-08)'}), '(lr=0.0001 / 2, beta_1=0.9, beta_2=0.999, epsilon=1e-08)\n', (2220, 2276), False, 'from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam, Adamax\n'), ((2302, 2359), 'keras.optimizers.Adamax', 'Adamax', ([], {'lr': '(0.002)', 'beta_1': '(0.9)', 'beta_2': '(0.999)', 'epsilon': '(1e-08)'}), '(lr=0.002, beta_1=0.9, beta_2=0.999, epsilon=1e-08)\n', (2308, 2359), False, 'from keras.optimizers import SGD, RMSprop, Adagrad, Adadelta, Adam, Adamax\n'), ((1873, 1881), 'keras.constraints.nonneg', 'nonneg', ([], {}), '()\n', (1879, 1881), False, 'from keras.constraints import maxnorm, nonneg\n')]
|
#!/usr/bin/env python3
from typing import List
from bip32 import BIP32
from connectrum.client import StratumClient
from tqdm import tqdm
import scripts
from descriptors import ScriptIterator, Path
from scripts import ScriptType
class Utxo:
    """
    Everything required to later spend one unspent transaction output:
    the outpoint (txid + output index), its value in satoshis, and the
    derivation information (path and script type) needed to rebuild the
    spending key/script.
    """

    def __init__(self, txid: str, output_index: int, amount_in_sat: int, path: Path, script_type: ScriptType):
        # Outpoint identifying the UTXO on-chain.
        self.txid, self.output_index = txid, output_index
        # Value carried by the output.
        self.amount_in_sat = amount_in_sat
        # Derivation details used to reconstruct the spending script.
        self.path, self.script_type = path, script_type
async def scan_master_key(client: StratumClient, master_key: BIP32, address_gap: int, account_gap: int,
                          stop_amount_in_sat: int = 500000) -> List[Utxo]:
    """
    Iterate through all the possible addresses of a master key, in order to find its UTXOs.

    :param client: connected Electrum (Stratum) client used for the scripthash RPCs.
    :param master_key: BIP32 master key whose derivation tree is explored.
    :param address_gap: consecutive-unused-address gap before a branch is abandoned.
    :param account_gap: consecutive-unused-account gap before a branch is abandoned.
    :param stop_amount_in_sat: stop scanning early as soon as one output larger than
        this value (in satoshis) is found; defaults to the previously hard-coded
        500000 sat cutoff, so existing callers see identical behavior.
    :return: the unspent outputs discovered before the scan finished (or stopped early).
    """
    script_iter = ScriptIterator(master_key, address_gap, account_gap)
    descriptors = set()  # (path, address-type) pairs already seen, to log each once
    utxos = []
    has_money = False  # becomes True once an output above the stop threshold is found
    with tqdm(total=script_iter.total_scripts(), desc='🏃‍♀️ Searching possible addresses') as progress_bar:
        while not has_money:
            script = script_iter.next_script()
            if not script:
                break  # iterator exhausted: all gaps exceeded
            progress_bar.update(1)

            # TODO: use an electrum client that supports batching
            # TODO: parallelize fetching
            # Renamed from `hash`/`type` to avoid shadowing the builtins.
            script_hash = _electrum_script_hash(script.program)
            response = await client.RPC('blockchain.scripthash.get_history', script_hash)

            if len(response) > 0:
                path, addr_type = script.path_with_account().path, script.type().name
                if (path, addr_type) not in descriptors:
                    descriptors.add((path, addr_type))
                    message = f'🕵 Found used addresses at path={path} address_type={addr_type}'
                    # print the message replacing the current line
                    # print(f'\r{message}'.ljust(progress_bar.ncols))

                response = await client.RPC('blockchain.scripthash.listunspent', script_hash)
                for entry in response:
                    txid, output_index, amount = entry['tx_hash'], entry['tx_pos'], entry['value']
                    utxo = Utxo(txid, output_index, amount,
                                script.full_path(), script.type())
                    utxos.append(utxo)

                    message = f'💰 Found unspent output at ({txid}, {output_index}) with {amount} sats'
                    # Early-exit threshold (was the magic constant 500000; see `# 90000000` note).
                    if amount > stop_amount_in_sat:
                        print('found her')
                        has_money = True
                    # print the message replacing the current line
                    # print(f'\r{message}'.ljust(progress_bar.ncols))

                script.set_as_used()
                progress_bar.total = script_iter.total_scripts()
                progress_bar.refresh()

    return utxos
return utxos
def _electrum_script_hash(script: bytes) -> str:
    """
    Compute the hex-encoded big-endian sha256 hash of a script.

    Electrum's ``blockchain.scripthash.*`` RPCs expect the sha256 digest of the
    output script with its bytes reversed, rendered as hex.

    :param script: raw output script bytes.
    :return: hex string usable as an Electrum scripthash.
    """
    # Renamed from `bytes` to avoid shadowing the builtin type.
    digest = bytearray(scripts.sha256(script))
    digest.reverse()
    return digest.hex()
|
[
"scripts.sha256",
"descriptors.ScriptIterator"
] |
[((857, 909), 'descriptors.ScriptIterator', 'ScriptIterator', (['master_key', 'address_gap', 'account_gap'], {}), '(master_key, address_gap, account_gap)\n', (871, 909), False, 'from descriptors import ScriptIterator, Path\n'), ((3051, 3073), 'scripts.sha256', 'scripts.sha256', (['script'], {}), '(script)\n', (3065, 3073), False, 'import scripts\n')]
|
from unittest.mock import create_autospec
import pytest
from h.services.annotation_moderation import AnnotationModerationService
from h.services.groupfinder import GroupfinderService
from h.services.links import LinksService
from h.services.nipsa import NipsaService
from h.services.search_index import SearchIndexService
from h.services.search_index._queue import Queue
__all__ = (
"mock_service",
"search_index",
"nipsa_service",
"user_service",
"links_service",
"groupfinder_service",
"moderation_service",
)
from h.services.user import UserService
@pytest.fixture
def mock_service(pyramid_config):
    """Factory fixture: autospec a service class and register it under a name."""

    def mock_service(service_class, name, spec_set=True, **kwargs):
        # Build a spec'd mock instance, register it with pyramid, hand it back.
        service = create_autospec(service_class, instance=True,
                                  spec_set=spec_set, **kwargs)
        pyramid_config.register_service(service, name=name)
        return service

    return mock_service
@pytest.fixture
def search_index(mock_service):
    """SearchIndexService mock (loose spec) with a stubbed internal queue."""
    queue_stub = create_autospec(Queue, spec_set=True, instance=True)
    return mock_service(
        SearchIndexService, "search_index", spec_set=False, _queue=queue_stub
    )
@pytest.fixture
def nipsa_service(mock_service):
    """NipsaService mock whose is_flagged() always answers False."""
    service = mock_service(NipsaService, name="nipsa")
    service.is_flagged.return_value = False
    return service
@pytest.fixture
def user_service(mock_service):
    """Autospecced UserService registered under the name "user"."""
    service = mock_service(UserService, name="user")
    return service
@pytest.fixture
def links_service(mock_service):
    """Autospecced LinksService registered under the name "links"."""
    service = mock_service(LinksService, name="links")
    return service
@pytest.fixture
def groupfinder_service(pyramid_config):
    """GroupfinderService mock registered under the IGroupService interface."""
    group_service = create_autospec(GroupfinderService, instance=True, spec_set=True)
    pyramid_config.register_service(
        group_service, iface="h.interfaces.IGroupService"
    )
    return group_service
@pytest.fixture
def moderation_service(mock_service):
    """Autospecced AnnotationModerationService."""
    service = mock_service(AnnotationModerationService, name="annotation_moderation")
    return service
|
[
"unittest.mock.create_autospec"
] |
[((1618, 1683), 'unittest.mock.create_autospec', 'create_autospec', (['GroupfinderService'], {'instance': '(True)', 'spec_set': '(True)'}), '(GroupfinderService, instance=True, spec_set=True)\n', (1633, 1683), False, 'from unittest.mock import create_autospec\n'), ((722, 796), 'unittest.mock.create_autospec', 'create_autospec', (['service_class'], {'instance': '(True)', 'spec_set': 'spec_set'}), '(service_class, instance=True, spec_set=spec_set, **kwargs)\n', (737, 796), False, 'from unittest.mock import create_autospec\n'), ((1094, 1146), 'unittest.mock.create_autospec', 'create_autospec', (['Queue'], {'spec_set': '(True)', 'instance': '(True)'}), '(Queue, spec_set=True, instance=True)\n', (1109, 1146), False, 'from unittest.mock import create_autospec\n')]
|
# https://deeplearningcourses.com/c/artificial-intelligence-reinforcement-learning-in-python
# https://www.udemy.com/artificial-intelligence-reinforcement-learning-in-python
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
import numpy as np
from grid_world import windy_grid, ACTION_SPACE
SMALL_ENOUGH = 1e-3 # threshold for convergence
def print_values(V, g):
    """Print the value table V as a grid, one world row per output line."""
    for row in range(g.rows):
        print("---------------------------")
        cells = []
        for col in range(g.cols):
            v = V.get((row, col), 0)
            # Pad non-negative values with a leading space so columns line up
            # with negative ones (whose minus sign occupies that slot).
            cells.append(" %.2f|" % v if v >= 0 else "%.2f|" % v)
        print("".join(cells))
def print_policy(P, g):
    """Print the policy P as a grid of action labels, one world row per line."""
    for row in range(g.rows):
        print("---------------------------")
        for col in range(g.cols):
            action = P.get((row, col), ' ')
            print("  %s  |" % action, end="")
        print("")
if __name__ == '__main__':

    ### define transition probabilities and grid ###
    # the key is (s, a, s'), the value is the probability
    # that is, transition_probs[(s, a, s')] = p(s' | s, a)
    # any key NOT present will be considered impossible (i.e. probability 0)
    # we can take this from the grid object and convert it to the format we want
    transition_probs = {}

    # to reduce the dimensionality of the dictionary, we'll use deterministic
    # rewards, r(s, a, s')
    # note: you could make it simpler by using r(s') since the reward doesn't
    # actually depend on (s, a)
    rewards = {}

    # Flatten the grid's nested {(s, a): {s2: p}} structure into flat
    # (s, a, s2)-keyed dictionaries for the Bellman update below.
    grid = windy_grid()
    for (s, a), v in grid.probs.items():
        for s2, p in v.items():
            transition_probs[(s, a, s2)] = p
            rewards[(s, a, s2)] = grid.rewards.get(s2, 0)

    ### probabilistic policy ###
    # Fixed stochastic policy to evaluate: maps state -> {action: probability}.
    policy = {
        (2, 0): {'U': 0.5, 'R': 0.5},
        (1, 0): {'U': 1.0},
        (0, 0): {'R': 1.0},
        (0, 1): {'R': 1.0},
        (0, 2): {'R': 1.0},
        (1, 2): {'U': 1.0},
        (2, 1): {'R': 1.0},
        (2, 2): {'U': 1.0},
        (2, 3): {'L': 1.0},
    }
    print_policy(policy, grid)

    # initialize V(s) = 0 for every state
    V = {}
    for s in grid.all_states():
        V[s] = 0

    gamma = 0.9  # discount factor

    # Iterative policy evaluation: sweep all states, applying the Bellman
    # expectation backup, until the largest single-state change is below
    # the SMALL_ENOUGH convergence threshold.
    it = 0
    while True:
        biggest_change = 0
        for s in grid.all_states():
            if not grid.is_terminal(s):
                old_v = V[s]
                new_v = 0  # we will accumulate the answer
                for a in ACTION_SPACE:
                    for s2 in grid.all_states():
                        # probability of choosing action a in state s under the policy
                        action_prob = policy[s].get(a, 0)

                        # reward is a function of (s, a, s'), 0 if not specified
                        r = rewards.get((s, a, s2), 0)
                        new_v += action_prob * transition_probs.get((s, a, s2), 0) * (r + gamma * V[s2])

                # after done getting the new value, update the value table
                V[s] = new_v
                biggest_change = max(biggest_change, np.abs(old_v - V[s]))

        print("iter:", it, "biggest_change:", biggest_change)
        print_values(V, grid)
        it += 1

        if biggest_change < SMALL_ENOUGH:
            break
    print("V:", V)

    print("\n\n")
    # sanity check
    # at state (1, 2), value is 0.5 * 0.9 * 1 + 0.5 * (-1) = -0.05
|
[
"numpy.abs",
"grid_world.windy_grid",
"builtins.range"
] |
[((487, 500), 'builtins.range', 'range', (['g.rows'], {}), '(g.rows)\n', (492, 500), False, 'from builtins import range\n'), ((783, 796), 'builtins.range', 'range', (['g.rows'], {}), '(g.rows)\n', (788, 796), False, 'from builtins import range\n'), ((1553, 1565), 'grid_world.windy_grid', 'windy_grid', ([], {}), '()\n', (1563, 1565), False, 'from grid_world import windy_grid, ACTION_SPACE\n'), ((556, 569), 'builtins.range', 'range', (['g.cols'], {}), '(g.cols)\n', (561, 569), False, 'from builtins import range\n'), ((852, 865), 'builtins.range', 'range', (['g.cols'], {}), '(g.cols)\n', (857, 865), False, 'from builtins import range\n'), ((2870, 2890), 'numpy.abs', 'np.abs', (['(old_v - V[s])'], {}), '(old_v - V[s])\n', (2876, 2890), True, 'import numpy as np\n')]
|
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
from get_radar_loc_gt import yaw, rotToYawPitchRoll
def getRotDiff(r1, r2):
    """Absolute yaw difference between two heading angles r1 and r2."""
    # Compose the relative rotation C2^T * C1 and read back its yaw component.
    relative = np.matmul(yaw(r2).transpose(), yaw(r1))
    yaw_err, _, _ = rotToYawPitchRoll(relative)
    return abs(yaw_err)
if __name__ == "__main__":
file = "localization_accuracy_icra4.csv"
dt1 = []
dt2 = []
dt3 = []
dt4 = []
dt5 = []
dr1 = []
dr2 = []
dr3 = []
dr4 = []
dr5 = []
with open(file, 'r') as f:
f.readline()
for line in f:
row = line.split(',')
gtx = float(row[15])
gty = float(row[16])
gtyaw = float(row[17])
dt1.append(np.sqrt((gtx - float(row[0]))**2 + (gty - float(row[1]))**2))
dr1.append(180 * getRotDiff(gtyaw, float(row[2])) / np.pi)
dt2.append(np.sqrt((gtx - float(row[3]))**2 + (gty - float(row[4]))**2))
dr2.append(180 * getRotDiff(gtyaw, float(row[5])) / np.pi)
dt3.append(np.sqrt((gtx - float(row[6]))**2 + (gty - float(row[7]))**2))
dr3.append(180 * getRotDiff(gtyaw, float(row[8])) / np.pi)
dt4.append(np.sqrt((gtx - float(row[9]))**2 + (gty - float(row[10]))**2))
dr4.append(180 * getRotDiff(gtyaw, float(row[11])) / np.pi)
dt5.append(np.sqrt((gtx - float(row[12]))**2 + (gty - float(row[13]))**2))
dr5.append(180 * getRotDiff(gtyaw, float(row[14])) / np.pi)
dt1 = np.array(dt1)
dt2 = np.array(dt2)
dt3 = np.array(dt3)
dt4 = np.array(dt4)
dt5 = np.array(dt5)
dr1 = np.array(dr1)
dr2 = np.array(dr2)
dr3 = np.array(dr3)
dr4 = np.array(dr4)
dr5 = np.array(dr5)
np.savetxt('dr3', dr3)
print('RIGID: dt: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(np.median(dt1), np.mean((dt1 - np.median(dt1))**2), np.median(dr1), np.mean((dr1 - np.median(dr1))**2)))
print('DOPP ONLY: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(np.median(dt2), np.mean((dt2 - np.median(dt2))**2), np.median(dr2), np.mean((dr2 - np.median(dr2))**2)))
print('DOPP + MD: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(np.median(dt3), np.mean((dt3 - np.median(dt3))**2), np.median(dr3), np.mean((dr3 - np.median(dr3))**2)))
print('MD ONLY: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(np.median(dt4), np.mean((dt4 - np.median(dt4))**2), np.median(dr4), np.mean((dr4 - np.median(dr4))**2)))
print('MD + DOPP: {} sigma_dt: {} dr: {} sigma_dr: {}'.format(np.median(dt5), np.mean((dt5 - np.median(dt5))**2), np.median(dr5), np.mean((dr5 - np.median(dr5))**2)))
matplotlib.rcParams.update({"font.size" : 16, 'xtick.labelsize' : 16, 'ytick.labelsize' : 16,
'axes.linewidth' : 1.5, 'font.family' : 'serif', 'pdf.fonttype' : 42})
plt.figure(figsize=(10, 5.5))
bins = np.arange(0, 4.0, 0.25)
plt.grid(which='both', linestyle='--', alpha=0.5, axis='y')
plt.hist([dt1, dt4, dt3], bins=bins, label=['RIGID', 'MC', 'MC+Dopp'], color=['r', 'b', 'limegreen'], rwidth=0.6)
plt.xlabel('Translation Error (m)', fontsize=18)
plt.ylabel('Number of Radar Pairs', fontsize=18)
plt.legend(loc='best')
plt.savefig('localization_accuracy.pdf', bbox_inches='tight', pad_inches=0.0)
# plt.show()
|
[
"matplotlib.pyplot.hist",
"numpy.median",
"matplotlib.rcParams.update",
"numpy.savetxt",
"get_radar_loc_gt.rotToYawPitchRoll",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.figure",
"numpy.array",
"numpy.arange",
"matplotlib.pyplot.ylabel",
"get_radar_loc_gt.yaw",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xlabel"
] |
[((155, 162), 'get_radar_loc_gt.yaw', 'yaw', (['r1'], {}), '(r1)\n', (158, 162), False, 'from get_radar_loc_gt import yaw, rotToYawPitchRoll\n'), ((172, 179), 'get_radar_loc_gt.yaw', 'yaw', (['r2'], {}), '(r2)\n', (175, 179), False, 'from get_radar_loc_gt import yaw, rotToYawPitchRoll\n'), ((242, 266), 'get_radar_loc_gt.rotToYawPitchRoll', 'rotToYawPitchRoll', (['C_err'], {}), '(C_err)\n', (259, 266), False, 'from get_radar_loc_gt import yaw, rotToYawPitchRoll\n'), ((1507, 1520), 'numpy.array', 'np.array', (['dt1'], {}), '(dt1)\n', (1515, 1520), True, 'import numpy as np\n'), ((1531, 1544), 'numpy.array', 'np.array', (['dt2'], {}), '(dt2)\n', (1539, 1544), True, 'import numpy as np\n'), ((1555, 1568), 'numpy.array', 'np.array', (['dt3'], {}), '(dt3)\n', (1563, 1568), True, 'import numpy as np\n'), ((1579, 1592), 'numpy.array', 'np.array', (['dt4'], {}), '(dt4)\n', (1587, 1592), True, 'import numpy as np\n'), ((1603, 1616), 'numpy.array', 'np.array', (['dt5'], {}), '(dt5)\n', (1611, 1616), True, 'import numpy as np\n'), ((1627, 1640), 'numpy.array', 'np.array', (['dr1'], {}), '(dr1)\n', (1635, 1640), True, 'import numpy as np\n'), ((1651, 1664), 'numpy.array', 'np.array', (['dr2'], {}), '(dr2)\n', (1659, 1664), True, 'import numpy as np\n'), ((1675, 1688), 'numpy.array', 'np.array', (['dr3'], {}), '(dr3)\n', (1683, 1688), True, 'import numpy as np\n'), ((1699, 1712), 'numpy.array', 'np.array', (['dr4'], {}), '(dr4)\n', (1707, 1712), True, 'import numpy as np\n'), ((1723, 1736), 'numpy.array', 'np.array', (['dr5'], {}), '(dr5)\n', (1731, 1736), True, 'import numpy as np\n'), ((1742, 1764), 'numpy.savetxt', 'np.savetxt', (['"""dr3"""', 'dr3'], {}), "('dr3', dr3)\n", (1752, 1764), True, 'import numpy as np\n'), ((2624, 2790), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'font.size': 16, 'xtick.labelsize': 16, 'ytick.labelsize': 16,\n 'axes.linewidth': 1.5, 'font.family': 'serif', 'pdf.fonttype': 42}"], {}), "({'font.size': 16, 'xtick.labelsize': 
16,\n 'ytick.labelsize': 16, 'axes.linewidth': 1.5, 'font.family': 'serif',\n 'pdf.fonttype': 42})\n", (2650, 2790), False, 'import matplotlib\n'), ((2825, 2854), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 5.5)'}), '(figsize=(10, 5.5))\n', (2835, 2854), True, 'import matplotlib.pyplot as plt\n'), ((2866, 2889), 'numpy.arange', 'np.arange', (['(0)', '(4.0)', '(0.25)'], {}), '(0, 4.0, 0.25)\n', (2875, 2889), True, 'import numpy as np\n'), ((2894, 2953), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'which': '"""both"""', 'linestyle': '"""--"""', 'alpha': '(0.5)', 'axis': '"""y"""'}), "(which='both', linestyle='--', alpha=0.5, axis='y')\n", (2902, 2953), True, 'import matplotlib.pyplot as plt\n'), ((2958, 3075), 'matplotlib.pyplot.hist', 'plt.hist', (['[dt1, dt4, dt3]'], {'bins': 'bins', 'label': "['RIGID', 'MC', 'MC+Dopp']", 'color': "['r', 'b', 'limegreen']", 'rwidth': '(0.6)'}), "([dt1, dt4, dt3], bins=bins, label=['RIGID', 'MC', 'MC+Dopp'],\n color=['r', 'b', 'limegreen'], rwidth=0.6)\n", (2966, 3075), True, 'import matplotlib.pyplot as plt\n'), ((3076, 3124), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Translation Error (m)"""'], {'fontsize': '(18)'}), "('Translation Error (m)', fontsize=18)\n", (3086, 3124), True, 'import matplotlib.pyplot as plt\n'), ((3129, 3177), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of Radar Pairs"""'], {'fontsize': '(18)'}), "('Number of Radar Pairs', fontsize=18)\n", (3139, 3177), True, 'import matplotlib.pyplot as plt\n'), ((3182, 3204), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (3192, 3204), True, 'import matplotlib.pyplot as plt\n'), ((3209, 3286), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""localization_accuracy.pdf"""'], {'bbox_inches': '"""tight"""', 'pad_inches': '(0.0)'}), "('localization_accuracy.pdf', bbox_inches='tight', pad_inches=0.0)\n", (3220, 3286), True, 'import matplotlib.pyplot as plt\n'), ((1832, 1846), 'numpy.median', 
'np.median', (['dt1'], {}), '(dt1)\n', (1841, 1846), True, 'import numpy as np\n'), ((1884, 1898), 'numpy.median', 'np.median', (['dr1'], {}), '(dr1)\n', (1893, 1898), True, 'import numpy as np\n'), ((2003, 2017), 'numpy.median', 'np.median', (['dt2'], {}), '(dt2)\n', (2012, 2017), True, 'import numpy as np\n'), ((2055, 2069), 'numpy.median', 'np.median', (['dr2'], {}), '(dr2)\n', (2064, 2069), True, 'import numpy as np\n'), ((2174, 2188), 'numpy.median', 'np.median', (['dt3'], {}), '(dt3)\n', (2183, 2188), True, 'import numpy as np\n'), ((2226, 2240), 'numpy.median', 'np.median', (['dr3'], {}), '(dr3)\n', (2235, 2240), True, 'import numpy as np\n'), ((2343, 2357), 'numpy.median', 'np.median', (['dt4'], {}), '(dt4)\n', (2352, 2357), True, 'import numpy as np\n'), ((2395, 2409), 'numpy.median', 'np.median', (['dr4'], {}), '(dr4)\n', (2404, 2409), True, 'import numpy as np\n'), ((2514, 2528), 'numpy.median', 'np.median', (['dt5'], {}), '(dt5)\n', (2523, 2528), True, 'import numpy as np\n'), ((2566, 2580), 'numpy.median', 'np.median', (['dr5'], {}), '(dr5)\n', (2575, 2580), True, 'import numpy as np\n'), ((1863, 1877), 'numpy.median', 'np.median', (['dt1'], {}), '(dt1)\n', (1872, 1877), True, 'import numpy as np\n'), ((1915, 1929), 'numpy.median', 'np.median', (['dr1'], {}), '(dr1)\n', (1924, 1929), True, 'import numpy as np\n'), ((2034, 2048), 'numpy.median', 'np.median', (['dt2'], {}), '(dt2)\n', (2043, 2048), True, 'import numpy as np\n'), ((2086, 2100), 'numpy.median', 'np.median', (['dr2'], {}), '(dr2)\n', (2095, 2100), True, 'import numpy as np\n'), ((2205, 2219), 'numpy.median', 'np.median', (['dt3'], {}), '(dt3)\n', (2214, 2219), True, 'import numpy as np\n'), ((2257, 2271), 'numpy.median', 'np.median', (['dr3'], {}), '(dr3)\n', (2266, 2271), True, 'import numpy as np\n'), ((2374, 2388), 'numpy.median', 'np.median', (['dt4'], {}), '(dt4)\n', (2383, 2388), True, 'import numpy as np\n'), ((2426, 2440), 'numpy.median', 'np.median', (['dr4'], {}), '(dr4)\n', 
(2435, 2440), True, 'import numpy as np\n'), ((2545, 2559), 'numpy.median', 'np.median', (['dt5'], {}), '(dt5)\n', (2554, 2559), True, 'import numpy as np\n'), ((2597, 2611), 'numpy.median', 'np.median', (['dr5'], {}), '(dr5)\n', (2606, 2611), True, 'import numpy as np\n')]
|
r"""
Support for embedded TeX expressions in Matplotlib.
Requirements:
* LaTeX.
* \*Agg backends: dvipng>=1.6.
* PS backend: PSfrag, dvips, and Ghostscript>=9.0.
* PDF and SVG backends: if LuaTeX is present, it will be used to speed up some
post-processing steps, but note that it is not used to parse the TeX string
itself (only LaTeX is supported).
To enable TeX rendering of all text in your Matplotlib figure, set
:rc:`text.usetex` to True.
TeX and dvipng/dvips processing results are cached
in ~/.matplotlib/tex.cache for reuse between sessions.
`TexManager.get_rgba` can also be used to directly obtain raster output as RGBA
NumPy arrays.
"""
import functools
import hashlib
import logging
import os
from pathlib import Path
import subprocess
from tempfile import TemporaryDirectory
import numpy as np
from packaging.version import parse as parse_version
import matplotlib as mpl
from matplotlib import _api, cbook, dviread, rcParams
_log = logging.getLogger(__name__)
def _usepackage_if_not_loaded(package, *, option=None):
"""
Output LaTeX code that loads a package (possibly with an option) if it
hasn't been loaded yet.
LaTeX cannot load twice a package with different options, so this helper
can be used to protect against users loading arbitrary packages/options in
their custom preamble.
"""
option = f"[{option}]" if option is not None else ""
return (
r"\makeatletter"
r"\@ifpackageloaded{%(package)s}{}{\usepackage%(option)s{%(package)s}}"
r"\makeatother"
) % {"package": package, "option": option}
class TexManager:
    """
    Convert strings to dvi files using TeX, caching the results to a directory.

    Repeated calls to this constructor always return the same instance.
    """

    # On-disk cache directory for .tex/.dvi/.png artifacts (persists across sessions).
    texcache = os.path.join(mpl.get_cachedir(), 'tex.cache')
    # In-memory cache of alpha arrays keyed by (tex, fontconfig, fontsize, dpi).
    _grey_arrayd = {}

    _font_family = 'serif'
    _font_families = ('serif', 'sans-serif', 'cursive', 'monospace')
    # Map font name -> (fontconfig abbreviation, LaTeX preamble snippet).
    _font_info = {
        'new century schoolbook': ('pnc', r'\renewcommand{\rmdefault}{pnc}'),
        'bookman': ('pbk', r'\renewcommand{\rmdefault}{pbk}'),
        'times': ('ptm', r'\usepackage{mathptmx}'),
        'palatino': ('ppl', r'\usepackage{mathpazo}'),
        'zapf chancery': ('pzc', r'\usepackage{chancery}'),
        'cursive': ('pzc', r'\usepackage{chancery}'),
        'charter': ('pch', r'\usepackage{charter}'),
        'serif': ('cmr', ''),
        'sans-serif': ('cmss', ''),
        'helvetica': ('phv', r'\usepackage{helvet}'),
        'avant garde': ('pag', r'\usepackage{avant}'),
        'courier': ('pcr', r'\usepackage{courier}'),
        # Loading the type1ec package ensures that cm-super is installed, which
        # is necessary for unicode computer modern. (It also allows the use of
        # computer modern at arbitrary sizes, but that's just a side effect.)
        'monospace': ('cmtt', r'\usepackage{type1ec}'),
        'computer modern roman': ('cmr', r'\usepackage{type1ec}'),
        'computer modern sans serif': ('cmss', r'\usepackage{type1ec}'),
        'computer modern typewriter': ('cmtt', r'\usepackage{type1ec}')}
    # Map individual font name -> generic family it belongs to.
    _font_types = {
        'new century schoolbook': 'serif', 'bookman': 'serif',
        'times': 'serif', 'palatino': 'serif', 'charter': 'serif',
        'computer modern roman': 'serif', 'zapf chancery': 'cursive',
        'helvetica': 'sans-serif', 'avant garde': 'sans-serif',
        'computer modern sans serif': 'sans-serif',
        'courier': 'monospace', 'computer modern typewriter': 'monospace'}

    # Deprecated public aliases of the private attributes above.
    grey_arrayd = _api.deprecate_privatize_attribute("3.5")
    font_family = _api.deprecate_privatize_attribute("3.5")
    font_families = _api.deprecate_privatize_attribute("3.5")
    font_info = _api.deprecate_privatize_attribute("3.5")

    @functools.lru_cache()  # Always return the same instance.
    def __new__(cls):
        Path(cls.texcache).mkdir(parents=True, exist_ok=True)
        return object.__new__(cls)

    def get_font_config(self):
        """
        Pick usetex-compatible fonts from the font.* rcParams, record the
        matching preamble snippets, and return a string key identifying the
        configuration (used in cache filenames).
        """
        ff = rcParams['font.family']
        ff_val = ff[0].lower() if len(ff) == 1 else None
        reduced_notation = False
        if len(ff) == 1 and ff_val in self._font_families:
            self._font_family = ff_val
        elif len(ff) == 1 and ff_val in self._font_info:
            # font.family named a specific font (e.g. "times") rather than a
            # generic family; resolve it to its family via _font_types.
            reduced_notation = True
            self._font_family = self._font_types[ff_val]
        else:
            _log.info('font.family must be one of (%s) when text.usetex is '
                      'True. serif will be used by default.',
                      ', '.join(self._font_families))
            self._font_family = 'serif'

        fontconfig = [self._font_family]
        fonts = {}
        for font_family in self._font_families:
            if reduced_notation and self._font_family == font_family:
                fonts[font_family] = self._font_info[ff_val]
            else:
                # First usetex-compatible entry in rcParams['font.<family>'] wins.
                for font in rcParams['font.' + font_family]:
                    if font.lower() in self._font_info:
                        fonts[font_family] = self._font_info[font.lower()]
                        _log.debug(
                            'family: %s, font: %s, info: %s',
                            font_family, font, self._font_info[font.lower()])
                        break
                    else:
                        _log.debug('%s font is not compatible with usetex.',
                                   font)
                else:
                    _log.info('No LaTeX-compatible font found for the %s font'
                              'family in rcParams. Using default.',
                              font_family)
                    fonts[font_family] = self._font_info[font_family]
            fontconfig.append(fonts[font_family][0])
        # Add a hash of the latex preamble to fontconfig so that the
        # correct png is selected for strings rendered with same font and dpi
        # even if the latex preamble changes within the session
        preamble_bytes = self.get_custom_preamble().encode('utf-8')
        fontconfig.append(hashlib.md5(preamble_bytes).hexdigest())

        # The following packages and commands need to be included in the latex
        # file's preamble:
        cmd = {fonts[family][1]
               for family in ['serif', 'sans-serif', 'monospace']}
        if self._font_family == 'cursive':
            cmd.add(fonts['cursive'][1])
        cmd.add(r'\usepackage{type1cm}')
        self._font_preamble = '\n'.join(sorted(cmd))

        return ''.join(fontconfig)

    def get_basefile(self, tex, fontsize, dpi=None):
        """
        Return a filename based on a hash of the string, fontsize, and dpi.
        """
        s = ''.join([tex, self.get_font_config(), '%f' % fontsize,
                     self.get_custom_preamble(), str(dpi or '')])
        return os.path.join(
            self.texcache, hashlib.md5(s.encode('utf-8')).hexdigest())

    def get_font_preamble(self):
        """
        Return a string containing font configuration for the tex preamble.
        """
        return self._font_preamble

    def get_custom_preamble(self):
        """Return a string containing user additions to the tex preamble."""
        return rcParams['text.latex.preamble']

    def _get_preamble(self):
        # Full document preamble: font setup, unicode handling, geometry,
        # then the user's custom additions.
        return "\n".join([
            r"\documentclass{article}",
            # Pass-through \mathdefault, which is used in non-usetex mode to
            # use the default text font but was historically suppressed in
            # usetex mode.
            r"\newcommand{\mathdefault}[1]{#1}",
            self._font_preamble,
            r"\usepackage[utf8]{inputenc}",
            r"\DeclareUnicodeCharacter{2212}{\ensuremath{-}}",
            # geometry is loaded before the custom preamble as convert_psfrags
            # relies on a custom preamble to change the geometry.
            r"\usepackage[papersize=72in, margin=1in]{geometry}",
            self.get_custom_preamble(),
            # Use `underscore` package to take care of underscores in text
            # The [strings] option allows to use underscores in file names
            _usepackage_if_not_loaded("underscore", option="strings"),
            # Custom packages (e.g. newtxtext) may already have loaded textcomp
            # with different options.
            _usepackage_if_not_loaded("textcomp"),
        ])

    def make_tex(self, tex, fontsize):
        """
        Generate a tex file to render the tex string at a specific font size.

        Return the file name.
        """
        basefile = self.get_basefile(tex, fontsize)
        texfile = '%s.tex' % basefile
        # Wrap the string in the family-appropriate font switch command.
        fontcmd = {'sans-serif': r'{\sffamily %s}',
                   'monospace': r'{\ttfamily %s}'}.get(self._font_family,
                                                       r'{\rmfamily %s}')

        Path(texfile).write_text(
            r"""
%s
\pagestyle{empty}
\begin{document}
%% The empty hbox ensures that a page is printed even for empty inputs, except
%% when using psfrag which gets confused by it.
\fontsize{%f}{%f}%%
\ifdefined\psfrag\else\hbox{}\fi%%
%s
\end{document}
""" % (self._get_preamble(), fontsize, fontsize * 1.25, fontcmd % tex),
            encoding='utf-8')

        return texfile

    def _run_checked_subprocess(self, command, tex, *, cwd=None):
        # Run an external tool (latex/dvipng), converting its failures into
        # RuntimeErrors that include the offending tex string and full report.
        _log.debug(cbook._pformat_subprocess(command))
        try:
            report = subprocess.check_output(
                command, cwd=cwd if cwd is not None else self.texcache,
                stderr=subprocess.STDOUT)
        except FileNotFoundError as exc:
            raise RuntimeError(
                'Failed to process string with tex because {} could not be '
                'found'.format(command[0])) from exc
        except subprocess.CalledProcessError as exc:
            raise RuntimeError(
                '{prog} was not able to process the following string:\n'
                '{tex!r}\n\n'
                'Here is the full report generated by {prog}:\n'
                '{exc}\n\n'.format(
                    prog=command[0],
                    tex=tex.encode('unicode_escape'),
                    exc=exc.output.decode('utf-8'))) from exc
        _log.debug(report)
        return report

    def make_dvi(self, tex, fontsize):
        """
        Generate a dvi file containing latex's layout of tex string.

        Return the file name.
        """
        basefile = self.get_basefile(tex, fontsize)
        dvifile = '%s.dvi' % basefile
        if not os.path.exists(dvifile):
            texfile = Path(self.make_tex(tex, fontsize))
            # Generate the dvi in a temporary directory to avoid race
            # conditions e.g. if multiple processes try to process the same tex
            # string at the same time.  Having tmpdir be a subdirectory of the
            # final output dir ensures that they are on the same filesystem,
            # and thus replace() works atomically.  It also allows referring to
            # the texfile with a relative path (for pathological MPLCONFIGDIRs,
            # the absolute path may contain characters (e.g. ~) that TeX does
            # not support.)
            with TemporaryDirectory(dir=Path(dvifile).parent) as tmpdir:
                self._run_checked_subprocess(
                    ["latex", "-interaction=nonstopmode", "--halt-on-error",
                     f"../{texfile.name}"], tex, cwd=tmpdir)
                (Path(tmpdir) / Path(dvifile).name).replace(dvifile)
        return dvifile

    def make_png(self, tex, fontsize, dpi):
        """
        Generate a png file containing latex's rendering of tex string.

        Return the file name.
        """
        basefile = self.get_basefile(tex, fontsize, dpi)
        pngfile = '%s.png' % basefile
        # see get_rgba for a discussion of the background
        if not os.path.exists(pngfile):
            dvifile = self.make_dvi(tex, fontsize)
            cmd = ["dvipng", "-bg", "Transparent", "-D", str(dpi),
                   "-T", "tight", "-o", pngfile, dvifile]
            # When testing, disable FreeType rendering for reproducibility; but
            # dvipng 1.16 has a bug (fixed in f3ff241) that breaks --freetype0
            # mode, so for it we keep FreeType enabled; the image will be
            # slightly off.
            bad_ver = parse_version("1.16")
            if (getattr(mpl, "_called_from_pytest", False)
                    and mpl._get_executable_info("dvipng").version != bad_ver):
                cmd.insert(1, "--freetype0")
            self._run_checked_subprocess(cmd, tex)
        return pngfile

    def get_grey(self, tex, fontsize=None, dpi=None):
        """Return the alpha channel."""
        if not fontsize:
            fontsize = rcParams['font.size']
        if not dpi:
            dpi = rcParams['savefig.dpi']
        key = tex, self.get_font_config(), fontsize, dpi
        alpha = self._grey_arrayd.get(key)
        if alpha is None:
            # Render via dvipng and keep only the alpha channel of the RGBA png.
            pngfile = self.make_png(tex, fontsize, dpi)
            rgba = mpl.image.imread(os.path.join(self.texcache, pngfile))
            self._grey_arrayd[key] = alpha = rgba[:, :, -1]
        return alpha

    def get_rgba(self, tex, fontsize=None, dpi=None, rgb=(0, 0, 0)):
        r"""
        Return latex's rendering of the tex string as an rgba array.

        Examples
        --------
        >>> texmanager = TexManager()
        >>> s = r"\TeX\ is $\displaystyle\sum_n\frac{-e^{i\pi}}{2^n}$!"
        >>> Z = texmanager.get_rgba(s, fontsize=12, dpi=80, rgb=(1, 0, 0))
        """
        # Fill the requested rgb color; the rendered glyphs live in alpha.
        alpha = self.get_grey(tex, fontsize, dpi)
        rgba = np.empty((*alpha.shape, 4))
        rgba[..., :3] = mpl.colors.to_rgb(rgb)
        rgba[..., -1] = alpha
        return rgba

    def get_text_width_height_descent(self, tex, fontsize, renderer=None):
        """Return width, height and descent of the text."""
        if tex.strip() == '':
            return 0, 0, 0
        dvifile = self.make_dvi(tex, fontsize)
        dpi_fraction = renderer.points_to_pixels(1.) if renderer else 1
        with dviread.Dvi(dvifile, 72 * dpi_fraction) as dvi:
            page, = dvi
        # A total height (including the descent) needs to be returned.
        return page.width, page.height + page.descent, page.descent
|
[
"matplotlib.get_cachedir",
"matplotlib.cbook._pformat_subprocess",
"matplotlib.dviread.Dvi",
"hashlib.md5",
"numpy.empty",
"subprocess.check_output",
"packaging.version.parse",
"os.path.exists",
"matplotlib.colors.to_rgb",
"pathlib.Path",
"matplotlib._api.deprecate_privatize_attribute",
"functools.lru_cache",
"os.path.join",
"matplotlib._get_executable_info",
"logging.getLogger"
] |
[((996, 1023), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1013, 1023), False, 'import logging\n'), ((3660, 3701), 'matplotlib._api.deprecate_privatize_attribute', '_api.deprecate_privatize_attribute', (['"""3.5"""'], {}), "('3.5')\n", (3694, 3701), False, 'from matplotlib import _api, cbook, dviread, rcParams\n'), ((3721, 3762), 'matplotlib._api.deprecate_privatize_attribute', '_api.deprecate_privatize_attribute', (['"""3.5"""'], {}), "('3.5')\n", (3755, 3762), False, 'from matplotlib import _api, cbook, dviread, rcParams\n'), ((3784, 3825), 'matplotlib._api.deprecate_privatize_attribute', '_api.deprecate_privatize_attribute', (['"""3.5"""'], {}), "('3.5')\n", (3818, 3825), False, 'from matplotlib import _api, cbook, dviread, rcParams\n'), ((3843, 3884), 'matplotlib._api.deprecate_privatize_attribute', '_api.deprecate_privatize_attribute', (['"""3.5"""'], {}), "('3.5')\n", (3877, 3884), False, 'from matplotlib import _api, cbook, dviread, rcParams\n'), ((3893, 3914), 'functools.lru_cache', 'functools.lru_cache', ([], {}), '()\n', (3912, 3914), False, 'import functools\n'), ((1876, 1894), 'matplotlib.get_cachedir', 'mpl.get_cachedir', ([], {}), '()\n', (1892, 1894), True, 'import matplotlib as mpl\n'), ((13939, 13966), 'numpy.empty', 'np.empty', (['(*alpha.shape, 4)'], {}), '((*alpha.shape, 4))\n', (13947, 13966), True, 'import numpy as np\n'), ((13992, 14014), 'matplotlib.colors.to_rgb', 'mpl.colors.to_rgb', (['rgb'], {}), '(rgb)\n', (14009, 14014), True, 'import matplotlib as mpl\n'), ((9552, 9586), 'matplotlib.cbook._pformat_subprocess', 'cbook._pformat_subprocess', (['command'], {}), '(command)\n', (9577, 9586), False, 'from matplotlib import _api, cbook, dviread, rcParams\n'), ((9624, 9734), 'subprocess.check_output', 'subprocess.check_output', (['command'], {'cwd': '(cwd if cwd is not None else self.texcache)', 'stderr': 'subprocess.STDOUT'}), '(command, cwd=cwd if cwd is not None else self.\n texcache, 
stderr=subprocess.STDOUT)\n', (9647, 9734), False, 'import subprocess\n'), ((10753, 10776), 'os.path.exists', 'os.path.exists', (['dvifile'], {}), '(dvifile)\n', (10767, 10776), False, 'import os\n'), ((12122, 12145), 'os.path.exists', 'os.path.exists', (['pngfile'], {}), '(pngfile)\n', (12136, 12145), False, 'import os\n'), ((12614, 12635), 'packaging.version.parse', 'parse_version', (['"""1.16"""'], {}), "('1.16')\n", (12627, 12635), True, 'from packaging.version import parse as parse_version\n'), ((14400, 14439), 'matplotlib.dviread.Dvi', 'dviread.Dvi', (['dvifile', '(72 * dpi_fraction)'], {}), '(dvifile, 72 * dpi_fraction)\n', (14411, 14439), False, 'from matplotlib import _api, cbook, dviread, rcParams\n'), ((3983, 4001), 'pathlib.Path', 'Path', (['cls.texcache'], {}), '(cls.texcache)\n', (3987, 4001), False, 'from pathlib import Path\n'), ((9042, 9055), 'pathlib.Path', 'Path', (['texfile'], {}), '(texfile)\n', (9046, 9055), False, 'from pathlib import Path\n'), ((13356, 13392), 'os.path.join', 'os.path.join', (['self.texcache', 'pngfile'], {}), '(self.texcache, pngfile)\n', (13368, 13392), False, 'import os\n'), ((6210, 6237), 'hashlib.md5', 'hashlib.md5', (['preamble_bytes'], {}), '(preamble_bytes)\n', (6221, 6237), False, 'import hashlib\n'), ((12721, 12755), 'matplotlib._get_executable_info', 'mpl._get_executable_info', (['"""dvipng"""'], {}), "('dvipng')\n", (12745, 12755), True, 'import matplotlib as mpl\n'), ((11457, 11470), 'pathlib.Path', 'Path', (['dvifile'], {}), '(dvifile)\n', (11461, 11470), False, 'from pathlib import Path\n'), ((11695, 11707), 'pathlib.Path', 'Path', (['tmpdir'], {}), '(tmpdir)\n', (11699, 11707), False, 'from pathlib import Path\n'), ((11710, 11723), 'pathlib.Path', 'Path', (['dvifile'], {}), '(dvifile)\n', (11714, 11723), False, 'from pathlib import Path\n')]
|
"""
Settings and global config values for ricecooker.
"""
import atexit
import hashlib
import logging.config
import os
import shutil
import socket
import tempfile
import requests
from requests_file import FileAdapter
# Run-state flags, presumably toggled from command-line options by the chef
# runner — TODO confirm against the CLI argument parsing.
UPDATE = False
COMPRESS = False
THUMBNAILS = False
PUBLISH = False
# Shared runtime singletons, populated elsewhere during a chef run.
PROGRESS_MANAGER = None
SUSHI_BAR_CLIENT = None
STAGE = False
# When this is set to true, any failure will raise an error and stop the chef.
# This will likely be set to true in a future version of ricecooker, once
# we can ensure all ricecooker internal functions handle non-fatal errors
# properly.
STRICT = False
# Sometimes chef runs will get stuck indefinitely waiting on data from SSL conn,
# so we add a timeout value as suggested in https://stackoverflow.com/a/30771995
socket.setdefaulttimeout(20)
# Logging is configured globally by calling setup_logging() in the chef's `main`
# Use this as `from ricecooker.config import LOGGER` in your sushichef.py code,
# or use the standard `logging.getLogger(__name__)` to get a namespaced logger.
LOGGER = logging.getLogger()
# Last main/error log filenames passed to setup_logging(); remembered in module
# globals so that subsequent setup_logging() calls reuse them.
_ERROR_LOG = None
_MAIN_LOG = None
def setup_logging(level=logging.INFO, main_log=None, error_log=None, add_loggers=None):
    """
    Set up logging, useful to call from your sushi chef main script
    :param level: Minimum default level for all loggers and handlers
    :param main_log: Main log (typically added in chefs.SushiChef)
    :param error_log: Name of file to log (append) errors in
    :param add_loggers: An iterable of other loggers to configure (['scrapy'])
    """
    global _ERROR_LOG, _MAIN_LOG
    # Remember the log filenames across calls: a later call without main_log /
    # error_log reuses the values from the previous call.
    if not error_log:
        error_log = _ERROR_LOG
    else:
        _ERROR_LOG = error_log
    if not main_log:
        main_log = _MAIN_LOG
    else:
        _MAIN_LOG = main_log
    # logging dictconfig for handlers
    handlers = {
        "console": {
            "level": level,
            "class": "logging.StreamHandler",
            "formatter": "colored",
        }
    }
    logger_handlers = ["console"]
    # Optional file handler for the main log (all records at `level` and up).
    if main_log:
        logger_handlers.append("file")
        handlers["file"] = {
            "level": level,
            "class": "logging.FileHandler",
            "filename": main_log,
            "formatter": "simple_date",
        }
    # Optional file handler capturing only WARNING and above.
    if error_log:
        logger_handlers.append("error")
        handlers["error"] = {
            "level": logging.WARNING,
            "class": "logging.FileHandler",
            "filename": error_log,
            "formatter": "simple_date",
        }
    # The default configuration of a logger (used in below config)
    default_logger_config = {
        "handlers": logger_handlers,
        "propagate": False,
        "level": level,
    }
    config = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "colored": {
                "()": "colorlog.ColoredFormatter",
                "format": "%(log_color)s%(levelname)-8s%(reset)s %(blue)s%(message)s",
            },
            "simple_date": {
                "format": "%(levelname)-8s %(asctime)s %(name)s - %(message)s",
                "datefmt": "%Y-%m-%d %H:%M:%S",
            },
        },
        "handlers": handlers,
        "loggers": {
            "": {"handlers": logger_handlers, "level": level},
            "ricecooker": default_logger_config,
        },
    }
    # Callers may opt additional third-party loggers into the same config.
    for logger in add_loggers or ():
        config["loggers"][logger] = default_logger_config
    logging.config.dictConfig(config)
    # Silence noisy libraries loggers
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("cachecontrol.controller").setLevel(logging.WARNING)
    logging.getLogger("requests.packages").setLevel(logging.WARNING)
    logging.getLogger("urllib3.util.retry").setLevel(logging.WARNING)
    logging.getLogger("urllib3.connection").setLevel(logging.CRITICAL)
    logging.getLogger("urllib3.connectionpool").setLevel(logging.WARNING)
    logging.getLogger("PIL.PngImagePlugin").setLevel(logging.WARNING)
# Setup default logging. This is required so we have a basic logging setup until
# proper user-configured logging is configured in `SushiChef.config_logger`.
setup_logging()
# Domain and file store location for uploading to production Studio server
DEFAULT_DOMAIN = "https://api.studio.learningequality.org"
DOMAIN_ENV = os.getenv("STUDIO_URL", None)
if DOMAIN_ENV is None:  # check old ENV variable for backward compatibility
    DOMAIN_ENV = os.getenv("CONTENTWORKSHOP_URL", None)
DOMAIN = DOMAIN_ENV if DOMAIN_ENV else DEFAULT_DOMAIN
if DOMAIN.endswith("/"):
    DOMAIN = DOMAIN.rstrip("/")
# Shard local restore/cache data per target server so servers don't collide.
FILE_STORE_LOCATION = hashlib.md5(DOMAIN.encode("utf-8")).hexdigest()
# Allow users to choose which phantomjs they use
PHANTOMJS_PATH = os.getenv("PHANTOMJS_PATH", None)
# URL for authenticating user on Kolibri Studio
AUTHENTICATION_URL = "{domain}/api/internal/authenticate_user_internal"
# URL for checking compatible version on Kolibri Studio
VERSION_CHECK_URL = "{domain}/api/internal/check_version"
# URL for getting file diff
FILE_DIFF_URL = "{domain}/api/internal/file_diff"
# URL for uploading files to server
FILE_UPLOAD_URL = "{domain}/api/internal/file_upload"
# URL for getting an upload URL from the server
GET_UPLOAD_URL = "{domain}/api/file/upload_url"
# URL for getting a file from the server
# FIX: the final path segment must be the `{filename}` placeholder —
# get_storage_url() formats this template with a `filename` keyword argument,
# which was silently ignored by the previous (corrupted) template.
FILE_STORAGE_URL = "{domain}/content/storage/{f}/{s}/{filename}"
# URL for creating channel on server
CREATE_CHANNEL_URL = "{domain}/api/internal/create_channel"
# URL for adding nodes to channel
ADD_NODES_URL = "{domain}/api/internal/add_nodes"
# URL for adding nodes to channel from file
ADD_NODES_FROM_FILE_URL = "{domain}/api/internal/api_add_nodes_from_file"
# URL for making final changes to channel
FINISH_CHANNEL_URL = "{domain}/api/internal/finish_channel"
# URL to return after channel is created
OPEN_CHANNEL_URL = "{domain}/channels/{channel_id}/{access}"
# URL for publishing channel
PUBLISH_CHANNEL_URL = "{domain}/api/internal/publish_channel"
# Folder to store downloaded files
STORAGE_DIRECTORY = "storage"
# Folder to store progress tracking information
RESTORE_DIRECTORY = "restore"
# Session for communicating to Kolibri Studio
SESSION = requests.Session()
# Cache for filenames
FILECACHE_DIRECTORY = ".ricecookerfilecache"
FAILED_FILES = []
# Session for downloading files; the FileAdapter lets it serve file:// URLs.
DOWNLOAD_SESSION = requests.Session()
DOWNLOAD_SESSION.mount("file://", FileAdapter())
# Environment variables indicating we should use a proxy for youtube_dl
# downloads.  (Simplified from a redundant double assignment.)
USEPROXY = os.getenv("USEPROXY") is not None or os.getenv("PROXY_LIST") is not None
# CSV headers
CSV_HEADERS = [
    "Source ID",
    "Topic Structure",
    "Old Title",
    "New Title",
    "Old Description",
    "New Description",
    "Old Tags",
    "New Tags",
    "Last Modified",
]
# Automatic temporary directory cleanup
chef_temp_dir = os.path.join(os.getcwd(), ".ricecooker-temp")
@atexit.register
def delete_temp_dir():
    """Remove the chef's temporary directory, if present.

    Registered with atexit so leftover temp files are cleaned up when the
    interpreter shuts down.
    """
    if not os.path.exists(chef_temp_dir):
        return
    LOGGER.debug("Deleting chef temp files at {}".format(chef_temp_dir))
    shutil.rmtree(chef_temp_dir)
# While in most cases a chef run will clean up after itself, make sure that if it didn't,
# temp files from the old run are deleted so that they do not accumulate.
delete_temp_dir()
# If tempdir is set already, that means the user has explicitly chosen a location for temp storage
if not tempfile.tempdir:
    # NOTE(review): makedirs without exist_ok — relies on delete_temp_dir()
    # above having removed any previous directory; verify this cannot race.
    os.makedirs(chef_temp_dir)
    LOGGER.debug("Setting chef temp dir to {}".format(chef_temp_dir))
    # Store all chef temp files in one dir to avoid issues with temp or even primary storage filling up
    # because of failure by the chef to clean up temp files manually.
    tempfile.tempdir = chef_temp_dir
# Record data about past chef runs in chefdata/ dir
DATA_DIR = "chefdata"
DATA_FILENAME = "chef_data.json"
DATA_PATH = os.path.join(DATA_DIR, DATA_FILENAME)
# Default shape of the chef-run bookkeeping file written at DATA_PATH.
CHEF_DATA_DEFAULT = {
    "current_run": None,
    "runs": [],
    "tree_archives": {"previous": None, "current": None},
}
TREES_DATA_DIR = os.path.join(DATA_DIR, "trees")
# Character limits based on Kolibri models
# Warning template used by print_truncate() below.
TRUNCATE_MSG = (
    "\t\t{kind} {id}: {field} {value} is too long - max {max} characters (truncating)"
)
MAX_TITLE_LENGTH = 200
MAX_SOURCE_ID_LENGTH = 200
MAX_DESCRIPTION_LENGTH = 400
MAX_TAGLINE_LENGTH = 150
MAX_AUTHOR_LENGTH = 200
MAX_AGGREGATOR_LENGTH = 200
MAX_PROVIDER_LENGTH = 200
MAX_SOURCE_URL_LENGTH = 400
MAX_ORIGINAL_FILENAME_LENGTH = 255
MAX_LICENSE_DESCRIPTION_LENGTH = 400
MAX_COPYRIGHT_HOLDER_LENGTH = 200
# Maps a field key to the model kind, human-readable field name, and maximum
# length; consumed by print_truncate() when a value must be shortened.
MAX_CHAR_LIMITS = {
    "title": {"kind": "Node", "field": "title", "max": MAX_TITLE_LENGTH},
    "source_id": {"kind": "Node", "field": "source_id", "max": MAX_SOURCE_ID_LENGTH},
    "description": {
        "kind": "Node",
        "field": "description",
        "max": MAX_DESCRIPTION_LENGTH,
    },
    "tagline": {"kind": "Channel", "field": "tagline", "max": MAX_TAGLINE_LENGTH},
    "author": {"kind": "Node", "field": "author", "max": MAX_AUTHOR_LENGTH},
    "question_source_url": {
        "kind": "Question",
        "field": "source url",
        "max": MAX_SOURCE_URL_LENGTH,
    },
    "original_filename": {
        "kind": "File",
        "field": "original filename",
        "max": MAX_ORIGINAL_FILENAME_LENGTH,
    },
    "file_source_url": {
        "kind": "File",
        "field": "source url",
        "max": MAX_SOURCE_URL_LENGTH,
    },
    "license_description": {
        "kind": "License",
        "field": "license description",
        "max": MAX_LICENSE_DESCRIPTION_LENGTH,
    },
    "copyright_holder": {
        "kind": "License",
        "field": "copyright holder",
        "max": MAX_COPYRIGHT_HOLDER_LENGTH,
    },
    "provider": {"kind": "Provider", "field": "provider", "max": MAX_PROVIDER_LENGTH},
    "aggregator": {
        "kind": "Aggregator",
        "field": "aggregator",
        "max": MAX_AGGREGATOR_LENGTH,
    },
}
def print_truncate(field, id, value, kind=None):
    """Warn that *value* for *field* exceeds the model's max length.

    Looks the limit up in MAX_CHAR_LIMITS; *kind* overrides the model kind
    reported in the message.
    """
    limit = MAX_CHAR_LIMITS.get(field)
    message = TRUNCATE_MSG.format(
        kind=kind if kind else limit["kind"],
        id=id,
        field=limit["field"],
        value=value,
        max=limit["max"],
    )
    LOGGER.warning(message)
def get_storage_path(filename):
    """get_storage_path: returns path to storage directory for downloading content

    Files are sharded into subdirectories named after the first two characters
    of the filename; the directory is created on demand.
    Args: filename (str): Name of file to store
    Returns: string path to file
    """
    shard_dir = os.path.join(STORAGE_DIRECTORY, filename[0], filename[1])
    if not os.path.exists(shard_dir):
        os.makedirs(shard_dir)
    return os.path.join(shard_dir, filename)
def authentication_url():
    """Return the Kolibri Studio endpoint used to authenticate the user."""
    return AUTHENTICATION_URL.format(domain=DOMAIN)
def init_file_mapping_store():
    """Create the per-server restore directory used to track downloaded files.

    No-op if the directory already exists.
    """
    restore_dir = os.path.join(RESTORE_DIRECTORY, FILE_STORE_LOCATION)
    if not os.path.exists(restore_dir):
        os.makedirs(restore_dir)
def get_restore_path(filename):
    """Return the pickle path for a restoration point named *filename*.

    Ensures the per-server restore directory exists first.
    Args:
        filename (str): Name of file to store
    Returns: string path to file
    """
    restore_dir = os.path.join(RESTORE_DIRECTORY, FILE_STORE_LOCATION)
    if not os.path.exists(restore_dir):
        os.makedirs(restore_dir)
    return os.path.join(restore_dir, filename + ".pickle")
def check_version_url():
    """Return the Studio endpoint that checks the ricecooker version."""
    return VERSION_CHECK_URL.format(domain=DOMAIN)
def file_diff_url():
    """Return the Studio endpoint that computes the file diff."""
    return FILE_DIFF_URL.format(domain=DOMAIN)
def file_upload_url():
    """Return the Studio endpoint used to upload files."""
    return FILE_UPLOAD_URL.format(domain=DOMAIN)
def get_upload_url():
    """get_upload_url: returns url to request an upload URL from the server
    Args: None
    Returns: string url to the upload_url endpoint
    """
    return GET_UPLOAD_URL.format(domain=DOMAIN)
def get_storage_url(filename):
    """get_storage_url: returns the URL for a given file on the storage service

    Storage URLs are sharded by the first two characters of the filename.
    Args: filename (str): Name of file
    Returns: string URL for file
    """
    return FILE_STORAGE_URL.format(
        domain=DOMAIN,
        f=filename[0],
        s=filename[1],
        filename=filename,
    )
def create_channel_url():
    """Return the Studio endpoint that creates a channel."""
    return CREATE_CHANNEL_URL.format(domain=DOMAIN)
def add_nodes_url():
    """Return the Studio endpoint that adds nodes to a channel."""
    return ADD_NODES_URL.format(domain=DOMAIN)
def add_nodes_from_file_url():
    """Return the Studio endpoint that adds nodes to a channel from a JSON file."""
    return ADD_NODES_FROM_FILE_URL.format(domain=DOMAIN)
def finish_channel_url():
    """Return the Studio endpoint that finalizes a channel upload."""
    return FINISH_CHANNEL_URL.format(domain=DOMAIN)
def open_channel_url(channel, staging=False):
    """Return the frontend URL at which the uploaded channel can be viewed.

    Args:
        channel (str): channel id of uploaded channel
        staging (bool): link to the staging tree instead of the edit tree
    Returns: string url to open channel
    """
    # Preview/review happens on the frontend domain, not the API domain.
    frontend_domain = DOMAIN.replace("api.", "")
    access = "staging" if staging or STAGE else "edit"
    return OPEN_CHANNEL_URL.format(domain=frontend_domain, channel_id=channel, access=access)
def publish_channel_url():
    """publish_channel_url: returns url to publish channel
    Args: None
    Returns: string url to publish channel
    """
    return PUBLISH_CHANNEL_URL.format(domain=DOMAIN)
|
[
"os.getenv",
"os.makedirs",
"os.getcwd",
"requests.Session",
"os.path.exists",
"socket.setdefaulttimeout",
"shutil.rmtree",
"os.path.join",
"requests_file.FileAdapter"
] |
[((767, 795), 'socket.setdefaulttimeout', 'socket.setdefaulttimeout', (['(20)'], {}), '(20)\n', (791, 795), False, 'import socket\n'), ((4377, 4406), 'os.getenv', 'os.getenv', (['"""STUDIO_URL"""', 'None'], {}), "('STUDIO_URL', None)\n", (4386, 4406), False, 'import os\n'), ((4786, 4819), 'os.getenv', 'os.getenv', (['"""PHANTOMJS_PATH"""', 'None'], {}), "('PHANTOMJS_PATH', None)\n", (4795, 4819), False, 'import os\n'), ((6232, 6250), 'requests.Session', 'requests.Session', ([], {}), '()\n', (6248, 6250), False, 'import requests\n'), ((6390, 6408), 'requests.Session', 'requests.Session', ([], {}), '()\n', (6406, 6408), False, 'import requests\n'), ((7921, 7958), 'os.path.join', 'os.path.join', (['DATA_DIR', 'DATA_FILENAME'], {}), '(DATA_DIR, DATA_FILENAME)\n', (7933, 7958), False, 'import os\n'), ((8099, 8130), 'os.path.join', 'os.path.join', (['DATA_DIR', '"""trees"""'], {}), "(DATA_DIR, 'trees')\n", (8111, 8130), False, 'import os\n'), ((4499, 4537), 'os.getenv', 'os.getenv', (['"""CONTENTWORKSHOP_URL"""', 'None'], {}), "('CONTENTWORKSHOP_URL', None)\n", (4508, 4537), False, 'import os\n'), ((6443, 6456), 'requests_file.FileAdapter', 'FileAdapter', ([], {}), '()\n', (6454, 6456), False, 'from requests_file import FileAdapter\n'), ((6952, 6963), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6961, 6963), False, 'import os\n'), ((7034, 7063), 'os.path.exists', 'os.path.exists', (['chef_temp_dir'], {}), '(chef_temp_dir)\n', (7048, 7063), False, 'import os\n'), ((7492, 7518), 'os.makedirs', 'os.makedirs', (['chef_temp_dir'], {}), '(chef_temp_dir)\n', (7503, 7518), False, 'import os\n'), ((10493, 10550), 'os.path.join', 'os.path.join', (['STORAGE_DIRECTORY', 'filename[0]', 'filename[1]'], {}), '(STORAGE_DIRECTORY, filename[0], filename[1])\n', (10505, 10550), False, 'import os\n'), ((10709, 10742), 'os.path.join', 'os.path.join', (['directory', 'filename'], {}), '(directory, filename)\n', (10721, 10742), False, 'import os\n'), ((11213, 11265), 'os.path.join', 
'os.path.join', (['RESTORE_DIRECTORY', 'FILE_STORE_LOCATION'], {}), '(RESTORE_DIRECTORY, FILE_STORE_LOCATION)\n', (11225, 11265), False, 'import os\n'), ((11541, 11593), 'os.path.join', 'os.path.join', (['RESTORE_DIRECTORY', 'FILE_STORE_LOCATION'], {}), '(RESTORE_DIRECTORY, FILE_STORE_LOCATION)\n', (11553, 11593), False, 'import os\n'), ((11664, 11704), 'os.path.join', 'os.path.join', (['path', "(filename + '.pickle')"], {}), "(path, filename + '.pickle')\n", (11676, 11704), False, 'import os\n'), ((7150, 7178), 'shutil.rmtree', 'shutil.rmtree', (['chef_temp_dir'], {}), '(chef_temp_dir)\n', (7163, 7178), False, 'import shutil\n'), ((10640, 10665), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (10654, 10665), False, 'import os\n'), ((10675, 10697), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (10686, 10697), False, 'import os\n'), ((11277, 11297), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (11291, 11297), False, 'import os\n'), ((11307, 11324), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (11318, 11324), False, 'import os\n'), ((11605, 11625), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (11619, 11625), False, 'import os\n'), ((11635, 11652), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (11646, 11652), False, 'import os\n'), ((6586, 6607), 'os.getenv', 'os.getenv', (['"""USEPROXY"""'], {}), "('USEPROXY')\n", (6595, 6607), False, 'import os\n'), ((6623, 6646), 'os.getenv', 'os.getenv', (['"""PROXY_LIST"""'], {}), "('PROXY_LIST')\n", (6632, 6646), False, 'import os\n')]
|
import datetime
import json
import os
import bcrypt
from flask import session
from pymongo import MongoClient
# Load Mongo connection credentials: prefer a local credentials.json file,
# fall back to the mongo_URI environment variable.
if "credentials.json" not in os.listdir('.'):
    data = {"mongo_URI": os.environ['mongo_URI']}
else:
    # FIX: use a context manager so the file handle is closed (the previous
    # json.load(open(...)) leaked the handle).
    with open("credentials.json", "r") as cred_file:
        data = json.load(cred_file)
client = MongoClient(data["mongo_URI"])
class User:
    """Lightweight in-memory view of a stored user credential record."""

    def __init__(self, username, email, password):
        self.username = username
        self.email = email
        self.password = password

    def __repr__(self):
        return '<User %r>' % self.username
def add_user(username, password, email):
    """Insert a new user document into the usercred collection.

    Username and email are stored lower-cased.  Always returns True.
    """
    document = {
        "username": username.lower(),
        "password": password,
        "email": email.lower(),
        "time_of_creation": datetime.datetime.utcnow(),
    }
    client.asteria.usercred.insert_one(document)
    client.close()
    return True
def log(action, ip, username, incoming, outgoing, status):
    """Append an audit-log document for *action* to the logs collection.

    Always returns True.
    """
    document = {
        "username": username.lower(),
        "action": action,
        "ip": ip,
        "incoming": incoming,
        "outgoing": outgoing,
        "status": status,
        "timestamp": datetime.datetime.utcnow(),
    }
    client.asteria.logs.insert_one(document)
    client.close()
    return True
def credentials_valid(username, password):
    """Return True if *username* exists and *password* matches its bcrypt hash.

    Usernames are matched case-insensitively (stored lower-cased); returns
    False for unknown usernames.
    """
    collection = client.asteria.usercred
    res = collection.find_one({"username": username.lower()})
    client.close()
    # FIX: idiomatic identity check (`is not None`) instead of `!= None`.
    if res is not None:
        return bcrypt.checkpw(password.encode(), res["password"])
    return False
def username_taken(username):
    """Return True if a user document with this (case-insensitive) username exists."""
    collection = client.asteria.usercred
    res = collection.find_one({"username": username.lower()})
    client.close()
    # FIX: replace `if res != None: return True / return False` with the
    # direct, idiomatic boolean expression.
    return res is not None
def get_user():
    """Load the currently logged-in user (per the Flask session) as a User."""
    username = session['username']
    record = client.asteria.usercred.find_one({"username": username.lower()})
    client.close()
    return User(record['username'], record['email'], record['password'])
def hash_password(password):
    """Return a bcrypt hash (bytes) of the given plain-text password."""
    return bcrypt.hashpw(password.encode(), bcrypt.gensalt())
def change_user(password, email):
    """Update the logged-in user's password and email in the usercred collection."""
    username = session['username']
    collection = client.asteria.usercred
    collection.update_one({'username': username.lower()}, {"$set": {"password": password, "email": email}})
    # FIX: every sibling helper in this module closes the client after its
    # operation; this one previously did not — keep the behaviour consistent.
    client.close()
|
[
"pymongo.MongoClient",
"bcrypt.gensalt",
"datetime.datetime.utcnow",
"os.listdir",
"bcrypt.hashpw"
] |
[((278, 308), 'pymongo.MongoClient', 'MongoClient', (["data['mongo_URI']"], {}), "(data['mongo_URI'])\n", (289, 308), False, 'from pymongo import MongoClient\n'), ((141, 156), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (151, 156), False, 'import os\n'), ((1940, 1956), 'bcrypt.gensalt', 'bcrypt.gensalt', ([], {}), '()\n', (1954, 1956), False, 'import bcrypt\n'), ((1970, 1997), 'bcrypt.hashpw', 'bcrypt.hashpw', (['passwd', 'salt'], {}), '(passwd, salt)\n', (1983, 1997), False, 'import bcrypt\n'), ((723, 749), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (747, 749), False, 'import datetime\n'), ((1058, 1084), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (1082, 1084), False, 'import datetime\n')]
|
"""
test_core.py
Created by <NAME> on 2009-08-09
"""
import unittest
import os
import sys
import tempfile
import shutil
import keyring.backend
import keyring.core
# Canned fixtures returned by the fake keyrings below.
# NOTE(review): both literals are the sanitized placeholder "<PASSWORD>", so
# the two constants are currently identical — tests that distinguish the two
# backends by password would not actually discriminate.  TODO restore the
# original distinct values.
PASSWORD_TEXT = "<PASSWORD>"
PASSWORD_TEXT_2 = "<PASSWORD>"
# Name of the config file written/removed by test_set_keyring_in_config.
KEYRINGRC = "keyringrc.cfg"
class TestKeyring(keyring.backend.KeyringBackend):
    """A fake keyring backend used by the tests in this module."""

    def supported(self):
        # Report the backend as supported.
        return 0

    def get_password(self, service, username):
        # Always hand back the canned test password.
        return PASSWORD_TEXT

    def set_password(self, service, username, password):
        # Accept any password and report success.
        return 0
class TestKeyring2(TestKeyring):
    """A second fake keyring, distinguishable by the password it returns."""

    def get_password(self, service, username):
        return PASSWORD_TEXT_2
class CoreTestCase(unittest.TestCase):
    # NOTE(review): several string literals below are the sanitized
    # placeholder "<PASSWORD>" (and one service name) — restore the original
    # values before relying on these assertions.
    def test_set_get_password(self):
        """Test the basic function of the keyring.
        """
        keyring.core.set_password("test", "user", "<PASSWORD>")
        self.assertEqual(keyring.core.get_password("test", "user"), "<PASSWORD>")
    def test_set_keyring_in_runtime(self):
        """Test the function of set keyring in runtime.
        """
        keyring.core.set_keyring(TestKeyring())
        keyring.core.set_password("test", "user", "password")
        self.assertEqual(keyring.core.get_password("test", "user"),
                         PASSWORD_TEXT)
    def test_set_keyring_in_config(self):
        """Test setting the keyring by config file.
        """
        # create the config file
        config_file = open(KEYRINGRC,'w')
        config_file.writelines(["[backend]\n",
                    # the path for the user created keyring
                    "keyring-path= %s\n" % os.path.dirname(os.path.abspath(__file__)),
                    # the name of the keyring class
                    "default-keyring=test_core.TestKeyring2\n" ])
        config_file.close()
        # init the keyring lib, the lib will automaticlly load the
        # config file and load the user defined module
        keyring.core.init_backend()
        keyring.core.set_password("<PASSWORD>", "user", "password")
        self.assertEqual(keyring.core.get_password("test", "user"),
                         PASSWORD_TEXT_2)
        os.remove(KEYRINGRC)
    def test_load_config(self):
        # Exercise load_config() in a scratch cwd; the user's personal
        # keyringrc.cfg is temporarily renamed aside so it cannot interfere.
        tempdir = tempfile.mkdtemp()
        old_location = os.getcwd()
        os.chdir(tempdir)
        personal_cfg = os.path.join(os.path.expanduser("~"), "keyringrc.cfg")
        if os.path.exists(personal_cfg):
            os.rename(personal_cfg, personal_cfg+'.old')
            personal_renamed = True
        else:
            personal_renamed = False
        # loading with an empty environment
        keyring.core.load_config()
        # loading with a file that doesn't have a backend section
        cfg = os.path.join(tempdir, "keyringrc.cfg")
        f = open(cfg, 'w')
        f.write('[keyring]')
        f.close()
        keyring.core.load_config()
        # loading with a file that doesn't have a default-keyring value
        cfg = os.path.join(tempdir, "keyringrc.cfg")
        f = open(cfg, 'w')
        f.write('[backend]')
        f.close()
        keyring.core.load_config()
        os.chdir(old_location)
        shutil.rmtree(tempdir)
        # Restore the user's personal config if we moved it aside.
        if personal_renamed:
            os.rename(personal_cfg+'.old', personal_cfg)
def test_suite():
    """Build the suite of all test cases defined in this module."""
    suite = unittest.TestSuite()
    suite.addTests([unittest.makeSuite(CoreTestCase)])
    return suite
# Allow running this module directly; executes the suite defined above.
if __name__ == "__main__":
    unittest.main(defaultTest="test_suite")
|
[
"unittest.main",
"os.path.expanduser",
"os.remove",
"os.path.abspath",
"unittest.TestSuite",
"os.getcwd",
"os.rename",
"os.path.exists",
"unittest.makeSuite",
"tempfile.mkdtemp",
"shutil.rmtree",
"os.path.join",
"os.chdir"
] |
[((3320, 3340), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (3338, 3340), False, 'import unittest\n'), ((3442, 3481), 'unittest.main', 'unittest.main', ([], {'defaultTest': '"""test_suite"""'}), "(defaultTest='test_suite')\n", (3455, 3481), False, 'import unittest\n'), ((2181, 2201), 'os.remove', 'os.remove', (['KEYRINGRC'], {}), '(KEYRINGRC)\n', (2190, 2201), False, 'import os\n'), ((2253, 2271), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (2269, 2271), False, 'import tempfile\n'), ((2295, 2306), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2304, 2306), False, 'import os\n'), ((2315, 2332), 'os.chdir', 'os.chdir', (['tempdir'], {}), '(tempdir)\n', (2323, 2332), False, 'import os\n'), ((2422, 2450), 'os.path.exists', 'os.path.exists', (['personal_cfg'], {}), '(personal_cfg)\n', (2436, 2450), False, 'import os\n'), ((2757, 2795), 'os.path.join', 'os.path.join', (['tempdir', '"""keyringrc.cfg"""'], {}), "(tempdir, 'keyringrc.cfg')\n", (2769, 2795), False, 'import os\n'), ((2992, 3030), 'os.path.join', 'os.path.join', (['tempdir', '"""keyringrc.cfg"""'], {}), "(tempdir, 'keyringrc.cfg')\n", (3004, 3030), False, 'import os\n'), ((3149, 3171), 'os.chdir', 'os.chdir', (['old_location'], {}), '(old_location)\n', (3157, 3171), False, 'import os\n'), ((3180, 3202), 'shutil.rmtree', 'shutil.rmtree', (['tempdir'], {}), '(tempdir)\n', (3193, 3202), False, 'import shutil\n'), ((3359, 3391), 'unittest.makeSuite', 'unittest.makeSuite', (['CoreTestCase'], {}), '(CoreTestCase)\n', (3377, 3391), False, 'import unittest\n'), ((2369, 2392), 'os.path.expanduser', 'os.path.expanduser', (['"""~"""'], {}), "('~')\n", (2387, 2392), False, 'import os\n'), ((2464, 2510), 'os.rename', 'os.rename', (['personal_cfg', "(personal_cfg + '.old')"], {}), "(personal_cfg, personal_cfg + '.old')\n", (2473, 2510), False, 'import os\n'), ((3244, 3290), 'os.rename', 'os.rename', (["(personal_cfg + '.old')", 'personal_cfg'], {}), "(personal_cfg + '.old', personal_cfg)\n", 
(3253, 3290), False, 'import os\n'), ((1689, 1714), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1704, 1714), False, 'import os\n')]
|
#
# Copyright (c) 2022 Airbyte, Inc., all rights reserved.
#
import re
from base64 import b64decode
from unittest import mock
import pytest
import responses
from airbyte_cdk.models import SyncMode
from freezegun import freeze_time
from pytest import raises
from requests.exceptions import ConnectionError
from source_amazon_ads.schemas.profile import AccountInfo, Profile
from source_amazon_ads.spec import AmazonAdsConfig
from source_amazon_ads.streams import (
SponsoredBrandsReportStream,
SponsoredBrandsVideoReportStream,
SponsoredDisplayReportStream,
SponsoredProductsReportStream,
)
from source_amazon_ads.streams.report_streams.report_streams import ReportGenerationFailure, ReportGenerationInProgress, TooManyRequests
"""
METRIC_RESPONSE is gzip compressed binary representing this string:
[
{
"campaignId": 214078428,
"campaignName": "sample-campaign-name-214078428"
},
{
"campaignId": 44504582,
"campaignName": "sample-campaign-name-44504582"
},
{
"campaignId": 509144838,
"campaignName": "sample-campaign-name-509144838"
},
{
"campaignId": 231712082,
"campaignName": "sample-campaign-name-231712082"
},
{
"campaignId": 895306040,
"campaignName": "sample-campaign-name-895306040"
}
]
"""
# NOTE(review): the base64 payload below contains a literal "<KEY>"
# placeholder — the real gzip data appears to have been redacted, so
# b64decode / decompression will fail or yield garbage.  TODO restore the
# original payload (the expected decompressed JSON is documented above).
METRIC_RESPONSE = b64decode(
    """
<KEY>
CxPWzQOK68I1KQE11ergMNrExNTAxNTCiBSTYXrwGmxqYGloYmJhTJKb4ZrwGm1kbGhuaGRAmqPh
mvAabWFpamxgZmBiQIrRcE1go7liAYX9dsTHAQAA
"""
)
# Number of campaign records encoded in METRIC_RESPONSE.
METRICS_COUNT = 5
def setup_responses(init_response=None, init_response_products=None, init_response_brands=None, status_response=None, metric_response=None):
    """Register mocked Amazon Ads API endpoints with the `responses` library.

    Each argument, when provided, becomes the body of the corresponding mocked
    endpoint: report init for SD / SP / HSA, report status, and the metric
    download URL.
    """
    if init_response:
        responses.add(
            responses.POST,
            re.compile(r"https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"),
            body=init_response,
            status=202,
        )
    if init_response_products:
        responses.add(
            responses.POST,
            re.compile(r"https://advertising-api.amazon.com/v2/sp/[a-zA-Z]+/report"),
            body=init_response_products,
            status=202,
        )
    if init_response_brands:
        responses.add(
            responses.POST,
            re.compile(r"https://advertising-api.amazon.com/v2/hsa/[a-zA-Z]+/report"),
            body=init_response_brands,
            status=202,
        )
    if status_response:
        responses.add(
            responses.GET,
            re.compile(r"https://advertising-api.amazon.com/v2/reports/[^/]+$"),
            body=status_response,
        )
    if metric_response:
        responses.add(
            responses.GET,
            "https://advertising-api-test.amazon.com/v1/reports/amzn1.sdAPI.v1.m1.61022EEC.2ac27e60-665c-46b4-b5a9-d72f216cc8ca/download",
            body=metric_response,
        )
# Canned body for the report-initiation endpoints: report accepted,
# generation still in progress.
REPORT_INIT_RESPONSE = """
{"reportId":"amzn1.sdAPI.v1.m1.61022EEC.2ac27e60-665c-46b4-b5a9-d72f216cc8ca","recordType":"campaigns","status":"IN_PROGRESS","statusDetails":"Generating report"}
"""
# Canned body for the report-status endpoint: generation finished, with the
# download location that setup_responses() also mocks.
REPORT_STATUS_RESPONSE = """
{"reportId":"amzn1.sdAPI.v1.m1.61022EEC.2ac27e60-665c-46b4-b5a9-d72f216cc8ca","status":"SUCCESS","statusDetails":"Report successfully generated","location":"https://advertising-api-test.amazon.com/v1/reports/amzn1.sdAPI.v1.m1.61022EEC.2ac27e60-665c-46b4-b5a9-d72f216cc8ca/download","fileSize":144}
"""
def make_profiles(profile_type="seller"):
    """Return a single-profile list with the given account type."""
    account = AccountInfo(marketplaceStringId="", id="", type=profile_type)
    profile = Profile(profileId=1, timezone="America/Los_Angeles", accountInfo=account)
    return [profile]
@responses.activate
def test_display_report_stream(test_config):
    """Seller profiles yield every metric group; vendor profiles skip asins."""
    setup_responses(
        init_response=REPORT_INIT_RESPONSE,
        status_response=REPORT_STATUS_RESPONSE,
        metric_response=METRIC_RESPONSE,
    )
    config = AmazonAdsConfig(**test_config)
    stream_slice = {"reportDate": "20210725"}
    seller_profiles = make_profiles()
    stream = SponsoredDisplayReportStream(config, seller_profiles, authenticator=mock.MagicMock())
    records = list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    assert len(records) == METRICS_COUNT * len(stream.metrics_map)
    assert stream.get_updated_state(None, stream_slice) == stream_slice
    vendor_profiles = make_profiles(profile_type="vendor")
    stream = SponsoredDisplayReportStream(config, vendor_profiles, authenticator=mock.MagicMock())
    records = list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    # Skip asins record for vendor profiles
    assert len(records) == METRICS_COUNT * (len(stream.metrics_map) - 1)
@responses.activate
def test_products_report_stream(test_config):
    """Sponsored Products reports yield one record per metric group."""
    setup_responses(
        init_response_products=REPORT_INIT_RESPONSE,
        status_response=REPORT_STATUS_RESPONSE,
        metric_response=METRIC_RESPONSE,
    )
    config = AmazonAdsConfig(**test_config)
    vendor_profiles = make_profiles(profile_type="vendor")
    stream = SponsoredProductsReportStream(config, vendor_profiles, authenticator=mock.MagicMock())
    stream_slice = {"reportDate": "20210725", "retry_count": 3}
    records = list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    assert len(records) == METRICS_COUNT * len(stream.metrics_map)
@responses.activate
def test_brands_report_stream(test_config):
    """Sponsored Brands reports yield one record per metric group."""
    setup_responses(
        init_response_brands=REPORT_INIT_RESPONSE,
        status_response=REPORT_STATUS_RESPONSE,
        metric_response=METRIC_RESPONSE,
    )
    config = AmazonAdsConfig(**test_config)
    stream = SponsoredBrandsReportStream(config, make_profiles(), authenticator=mock.MagicMock())
    stream_slice = {"reportDate": "20210725"}
    records = list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    assert len(records) == METRICS_COUNT * len(stream.metrics_map)
@responses.activate
def test_brands_video_report_stream(test_config):
    """Sponsored Brands video reports yield one record per metric group."""
    setup_responses(
        init_response_brands=REPORT_INIT_RESPONSE,
        status_response=REPORT_STATUS_RESPONSE,
        metric_response=METRIC_RESPONSE,
    )
    config = AmazonAdsConfig(**test_config)
    stream = SponsoredBrandsVideoReportStream(config, make_profiles(), authenticator=mock.MagicMock())
    stream_slice = {"reportDate": "20210725"}
    records = list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    assert len(records) == METRICS_COUNT * len(stream.metrics_map)
@responses.activate
def test_display_report_stream_init_failure(mocker, test_config):
    """A 400 on report init is retried with backoff and eventually raises."""
    config = AmazonAdsConfig(**test_config)
    stream = SponsoredDisplayReportStream(config, make_profiles(), authenticator=mock.MagicMock())
    stream_slice = {"reportDate": "20210725"}
    responses.add(
        responses.POST,
        re.compile(r"https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"),
        json={"error": "some error"},
        status=400,
    )
    sleep_mock = mocker.patch("time.sleep")
    with pytest.raises(Exception):
        list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    # 5 attempts total -> 4 backoff sleeps between them.
    assert sleep_mock.call_count == 4
    assert len(responses.calls) == 5
@responses.activate
def test_display_report_stream_init_http_exception(mocker, test_config):
    """A connection error on report init is retried 5 times, then re-raised."""
    mocker.patch("time.sleep", lambda x: None)
    config = AmazonAdsConfig(**test_config)
    stream = SponsoredDisplayReportStream(config, make_profiles(), authenticator=mock.MagicMock())
    stream_slice = {"reportDate": "20210725"}
    responses.add(
        responses.POST,
        re.compile(r"https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"),
        body=ConnectionError(),
    )
    with raises(ConnectionError):
        list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    assert len(responses.calls) == 5
@responses.activate
def test_display_report_stream_init_too_many_requests(mocker, test_config):
    """HTTP 429 on report init is retried 5 times, then raises TooManyRequests."""
    mocker.patch("time.sleep", lambda x: None)
    config = AmazonAdsConfig(**test_config)
    stream = SponsoredDisplayReportStream(config, make_profiles(), authenticator=mock.MagicMock())
    stream_slice = {"reportDate": "20210725"}
    responses.add(
        responses.POST,
        re.compile(r"https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"),
        json={},
        status=429,
    )
    with raises(TooManyRequests):
        list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
    assert len(responses.calls) == 5
# Each parametrize case is (modifiers, expected):
#   modifiers — list of (predicate, status, time) tuples applied against the
#     running poll counter; when predicate(count) is true, `status` (if set)
#     replaces the report's IN_PROGRESS status and `time` (if set) jumps the
#     frozen clock to that timestamp, simulating a slow report generation.
#   expected — either an int (the number of status polls expected before the
#     report completes) or an exception class the stream should raise.
@pytest.mark.parametrize(
    ("modifiers", "expected"),
    [
        (
            [
                (lambda x: x <= 5, "SUCCESS", None),
            ],
            5,
        ),
        (
            [
                (lambda x: x > 5, "SUCCESS", None),
            ],
            10,
        ),
        (
            [
                (lambda x: x > 5, None, "2021-01-02 03:34:05"),
            ],
            ReportGenerationInProgress,
        ),
        (
            [
                (lambda x: x >= 1 and x <= 5, "FAILURE", None),
                (lambda x: x >= 6 and x <= 10, None, "2021-01-02 03:23:05"),
                (lambda x: x >= 11, "SUCCESS", "2021-01-02 03:24:06"),
            ],
            15,
        ),
        (
            [
                (lambda x: True, "FAILURE", None),
                (lambda x: x >= 10, None, "2021-01-02 03:34:05"),
                (lambda x: x >= 15, None, "2021-01-02 04:04:05"),
                (lambda x: x >= 20, None, "2021-01-02 04:34:05"),
                (lambda x: x >= 25, None, "2021-01-02 05:04:05"),
                (lambda x: x >= 30, None, "2021-01-02 05:34:05"),
            ],
            ReportGenerationFailure,
        ),
    ],
)
@responses.activate
def test_display_report_stream_backoff(mocker, test_config, modifiers, expected):
    """Exercise the report-status polling/backoff loop against a scripted
    sequence of status responses and simulated clock jumps."""
    mocker.patch("time.sleep")
    setup_responses(init_response=REPORT_INIT_RESPONSE, metric_response=METRIC_RESPONSE)
    with freeze_time("2021-01-02 03:04:05") as frozen_time:

        # Stateful responses-callback: counts GET polls and rewrites the
        # canned status payload according to `modifiers` (see above).
        class StatusCallback:
            count: int = 0
            def __call__(self, request):
                self.count += 1
                # Start every poll as IN_PROGRESS, then let modifiers override.
                response = REPORT_STATUS_RESPONSE.replace("SUCCESS", "IN_PROGRESS")
                for index, status, time in modifiers:
                    if index(self.count):
                        if status:
                            response = response.replace("IN_PROGRESS", status)
                        if time:
                            # Advance the frozen wall clock to trigger timeouts.
                            frozen_time.move_to(time)
                return (200, {}, response)
        callback = StatusCallback()
        responses.add_callback(responses.GET, re.compile(r"https://advertising-api.amazon.com/v2/reports/[^/]+$"), callback=callback)
        config = AmazonAdsConfig(**test_config)
        profiles = make_profiles()
        stream = SponsoredDisplayReportStream(config, profiles, authenticator=mock.MagicMock())
        stream_slice = {"reportDate": "20210725"}
        if isinstance(expected, int):
            # Success path: the stream must have polled exactly `expected` times.
            list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
            assert callback.count == expected
        elif issubclass(expected, Exception):
            # Failure/timeout path: the scripted sequence must raise.
            with pytest.raises(expected):
                list(stream.read_records(SyncMode.incremental, stream_slice=stream_slice))
@freeze_time("2021-07-30 04:26:08")
@responses.activate
def test_display_report_stream_slices_full_refresh(test_config):
    """Full-refresh slicing should yield a single slice for the frozen 'today'."""
    report_stream = SponsoredDisplayReportStream(
        AmazonAdsConfig(**test_config), None, authenticator=mock.MagicMock()
    )
    generated = report_stream.stream_slices(SyncMode.full_refresh, cursor_field=report_stream.cursor_field)
    assert generated == [{"reportDate": "20210730"}]
@freeze_time("2021-07-30 04:26:08")
@responses.activate
def test_display_report_stream_slices_incremental(test_config):
    """Incremental slicing should produce one slice per day from a lookback
    window around the stored cursor date up to the frozen 'today'."""
    report_stream = SponsoredDisplayReportStream(
        AmazonAdsConfig(**test_config), None, authenticator=mock.MagicMock()
    )

    def slices_for(state):
        return report_stream.stream_slices(
            SyncMode.incremental, cursor_field=report_stream.cursor_field, stream_state=state
        )

    assert slices_for({"reportDate": "20210726"}) == [
        {"reportDate": f"202107{day}"} for day in range(23, 31)
    ]
    assert slices_for({"reportDate": "20210730"}) == [
        {"reportDate": f"202107{day}"} for day in range(27, 31)
    ]
    assert slices_for({"reportDate": "20210731"}) == [
        {"reportDate": f"202107{day}"} for day in range(28, 31)
    ]
    # No stored state falls back to a single slice for 'today'.
    assert slices_for({}) == [{"reportDate": "20210730"}]
    # Same fallback when no cursor field is supplied at all.
    assert report_stream.stream_slices(SyncMode.incremental, cursor_field=None, stream_state={}) == [
        {"reportDate": "20210730"}
    ]
|
[
"unittest.mock.MagicMock",
"requests.exceptions.ConnectionError",
"responses.add",
"source_amazon_ads.schemas.profile.AccountInfo",
"base64.b64decode",
"pytest.raises",
"pytest.mark.parametrize",
"freezegun.freeze_time",
"source_amazon_ads.spec.AmazonAdsConfig",
"re.compile"
] |
[((1295, 1447), 'base64.b64decode', 'b64decode', (['"""\n<KEY>\nCxPWzQOK68I1KQE11ergMNrExNTAxNTCiBSTYXrwGmxqYGloYmJhTJKb4ZrwGm1kbGhuaGRAmqPh\nmvAabWFpamxgZmBiQIrRcE1go7liAYX9dsTHAQAA\n"""'], {}), '(\n """\n<KEY>\nCxPWzQOK68I1KQE11ergMNrExNTAxNTCiBSTYXrwGmxqYGloYmJhTJKb4ZrwGm1kbGhuaGRAmqPh\nmvAabWFpamxgZmBiQIrRcE1go7liAYX9dsTHAQAA\n"""\n )\n', (1304, 1447), False, 'from base64 import b64decode\n'), ((8410, 9158), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('modifiers', 'expected')", "[([(lambda x: x <= 5, 'SUCCESS', None)], 5), ([(lambda x: x > 5, 'SUCCESS',\n None)], 10), ([(lambda x: x > 5, None, '2021-01-02 03:34:05')],\n ReportGenerationInProgress), ([(lambda x: x >= 1 and x <= 5, 'FAILURE',\n None), (lambda x: x >= 6 and x <= 10, None, '2021-01-02 03:23:05'), (lambda\n x: x >= 11, 'SUCCESS', '2021-01-02 03:24:06')], 15), ([(lambda x: True,\n 'FAILURE', None), (lambda x: x >= 10, None, '2021-01-02 03:34:05'), (lambda\n x: x >= 15, None, '2021-01-02 04:04:05'), (lambda x: x >= 20, None,\n '2021-01-02 04:34:05'), (lambda x: x >= 25, None, '2021-01-02 05:04:05'\n ), (lambda x: x >= 30, None, '2021-01-02 05:34:05')],\n ReportGenerationFailure)]"], {}), "(('modifiers', 'expected'), [([(lambda x: x <= 5,\n 'SUCCESS', None)], 5), ([(lambda x: x > 5, 'SUCCESS', None)], 10), ([(\n lambda x: x > 5, None, '2021-01-02 03:34:05')],\n ReportGenerationInProgress), ([(lambda x: x >= 1 and x <= 5, 'FAILURE',\n None), (lambda x: x >= 6 and x <= 10, None, '2021-01-02 03:23:05'), (lambda\n x: x >= 11, 'SUCCESS', '2021-01-02 03:24:06')], 15), ([(lambda x: True,\n 'FAILURE', None), (lambda x: x >= 10, None, '2021-01-02 03:34:05'), (lambda\n x: x >= 15, None, '2021-01-02 04:04:05'), (lambda x: x >= 20, None,\n '2021-01-02 04:34:05'), (lambda x: x >= 25, None, '2021-01-02 05:04:05'\n ), (lambda x: x >= 30, None, '2021-01-02 05:34:05')],\n ReportGenerationFailure)])\n", (8433, 9158), False, 'import pytest\n'), ((11211, 11245), 'freezegun.freeze_time', 'freeze_time', 
(['"""2021-07-30 04:26:08"""'], {}), "('2021-07-30 04:26:08')\n", (11222, 11245), False, 'from freezegun import freeze_time\n'), ((11607, 11641), 'freezegun.freeze_time', 'freeze_time', (['"""2021-07-30 04:26:08"""'], {}), "('2021-07-30 04:26:08')\n", (11618, 11641), False, 'from freezegun import freeze_time\n'), ((3685, 3715), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (3700, 3715), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((4764, 4794), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (4779, 4794), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((5415, 5445), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (5430, 5445), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((6031, 6061), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (6046, 6061), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((6500, 6530), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (6515, 6530), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((7262, 7292), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (7277, 7292), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((7915, 7945), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (7930, 7945), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((11344, 11374), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (11359, 11374), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((11739, 11769), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (11754, 11769), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((2445, 
2631), 'responses.add', 'responses.add', (['responses.GET', '"""https://advertising-api-test.amazon.com/v1/reports/amzn1.sdAPI.v1.m1.61022EEC.2ac27e60-665c-46b4-b5a9-d72f216cc8ca/download"""'], {'body': 'metric_response'}), "(responses.GET,\n 'https://advertising-api-test.amazon.com/v1/reports/amzn1.sdAPI.v1.m1.61022EEC.2ac27e60-665c-46b4-b5a9-d72f216cc8ca/download'\n , body=metric_response)\n", (2458, 2631), False, 'import responses\n'), ((6743, 6811), 're.compile', 're.compile', (['"""https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"""'], {}), "('https://advertising-api.amazon.com/sd/[a-zA-Z]+/report')\n", (6753, 6811), False, 'import re\n'), ((6915, 6939), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (6928, 6939), False, 'import pytest\n'), ((7496, 7564), 're.compile', 're.compile', (['"""https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"""'], {}), "('https://advertising-api.amazon.com/sd/[a-zA-Z]+/report')\n", (7506, 7564), False, 'import re\n'), ((7601, 7624), 'pytest.raises', 'raises', (['ConnectionError'], {}), '(ConnectionError)\n', (7607, 7624), False, 'from pytest import raises\n'), ((8149, 8217), 're.compile', 're.compile', (['"""https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"""'], {}), "('https://advertising-api.amazon.com/sd/[a-zA-Z]+/report')\n", (8159, 8217), False, 'import re\n'), ((8251, 8274), 'pytest.raises', 'raises', (['TooManyRequests'], {}), '(TooManyRequests)\n', (8257, 8274), False, 'from pytest import raises\n'), ((9849, 9883), 'freezegun.freeze_time', 'freeze_time', (['"""2021-01-02 03:04:05"""'], {}), "('2021-01-02 03:04:05')\n", (9860, 9883), False, 'from freezegun import freeze_time\n'), ((10645, 10675), 'source_amazon_ads.spec.AmazonAdsConfig', 'AmazonAdsConfig', ([], {}), '(**test_config)\n', (10660, 10675), False, 'from source_amazon_ads.spec import AmazonAdsConfig\n'), ((1665, 1733), 're.compile', 're.compile', (['"""https://advertising-api.amazon.com/sd/[a-zA-Z]+/report"""'], {}), 
"('https://advertising-api.amazon.com/sd/[a-zA-Z]+/report')\n", (1675, 1733), False, 'import re\n'), ((1862, 1933), 're.compile', 're.compile', (['"""https://advertising-api.amazon.com/v2/sp/[a-zA-Z]+/report"""'], {}), "('https://advertising-api.amazon.com/v2/sp/[a-zA-Z]+/report')\n", (1872, 1933), False, 'import re\n'), ((2091, 2163), 're.compile', 're.compile', (['"""https://advertising-api.amazon.com/v2/hsa/[a-zA-Z]+/report"""'], {}), "('https://advertising-api.amazon.com/v2/hsa/[a-zA-Z]+/report')\n", (2101, 2163), False, 'import re\n'), ((2300, 2366), 're.compile', 're.compile', (['"""https://advertising-api.amazon.com/v2/reports/[^/]+$"""'], {}), "('https://advertising-api.amazon.com/v2/reports/[^/]+$')\n", (2310, 2366), False, 'import re\n'), ((3822, 3838), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3836, 3838), False, 'from unittest import mock\n'), ((4282, 4298), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (4296, 4298), False, 'from unittest import mock\n'), ((4923, 4939), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (4937, 4939), False, 'from unittest import mock\n'), ((5551, 5567), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (5565, 5567), False, 'from unittest import mock\n'), ((6172, 6188), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (6186, 6188), False, 'from unittest import mock\n'), ((6636, 6652), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (6650, 6652), False, 'from unittest import mock\n'), ((7398, 7414), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (7412, 7414), False, 'from unittest import mock\n'), ((7572, 7589), 'requests.exceptions.ConnectionError', 'ConnectionError', ([], {}), '()\n', (7587, 7589), False, 'from requests.exceptions import ConnectionError\n'), ((8051, 8067), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (8065, 8067), False, 'from unittest import mock\n'), ((10540, 10606), 
're.compile', 're.compile', (['"""https://advertising-api.amazon.com/v2/reports/[^/]+$"""'], {}), "('https://advertising-api.amazon.com/v2/reports/[^/]+$')\n", (10550, 10606), False, 'import re\n'), ((11445, 11461), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (11459, 11461), False, 'from unittest import mock\n'), ((11840, 11856), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (11854, 11856), False, 'from unittest import mock\n'), ((3365, 3426), 'source_amazon_ads.schemas.profile.AccountInfo', 'AccountInfo', ([], {'marketplaceStringId': '""""""', 'id': '""""""', 'type': 'profile_type'}), "(marketplaceStringId='', id='', type=profile_type)\n", (3376, 3426), False, 'from source_amazon_ads.schemas.profile import AccountInfo, Profile\n'), ((10789, 10805), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (10803, 10805), False, 'from unittest import mock\n'), ((11092, 11115), 'pytest.raises', 'pytest.raises', (['expected'], {}), '(expected)\n', (11105, 11115), False, 'import pytest\n')]
|
from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup
import os
from collections import defaultdict
import inspect
import pandas as pd
import numpy as np
from scipy import stats
import re
from graphviz import Digraph
import plotly
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import json
import re
import uuid
from functools import wraps
from importlib import reload
from werkzeug.utils import secure_filename
from sklearn.preprocessing import OneHotEncoder, StandardScaler, label_binarize, KBinsDiscretizer
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
from sklearn.pipeline import Pipeline
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from utils import *
from fairness_instru import *
import warnings
warnings.filterwarnings('ignore')
# Flask application object for the fairness-inspection web UI.
app = Flask(__name__)
# cache stacks the user's click events so figures render in sequence.
# NOTE(review): re-initialized again further down; one of the two is redundant.
cache = []
# Per-process identifier used to name uploaded files and generated scripts.
# NOTE(review): uuid1() embeds the host MAC address and timestamp; uuid4()
# would avoid leaking host information into generated file names -- confirm.
user_id = uuid.uuid1()
# uploads_dir = os.path.join(app.instance_path, 'media')
# Optional URL prefix for when the app is mounted behind a reverse proxy.
script_name = os.getenv('SCRIPT_NAME', '')
# variable essentials are package import commands that are written to function python file.
#
# function python file is executable scripts that calls fairness_instru and then generates DAGs and intermediate log dicts, which is stored in pickle format.
essentials = """import os
from collections import defaultdict
import inspect
import pandas as pd
import numpy as np
from scipy import stats
import re
from graphviz import Digraph
import plotly
from plotly.subplots import make_subplots
import plotly.graph_objects as go
import json
from functools import wraps
from sklearn.preprocessing import OneHotEncoder, StandardScaler, label_binarize, KBinsDiscretizer
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
from sklearn.pipeline import Pipeline
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from utils import *
from fairness_instru import *
"""
# Play data pipeline codes that generates ADULT_NORMAL case
playdata_AD_normal = """def adult_pipeline_normal(f_path = 'data/adult_train.csv'):
data = pd.read_csv(f_path, na_values='?', index_col=0)
# data = raw_data.dropna()
labels = label_binarize(data['income-per-year'], ['>50K', '<=50K'])
nested_categorical_feature_transformation = Pipeline(steps=[
('impute', SimpleImputer(missing_values=np.nan, strategy='most_frequent')),
('encode', OneHotEncoder(handle_unknown='ignore'))
])
nested_feature_transformation = ColumnTransformer(transformers=[
('categorical', nested_categorical_feature_transformation, ['education', 'workclass']),
('numeric', StandardScaler(), ['age', 'hours-per-week'])
])
nested_pipeline = Pipeline([
('features', nested_feature_transformation),
('classifier', DecisionTreeClassifier())])
return nested_pipeline"""
# Play data pipeline that generates codes case
playdata_CM = """def compas_pipeline(f_path = 'data/compas_train.csv'):
data = pd.read_csv(f_path)
data = data[['sex', 'dob','age','c_charge_degree', 'race','score_text','priors_count','days_b_screening_arrest',
'decile_score','is_recid','two_year_recid','c_jail_in','c_jail_out']]
data = data.loc[(data['days_b_screening_arrest'] <= 30)]
data = data.loc[(data['days_b_screening_arrest'] >= -30)]
data = data.loc[(data['is_recid'] != -1)]
data = data.loc[(data['c_charge_degree'] != "O")]
data = data.loc[(data['score_text'] != 'N/A')]
data = data.replace('Medium', "Low")
labels = LabelEncoder().fit_transform(data['score_text'])
#sklearn pipeline
impute1_and_onehot = Pipeline([('imputer1', SimpleImputer(strategy='most_frequent')),
('onehot', OneHotEncoder(handle_unknown='ignore'))])
impute2_and_bin = Pipeline([('imputer2', SimpleImputer(strategy='mean')),
('discretizer', KBinsDiscretizer(n_bins=4, encode='ordinal', strategy='uniform'))])
featurizer = ColumnTransformer(transformers=[
('impute1_and_onehot', impute1_and_onehot, ['is_recid']),
('impute2_and_bin', impute2_and_bin, ['age'])
])
pipeline = Pipeline([
('features', featurizer),
('classifier', LogisticRegression())
])
return pipeline"""
# Variable initialization: module-level mutable state shared between the
# login() and home() views (each view rebinds these via `global`).
target_name, pos_group, code, name, organization = "", "", "", "", ""
num_target, cat_target = [], []
# NOTE(review): `cache` was already initialized near the top of the module;
# this second assignment is redundant.
cache = []
log_dict = {}
plot_dict = {}
rand_rgb = {}
# flask secret_key, randomly generated
# NOTE(review): the secret key is hard-coded in source; sessions can be forged
# by anyone who reads the repo. Load it from the environment instead.
app.secret_key = "shdgfashfasdsfsdf"
def login_required(f):
    """Decorator restricting a Flask view to authenticated sessions."""
    @wraps(f)
    def wrap(*args, **kwargs):
        """
        Check the login status of the current session.

        If the session is not logged in, flash a message and redirect to
        the login page; otherwise run the wrapped view.
        """
        if not session.get('logged_in'):
            flash('You need to login first')
            return redirect(url_for('login'))
        return f(*args, **kwargs)
    return wrap
@app.route('/login', methods = ['GET', 'POST'])
def login():
    """
    Login Page Flask function
    In Login page:
        takes in user information.
        dropdown menu for user to select play data
        upload function for data upload if new case specified by user
    Returns:
        url for login page. Redirect to main home page if valid login session
        write pipeline code to executable function python file which calls fairness_instru wrapper generating DAGs and intermediate log dict files
        load intermediate dicts as well as DAG(stored in svg) and parse them to main home page
    """
    error = None
    # set default_value here
    if request.method == 'POST':
        # Every submitted field falls back to a demo default when left blank.
        global name
        name = request.form['name'] if request.form['name'] else '<NAME>'
        global organization
        organization = request.form['organization'] if request.form['organization'] else 'Y university'
        global demo
        demo = request.form['demo']
        global code
        # Default pipeline code (Adult-income demo) used when the user does
        # not paste their own; the string is later written into a runner script.
        code = """def adult_pipeline_easy(f_path = 'playdata/AD_train.csv'):
    raw_data = pd.read_csv(f_path, na_values='?', index_col=0)
    data = raw_data.dropna()
    labels = label_binarize(data['income-per-year'], ['>50K', '<=50K'])
    feature_transformation = ColumnTransformer(transformers=[
        ('categorical', OneHotEncoder(handle_unknown='ignore'), ['education', 'workclass']),
        ('numeric', StandardScaler(), ['age', 'hours-per-week'])
    ])
    income_pipeline = Pipeline([
        ('features', feature_transformation),
        ('classifier', DecisionTreeClassifier())])
    return income_pipeline""" if not request.form['code'] else request.form['code']
        global target_name, pos_group
        # Comma-separated form fields are split and stripped into lists.
        target_name, pos_group = list(map(lambda x: x.strip(), request.form['target_name'].split(','))) if request.form['target_name'] else ("income-per-year", ">50K")
        global cat_target
        cat_target = list(map(lambda x: x.strip(), request.form['cat_target'].split(','))) if request.form['cat_target'] else ['sex', 'race']
        global num_target
        num_target = list(map(lambda x: x.strip(), request.form['num_target'].split(','))) if request.form['num_target'] else ['age', 'hours-per-week']
        global save_path
        save_path = f'experiments/{user_id}'
        global perform_target
        perform_target = request.form['perform_target'] if request.form['perform_target'] else 'PR'
        session['logged_in'] = True
        # cache is used for stacking user click event, so that figures will show in sequence
        global cache
        cache = []
        global log_dict
        global rand_rgb
        global plot_dict
        global target_df
        # to_json_dict stores all user entered info, saved in uid format
        to_json_dict = request.form.to_dict(flat=False)
        with open(f'media/{user_id}.json', 'w+') as f:
            json.dump(to_json_dict, f)
        flash('You were just logged in')
        # options for displaying play data cases
        # load saved play data intermediate dict files generated from fairness_instru
        if not demo == 'USER':
            # `pickle` is presumably re-exported by `from utils import *` --
            # TODO confirm, no direct import is visible in this file.
            log_dict = pickle.load(open(f"playdata/{demo}/checkpoints/log_dict_train.p", 'rb'))
            rand_rgb = pickle.load(open(f"playdata/{demo}/checkpoints/rand_color_train.p", 'rb'))
            # NOTE(review): duplicated load -- the next line re-reads the same
            # pickle and is redundant.
            rand_rgb = pickle.load(open(f"playdata/{demo}/checkpoints/rand_color_train.p", 'rb'))
            plot_dict = pickle.load(open(f"playdata/{demo}/checkpoints/plot_dict_train.p", 'rb'))
            target_df = pickle.load(open(f"playdata/{demo}/checkpoints/target_df_train.p", 'rb'))
            if demo =='AD_normal':
                code = playdata_AD_normal
            elif demo == 'CM':
                code = playdata_CM
                target_name, pos_group = 'score_text', 'High'
                cat_target = ['sex', 'race']
                num_target = ['age']
            # Embed the pre-rendered DAG svg into a demo-specific child template.
            with open(f"playdata/{demo}/DAG/pipeline.svg", 'r') as content:
                svg = content.read()
            with open(f'templates/{demo}.html', 'w+') as f:
                f.write("{% extends 'index1.html' %}\n")
                f.write("{% block content %}\n")
                f.write(svg)
                f.write('\n')
                f.write("{% endblock %}\n")
            return redirect(url_for('home'))
        # below handles user defined cases. including:
        # save to executable function python file which generates DAGs and intermediate dict files
        # load intermediate dict files
        # load dags
        # parse intermediate dict and DAGs to main home page
        # load user uploaded file
        file = request.files['file']
        if file.filename == '':
            flash('No selected File')
            # NOTE(review): there is no 'logged_in' view in this file; this
            # url_for likely raises BuildError -- should probably be 'login'.
            return redirect(url_for('logged_in'))
        if file:
            filename = secure_filename(file.filename)
            file.save(f'media/{user_id}.csv')
            # pipeline codes to be output into executable function python file. Extract pipeline function title first.
            function_title = code.split('(')[0].replace('def ','')+"()"
            # Rewrite the pipeline's f_path argument to point at the upload.
            input_args = code.split('(')[1].split(')')[0].split(',')
            for i, item in enumerate(input_args):
                if 'f_path' in item:
                    input_args[i] = f'f_path = \"./media/{user_id}.csv\"'
            input_arg = ','.join(input_args)
            code = ''.join([code.split('(')[0], '(', input_arg, ')', ')'.join(code.split(')')[1:])])
            # write essentials and pipeline codes to executable function python file. add trace wrapper above function declare line.
            with open(f'{user_id}.py', 'w+') as f:
                f.write(essentials)
                f.write(f"""@tracer(cat_col = {cat_target}, numerical_col = {num_target}, sensi_atts={cat_target}, target_name = \"{target_name}\", training=True, save_path=\"{save_path}\", dag_save=\"svg\")\n{code}\n""")
                f.write(f"pipeline = {function_title}")
            # SECURITY NOTE(review): this executes user-submitted pipeline code
            # verbatim on the server -- arbitrary code execution by design.
            # Acceptable only in a trusted/local deployment.
            os.system(f"python {user_id}.py")
            img = save_path + "/DAG/pipeline.svg"
            with open(img, 'r') as content:
                svg = content.read()
            with open(f'templates/{user_id}.html', 'w+') as f:
                f.write("{% extends 'index1.html' %}\n")
                f.write("{% block content %}\n")
                f.write(svg)
                f.write('\n')
                f.write("{% endblock %}\n")
            # load saved intermediate dict files generated from executable function python file.
            log_dict = pickle.load(open(save_path+"/checkpoints/log_dict_train.p", 'rb'))
            rand_rgb = pickle.load(open(save_path+"/checkpoints/rand_color_train.p", 'rb'))
            # NOTE(review): duplicated load, same as in the demo branch above.
            rand_rgb = pickle.load(open(save_path+"/checkpoints/rand_color_train.p", 'rb'))
            plot_dict = pickle.load(open(save_path+"/checkpoints/plot_dict_train.p", 'rb'))
            target_df = pickle.load(open(save_path+"/checkpoints/target_df_train.p", 'rb'))
            return redirect(url_for('home'))
    return render_template("login_2.html", error = error, script_name = script_name)
@app.route('/', methods=['GET'])
@login_required
def home():
    """
    Main Function Flask function
    html adopts hierachical format. child html file takes care of DAG visualization while parent html deals with dynamic changes in codes, dag color, tables and histograms.
    In Main home page:
        Display user information in head row
        Display raw pipeline code. Change color w.r.t click events
        Display DAG generated from pipeline code. Change color w.r.t click events
        Display intermediate changes in both static lables and population stats
        Display visualization of changes in static lables and performance labels
    Returns:
        url for main home page.
    """
    # Each click on a DAG node arrives as ?type=<step-id>; append to the
    # module-level cache so earlier selections keep rendering.
    selected_status = request.args.get('type')
    if selected_status is not None:
        cache.append(selected_status)
    # One highlight color per clicked step, from the precomputed color map.
    corr_color = [rand_rgb[int(step)] for step in cache]
    plots = {}
    to_plot = []
    code_with_color = ""
    # variable initilization
    tables_to_display, titles, labels, code_titles, plt_xs, plt_ys, plt_titles, plt_xas, plt_yas, plot_log_changes = [], [], [], [], [], [], [], [], [], []
    # cache is used to store user click events
    for status in cache:
        if 'Classifier' in int_to_string(int(status)):
            # NOTE(review): hard-coded Adult-income label mapping; presumably
            # wrong for other datasets (e.g. the COMPAS demo) -- confirm.
            label_inverse = {1: '<=50K', 0:'>50K'}
            target_df[target_name].replace(label_inverse, inplace = True)
            target_df['pred_'+target_name].replace(label_inverse, inplace = True)
            plt_titles.insert(0, 'Performance Label')
            to_plot.insert(0, (get_performance_label(target_df, cat_target, target_name, pos_group), perform_target))
            to_plot.append(static_label(target_df, cat_target, target_name))
            plot_log_changes.append(pd.DataFrame(static_label(target_df, cat_target, target_name)))
        else:
            to_plot.append(sort_dict_key(plot_dict[int(status)]))
            plot_log_changes.append(pd.DataFrame(sort_dict_key(plot_dict[int(status)])))
        # display tables
        if int(status) in log_dict.keys():
            temp_table = log_dict[int(status)]
            # Show the diff against the previously inspected step, or
            # 'No changes' for the first step / identical frames.
            if len(plot_log_changes) == 1:
                tables_to_display.append('No changes')
            else:
                if plot_log_changes[-1].equals(plot_log_changes[-2]):
                    tables_to_display.append('No changes')
                else:
                    tables_to_display.append((plot_log_changes[-1] - plot_log_changes[-2]).to_html(classes = 'table table-striped'))
            for key, dataframe in temp_table.items():
                tables_to_display.append(dataframe.to_html(classes = 'table table-striped'))
                num_cat = "NUMERICAL features" if key == 'num' else "CATEGORICAL features"
                titles.append(int_to_string(int(status)))
                labels.append(' -- Static Label, show changes in percentage')
                titles.append(int_to_string(int(status)))
                labels.append(" -- TARGET changed in "+num_cat)
                code_titles.append(int_to_string(int(status)))
                # start_plotly
                # NOTE(review): both branches below are identical; the
                # conditional on `key` is redundant as written.
                if key == 'cat':
                    plt_titles.append('INSPECTING ' + int_to_string(int(status)))
                else:
                    plt_titles.append('INSPECTING ' + int_to_string(int(status)))
        else:
            # Step has no logged table: still emit diff + placeholder entries
            # so the parallel lists stay aligned with the plot panels.
            if len(plot_log_changes) == 1:
                tables_to_display.append('No changes')
            else:
                if plot_log_changes[-1].equals(plot_log_changes[-2]):
                    tables_to_display.append('No changes')
                else:
                    tables_to_display.append((plot_log_changes[-1] - plot_log_changes[-2]).to_html(classes = 'table table-striped'))
            tables_to_display.append('No changes')
            titles.append(int_to_string(int(status)))
            labels.append(' -- Static Label, show changes in percentage')
            titles.append(int_to_string(int(status)))
            labels.append('')
            code_titles.append(int_to_string(int(status)))
            plt_titles.append('INSPECTING ' + int_to_string(int(status)))
    plots = create_hist_sub_plot(to_plot[::-1], plt_titles[::-1], pos_group)
    # change code color w.r.t click events
    code_with_color = change_code_color(corr_color, code_titles, code)
    template_to_render = user_id if demo=="USER" else demo
    # parse variables to html file.
    # Lists are reversed so the most recently clicked step renders first;
    # colors repeat(2) because each step contributes two title/label rows.
    return render_template(f'{template_to_render}.html',
                    plots = plots, tables = tables_to_display[::-1], titles = titles[::-1],
                    labels = labels[::-1], colors = np.array(corr_color[::-1]).repeat(2).tolist(),
                    code = code_with_color, name = name, org = organization, script_name = script_name)
@app.route('/logout')
@login_required
def logout():
    """Clear the login flag from the session and send the user back to login."""
    session.pop('logged_in', None)
    flash('You were just logged out')
    login_page = url_for('login')
    return redirect(login_page)
if __name__ == '__main__':
    # NOTE(review): debug=True enables the interactive Werkzeug debugger
    # (remote code execution) -- development use only, never in production.
    app.run(debug=True)
|
[
"json.dump",
"flask.flash",
"flask.session.pop",
"flask.request.args.get",
"warnings.filterwarnings",
"flask.Flask",
"os.system",
"werkzeug.utils.secure_filename",
"uuid.uuid1",
"flask.url_for",
"flask.request.form.to_dict",
"numpy.array",
"functools.wraps",
"flask.render_template",
"os.getenv"
] |
[((924, 957), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (947, 957), False, 'import warnings\n'), ((965, 980), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (970, 980), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((1004, 1016), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (1014, 1016), False, 'import uuid\n'), ((1090, 1118), 'os.getenv', 'os.getenv', (['"""SCRIPT_NAME"""', '""""""'], {}), "('SCRIPT_NAME', '')\n", (1099, 1118), False, 'import os\n'), ((4788, 4796), 'functools.wraps', 'wraps', (['f'], {}), '(f)\n', (4793, 4796), False, 'from functools import wraps\n'), ((12123, 12192), 'flask.render_template', 'render_template', (['"""login_2.html"""'], {'error': 'error', 'script_name': 'script_name'}), "('login_2.html', error=error, script_name=script_name)\n", (12138, 12192), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((12928, 12952), 'flask.request.args.get', 'request.args.get', (['"""type"""'], {}), "('type')\n", (12944, 12952), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((16991, 17021), 'flask.session.pop', 'session.pop', (['"""logged_in"""', 'None'], {}), "('logged_in', None)\n", (17002, 17021), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((17026, 17059), 'flask.flash', 'flash', (['"""You were just logged out"""'], {}), "('You were just logged out')\n", (17031, 17059), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((7986, 8018), 'flask.request.form.to_dict', 'request.form.to_dict', ([], {'flat': '(False)'}), '(flat=False)\n', (8006, 8018), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((8122, 8154), 'flask.flash', 'flash', (['"""You 
were just logged in"""'], {}), "('You were just logged in')\n", (8127, 8154), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((11136, 11169), 'os.system', 'os.system', (['f"""python {user_id}.py"""'], {}), "(f'python {user_id}.py')\n", (11145, 11169), False, 'import os\n'), ((17080, 17096), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (17087, 17096), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((5084, 5116), 'flask.flash', 'flash', (['"""You need to login first"""'], {}), "('You need to login first')\n", (5089, 5116), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((8086, 8112), 'json.dump', 'json.dump', (['to_json_dict', 'f'], {}), '(to_json_dict, f)\n', (8095, 8112), False, 'import json\n'), ((9918, 9943), 'flask.flash', 'flash', (['"""No selected File"""'], {}), "('No selected File')\n", (9923, 9943), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((10034, 10064), 'werkzeug.utils.secure_filename', 'secure_filename', (['file.filename'], {}), '(file.filename)\n', (10049, 10064), False, 'from werkzeug.utils import secure_filename\n'), ((12095, 12110), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (12102, 12110), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((5145, 5161), 'flask.url_for', 'url_for', (['"""login"""'], {}), "('login')\n", (5152, 5161), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((9510, 9525), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (9517, 9525), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((9972, 9992), 'flask.url_for', 'url_for', (['"""logged_in"""'], {}), 
"('logged_in')\n", (9979, 9992), False, 'from flask import Flask, render_template, redirect, url_for, request, session, flash, Markup\n'), ((16794, 16820), 'numpy.array', 'np.array', (['corr_color[::-1]'], {}), '(corr_color[::-1])\n', (16802, 16820), True, 'import numpy as np\n')]
|
import torch
from torch import nn
class SwishImplementation(torch.autograd.Function):
    """Memory-efficient Swish (x * sigmoid(x)) as a custom autograd Function.

    Only the input tensor is saved for the backward pass; the sigmoid is
    recomputed there instead of stored, trading compute for activation memory.
    """
    @staticmethod
    def forward(ctx, i):
        # swish(i) = i * sigmoid(i)
        result = i * torch.sigmoid(i)
        ctx.save_for_backward(i)
        return result
    @staticmethod
    def backward(ctx, grad_output):
        # Fix: `ctx.saved_variables` was deprecated and later removed from
        # PyTorch; `ctx.saved_tensors` is the supported accessor.
        i, = ctx.saved_tensors
        sigmoid_i = torch.sigmoid(i)
        # d/di [i * sigmoid(i)] = sigmoid(i) * (1 + i * (1 - sigmoid(i)))
        return grad_output * (sigmoid_i * (1 + i * (1 - sigmoid_i)))
class MemoryEfficientSwish(nn.Module):
    # Swish activation backed by SwishImplementation, which recomputes the
    # sigmoid during backward instead of caching it (lower activation memory).
    def forward(self, x):
        """Apply swish(x) = x * sigmoid(x) via the custom autograd Function."""
        return SwishImplementation.apply(x)
class Swish(nn.Module):
    """Plain (non-memory-optimized) Swish activation: x * sigmoid(x)."""
    def forward(self, x):
        gate = torch.sigmoid(x)
        return x * gate
|
[
"torch.sigmoid"
] |
[((348, 364), 'torch.sigmoid', 'torch.sigmoid', (['i'], {}), '(i)\n', (361, 364), False, 'import torch\n'), ((159, 175), 'torch.sigmoid', 'torch.sigmoid', (['i'], {}), '(i)\n', (172, 175), False, 'import torch\n'), ((625, 641), 'torch.sigmoid', 'torch.sigmoid', (['x'], {}), '(x)\n', (638, 641), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
"""
.. Authors
<NAME> <<EMAIL>>
A set of tools to help with automatic API documentation of XICSRT.
Description
-----------
XICSRT uses sphinx for documentation, and API docs are based on the idea of
code self documentation though `python` doc strings. This module contains
a set of decorators and helper function to aid in self documentation.
The most important part of this module is the `@dochelper` decorator which
should be used for all element classes.
.. Note:
Philosophy: Python help() should be just as readable as the Sphinx webpage.
Todo:
- The config docstrings should all be indented follow the help() standard.
- Would it be helpful to show which inherited class the options came from?
"""
import inspect
def dochelper(cls):
    """Class decorator that appends a 'Configuration Options' section.

    Functional wrapper around DocHelper: the appended section lists every
    option documented on `default_config` across `cls` and its ancestors.
    """
    helper = DocHelper(cls)
    return helper()
class DocHelper:
    """
    Docstring builder for XICSRT element classes.

    Normally applied through the `@dochelper` class decorator rather than
    instantiated directly.
    """
    def __init__(self, cls):
        self.cls = cls
        self.update_class_docstring(cls)

    def __call__(self):
        return self.cls

    def update_class_docstring(self, cls):
        # Collect the pieces first and write __doc__ once at the end; the
        # final string is identical to appending incrementally.
        pieces = [cls.__doc__ if cls.__doc__ is not None else '']
        pieces.append('\n')
        pieces.append('Configuration Options:\n\n')
        # Walk the MRO so options documented on ancestors are included too.
        for ancestor in inspect.getmro(cls):
            if not hasattr(ancestor, 'default_config'):
                continue
            doc = ancestor.default_config.__doc__
            if doc is not None:
                pieces.append(inspect.getdoc(ancestor.default_config))
                pieces.append('\n\n')
        cls.__doc__ = ''.join(pieces)
        return cls
|
[
"inspect.getmro",
"inspect.getdoc"
] |
[((1730, 1749), 'inspect.getmro', 'inspect.getmro', (['cls'], {}), '(cls)\n', (1744, 1749), False, 'import inspect\n'), ((1928, 1967), 'inspect.getdoc', 'inspect.getdoc', (['ancestor.default_config'], {}), '(ancestor.default_config)\n', (1942, 1967), False, 'import inspect\n')]
|
from ddtrace import tracer
from ddtrace.propagation.b3 import B3HTTPPropagator
# Route traces to Lightstep's public ingest endpoint over TLS, propagating
# context with B3 headers so spans join traces started by other services.
tracer.configure(
    http_propagator=B3HTTPPropagator,
    hostname="ingest.lightstep.com",
    port=443,
    https=True
)
# NOTE(review): '<access-token>' is a placeholder -- supply a real token
# (ideally from the environment) before deploying.
tracer.set_tags(
    {
        "lightstep.service_name": "lightstep-py",
        "lightstep.access_token": "<access-token>",
    }
)
from json import loads, dumps
from uuid import uuid4
from flask import Flask, request, render_template
# BLOCK 0
from kitchen_service import KitchenService
from kitchen_consumer import KitchenConsumer
from donut import Donut
from status import NEW_ORDER
# BLOCK 1
app = Flask(__name__)
app.static_folder = 'static'
# BLOCK 2
# Single shared service/consumer pair used by every request handler below.
kitchen_service = KitchenService()
kitchen_consumer = KitchenConsumer()
@app.route('/')
def home():
    """Serve the donut-ordering UI."""
    page = render_template('index.html')
    return page
@app.route('/order', methods=['POST'])
def order():
    """Create an order: enqueue one kitchen job per donut requested."""
    new_order_id = str(uuid4())
    # The client posts the whole JSON payload as the (single) form key.
    payload = loads(next(request.form.keys()))
    for spec in payload['donuts']:
        for _ in range(spec['quantity']):
            kitchen_consumer.add_donut(spec, new_order_id)
    return kitchen_consumer.check_status(new_order_id)
@app.route('/status', methods=['POST'])
def status():
    """Report progress for the order id posted as the (single) form key."""
    payload = loads(next(request.form.keys()))
    return kitchen_consumer.check_status(payload['order_id'])
@app.route('/kitchen/add_donut', methods=['POST'])
def add_donut(*args, **kwargs):
    """Register one new donut (flavor + order id) with the kitchen."""
    donut = Donut(
        request.form['flavor'],
        request.form['order_id'],
        NEW_ORDER
    )
    kitchen_service.add_donut(donut)
    return '200'
@app.route('/kitchen/get_donuts', methods=['GET'])
def get_donuts():
    """Return every donut the kitchen knows about as a JSON array."""
    serialized = [
        {
            'flavor': donut.flavor,
            'order_id': donut.order_id,
            'status': donut.status
        }
        for donut in kitchen_service.get_all_donuts()
    ]
    return dumps(serialized)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=8082)
|
[
"uuid.uuid4",
"kitchen_consumer.KitchenConsumer",
"ddtrace.tracer.set_tags",
"flask.Flask",
"flask.request.form.keys",
"flask.render_template",
"ddtrace.tracer.configure",
"kitchen_service.KitchenService",
"donut.Donut"
] |
[((80, 190), 'ddtrace.tracer.configure', 'tracer.configure', ([], {'http_propagator': 'B3HTTPPropagator', 'hostname': '"""ingest.lightstep.com"""', 'port': '(443)', 'https': '(True)'}), "(http_propagator=B3HTTPPropagator, hostname=\n 'ingest.lightstep.com', port=443, https=True)\n", (96, 190), False, 'from ddtrace import tracer\n'), ((204, 311), 'ddtrace.tracer.set_tags', 'tracer.set_tags', (["{'lightstep.service_name': 'lightstep-py', 'lightstep.access_token':\n '<access-token>'}"], {}), "({'lightstep.service_name': 'lightstep-py',\n 'lightstep.access_token': '<access-token>'})\n", (219, 311), False, 'from ddtrace import tracer\n'), ((614, 629), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (619, 629), False, 'from flask import Flask, request, render_template\n'), ((689, 705), 'kitchen_service.KitchenService', 'KitchenService', ([], {}), '()\n', (703, 705), False, 'from kitchen_service import KitchenService\n'), ((725, 742), 'kitchen_consumer.KitchenConsumer', 'KitchenConsumer', ([], {}), '()\n', (740, 742), False, 'from kitchen_consumer import KitchenConsumer\n'), ((785, 814), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (800, 814), False, 'from flask import Flask, request, render_template\n'), ((992, 999), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (997, 999), False, 'from uuid import uuid4\n'), ((1514, 1580), 'donut.Donut', 'Donut', (["request.form['flavor']", "request.form['order_id']", 'NEW_ORDER'], {}), "(request.form['flavor'], request.form['order_id'], NEW_ORDER)\n", (1519, 1580), False, 'from donut import Donut\n'), ((1035, 1054), 'flask.request.form.keys', 'request.form.keys', ([], {}), '()\n', (1052, 1054), False, 'from flask import Flask, request, render_template\n'), ((1349, 1368), 'flask.request.form.keys', 'request.form.keys', ([], {}), '()\n', (1366, 1368), False, 'from flask import Flask, request, render_template\n')]
|
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from app.models.base_model_ import Model
from app import util
class Transaction(Model):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, id: str=None, date_time: datetime=None, amount: float=None, account: str=None, transaction_type: str=None, reference: str=None, description: str=None, balance: str=None, dispute: str='in progress', self_url: str=None, account_url: str=None):  # noqa: E501
        """Transaction - a model defined in Swagger
        :param id: The id of this Transaction. # noqa: E501
        :type id: str
        :param date_time: The date_time of this Transaction. # noqa: E501
        :type date_time: datetime
        :param amount: The amount of this Transaction. # noqa: E501
        :type amount: float
        :param account: The account of this Transaction. # noqa: E501
        :type account: str
        :param transaction_type: The transaction_type of this Transaction. # noqa: E501
        :type transaction_type: str
        :param reference: The reference of this Transaction. # noqa: E501
        :type reference: str
        :param description: The description of this Transaction. # noqa: E501
        :type description: str
        :param balance: The balance of this Transaction. # noqa: E501
        :type balance: str
        :param dispute: The dispute of this Transaction. # noqa: E501
        :type dispute: str
        :param self_url: The self_url of this Transaction. # noqa: E501
        :type self_url: str
        :param account_url: The account_url of this Transaction. # noqa: E501
        :type account_url: str
        """
        # Attribute name -> Python type; consumed by util.deserialize_model.
        self.swagger_types = {
            'id': str,
            'date_time': datetime,
            'amount': float,
            'account': str,
            'transaction_type': str,
            'reference': str,
            'description': str,
            'balance': str,
            'dispute': str,
            'self_url': str,
            'account_url': str
        }
        # Attribute name -> key used in the JSON wire format.
        self.attribute_map = {
            'id': 'id',
            'date_time': 'date_time',
            'amount': 'amount',
            'account': 'account',
            'transaction_type': 'transaction_type',
            'reference': 'reference',
            'description': 'description',
            'balance': 'balance',
            'dispute': 'dispute',
            'self_url': 'self_url',
            'account_url': 'account_url'
        }
        # Assignments go straight to the private backing fields, bypassing the
        # property setters (and their None/enum checks) -- generator convention.
        self._id = id
        self._date_time = date_time
        self._amount = amount
        self._account = account
        self._transaction_type = transaction_type
        self._reference = reference
        self._description = description
        self._balance = balance
        self._dispute = dispute
        self._self_url = self_url
        self._account_url = account_url
    @classmethod
    def from_dict(cls, dikt) -> 'Transaction':
        """Returns the dict as a model
        :param dikt: A dict.
        :type: dict
        :return: The Transaction of this Transaction. # noqa: E501
        :rtype: Transaction
        """
        return util.deserialize_model(dikt, cls)
    @property
    def id(self) -> str:
        """Gets the id of this Transaction.
        :return: The id of this Transaction.
        :rtype: str
        """
        return self._id
    @id.setter
    def id(self, id: str):
        """Sets the id of this Transaction.
        :param id: The id of this Transaction.
        :type id: str
        """
        self._id = id
    @property
    def date_time(self) -> datetime:
        """Gets the date_time of this Transaction.
        :return: The date_time of this Transaction.
        :rtype: datetime
        """
        return self._date_time
    @date_time.setter
    def date_time(self, date_time: datetime):
        """Sets the date_time of this Transaction.
        :param date_time: The date_time of this Transaction.
        :type date_time: datetime
        """
        if date_time is None:
            raise ValueError("Invalid value for `date_time`, must not be `None`")  # noqa: E501
        self._date_time = date_time
    @property
    def amount(self) -> float:
        """Gets the amount of this Transaction.
        :return: The amount of this Transaction.
        :rtype: float
        """
        return self._amount
    @amount.setter
    def amount(self, amount: float):
        """Sets the amount of this Transaction.
        :param amount: The amount of this Transaction.
        :type amount: float
        """
        if amount is None:
            raise ValueError("Invalid value for `amount`, must not be `None`")  # noqa: E501
        self._amount = amount
    @property
    def account(self) -> str:
        """Gets the account of this Transaction.
        :return: The account of this Transaction.
        :rtype: str
        """
        return self._account
    @account.setter
    def account(self, account: str):
        """Sets the account of this Transaction.
        :param account: The account of this Transaction.
        :type account: str
        """
        self._account = account
    @property
    def transaction_type(self) -> str:
        """Gets the transaction_type of this Transaction.
        Transaction type # noqa: E501
        :return: The transaction_type of this Transaction.
        :rtype: str
        """
        return self._transaction_type
    @transaction_type.setter
    def transaction_type(self, transaction_type: str):
        """Sets the transaction_type of this Transaction.
        Transaction type # noqa: E501
        :param transaction_type: The transaction_type of this Transaction.
        :type transaction_type: str
        """
        # Swagger enum: only these values are accepted by the API contract.
        allowed_values = ["debit", "credit"]  # noqa: E501
        if transaction_type not in allowed_values:
            raise ValueError(
                "Invalid value for `transaction_type` ({0}), must be one of {1}"
                .format(transaction_type, allowed_values)
            )
        self._transaction_type = transaction_type
    @property
    def reference(self) -> str:
        """Gets the reference of this Transaction.
        :return: The reference of this Transaction.
        :rtype: str
        """
        return self._reference
    @reference.setter
    def reference(self, reference: str):
        """Sets the reference of this Transaction.
        :param reference: The reference of this Transaction.
        :type reference: str
        """
        if reference is None:
            raise ValueError("Invalid value for `reference`, must not be `None`")  # noqa: E501
        self._reference = reference
    @property
    def description(self) -> str:
        """Gets the description of this Transaction.
        :return: The description of this Transaction.
        :rtype: str
        """
        return self._description
    @description.setter
    def description(self, description: str):
        """Sets the description of this Transaction.
        :param description: The description of this Transaction.
        :type description: str
        """
        if description is None:
            raise ValueError("Invalid value for `description`, must not be `None`")  # noqa: E501
        self._description = description
    @property
    def balance(self) -> str:
        """Gets the balance of this Transaction.
        :return: The balance of this Transaction.
        :rtype: str
        """
        return self._balance
    @balance.setter
    def balance(self, balance: str):
        """Sets the balance of this Transaction.
        :param balance: The balance of this Transaction.
        :type balance: str
        """
        if balance is None:
            raise ValueError("Invalid value for `balance`, must not be `None`")  # noqa: E501
        self._balance = balance
    @property
    def dispute(self) -> str:
        """Gets the dispute of this Transaction.
        If transaction is under dispute # noqa: E501
        :return: The dispute of this Transaction.
        :rtype: str
        """
        return self._dispute
    @dispute.setter
    def dispute(self, dispute: str):
        """Sets the dispute of this Transaction.
        If transaction is under dispute # noqa: E501
        :param dispute: The dispute of this Transaction.
        :type dispute: str
        """
        # Swagger enum: lifecycle states of a dispute.
        allowed_values = ["in progress", "cleared", "reported", "under investigation"]  # noqa: E501
        if dispute not in allowed_values:
            raise ValueError(
                "Invalid value for `dispute` ({0}), must be one of {1}"
                .format(dispute, allowed_values)
            )
        self._dispute = dispute
    @property
    def self_url(self) -> str:
        """Gets the self_url of this Transaction.
        :return: The self_url of this Transaction.
        :rtype: str
        """
        return self._self_url
    @self_url.setter
    def self_url(self, self_url: str):
        """Sets the self_url of this Transaction.
        :param self_url: The self_url of this Transaction.
        :type self_url: str
        """
        if self_url is None:
            raise ValueError("Invalid value for `self_url`, must not be `None`")  # noqa: E501
        self._self_url = self_url
    @property
    def account_url(self) -> str:
        """Gets the account_url of this Transaction.
        :return: The account_url of this Transaction.
        :rtype: str
        """
        return self._account_url
    @account_url.setter
    def account_url(self, account_url: str):
        """Sets the account_url of this Transaction.
        :param account_url: The account_url of this Transaction.
        :type account_url: str
        """
        if account_url is None:
            raise ValueError("Invalid value for `account_url`, must not be `None`")  # noqa: E501
        self._account_url = account_url
|
[
"app.util.deserialize_model"
] |
[((3324, 3357), 'app.util.deserialize_model', 'util.deserialize_model', (['dikt', 'cls'], {}), '(dikt, cls)\n', (3346, 3357), False, 'from app import util\n')]
|
import tornado.ioloop
import tornado.web
import tornado.template
import tornado.httpserver
import datetime
import pytz
from SQL.table_user import User
class BaseHandler(tornado.web.RequestHandler):
    """Request handler base class providing cookie-based session lookup."""

    def get_current_user(self): # Sets current_user in every template
        """Return the logged-in User, or None when the session is invalid.

        The secure 'user' cookie holds a JSON pair [username, session_id].
        A user counts as logged in only when that session id matches the one
        stored on the row and the session is less than an hour old.
        """
        # Fix: removed leftover debug print of the raw cookie (it also leaked
        # session data to stdout on every request).
        name = self.get_secure_cookie('user')
        if not name:
            return None
        data = tornado.escape.json_decode(name)
        usr = self.database.query(User).filter(User.username == str(data[0])).first()
        if usr is None:
            return None
        if usr.session_id is None:
            return None
        # Session age in seconds; compared as naive datetimes -- assumes
        # session_date was stored as naive UTC (TODO confirm at write site).
        time = (datetime.datetime.now(pytz.utc).replace(tzinfo=None) - usr.session_date).total_seconds()
        if usr.session_id == data[1] and time < 3600:
            return usr
        return None

    def initialize(self, database):
        """Stash the shared database session supplied via the URLSpec kwargs."""
        self.database = database
|
[
"datetime.datetime.now"
] |
[((702, 733), 'datetime.datetime.now', 'datetime.datetime.now', (['pytz.utc'], {}), '(pytz.utc)\n', (723, 733), False, 'import datetime\n')]
|
from django.db import models
from usuarios.models import Usuario
from jornadas.models import Jornada_Corporacion
class Votacion_Log(models.Model):
    """Audit-log row recording one user's vote in a corporation jornada."""

    # Fix: on_delete is mandatory from Django 2.0 onward; CASCADE matches the
    # implicit default that earlier Django versions applied.
    jornada_corporacion = models.ForeignKey(Jornada_Corporacion, on_delete=models.CASCADE)
    usuario = models.ForeignKey(Usuario, on_delete=models.CASCADE)
    fecha_votacion = models.DateTimeField(auto_now_add=True, blank=True)  # set once on insert
    is_active = models.BooleanField(default=True)

    class Meta:
        ordering = ["fecha_votacion"]
        db_table = 'votacion_log'

    def __str__(self):
        return 'Usuario %s Votó por %s a %s ' % (self.usuario.cedula_usuario, self.jornada_corporacion.corporacion.name_corporation,
                                                 self.fecha_votacion)
|
[
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.db.models.BooleanField"
] |
[((176, 214), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Jornada_Corporacion'], {}), '(Jornada_Corporacion)\n', (193, 214), False, 'from django.db import models\n'), ((229, 255), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Usuario'], {}), '(Usuario)\n', (246, 255), False, 'from django.db import models\n'), ((277, 328), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'blank': '(True)'}), '(auto_now_add=True, blank=True)\n', (297, 328), False, 'from django.db import models\n'), ((345, 378), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (364, 378), False, 'from django.db import models\n')]
|
import logging
import threading
import schedule
import time
from fetch_papers import fetch_papers_main
from twitter_daemon import main_twitter_fetcher
from fetch_citations_and_references import update_all_papers
from logger import logger_config
def run_threaded(job_func):
    """Fire *job_func* on its own thread so one slow job can't block the scheduler loop."""
    worker = threading.Thread(target=job_func)
    worker.start()
if __name__ == '__main__':
    # File-based logging for this long-running worker process.
    logger_config(info_filename='background_tasks.log')
    logger = logging.getLogger(__name__)
    logger.info('Start background tasks')
    # Each job is launched via run_threaded so a slow fetch cannot delay the
    # other scheduled jobs.
    schedule.every(30).minutes.do(run_threaded, main_twitter_fetcher)
    schedule.every(3).hours.do(run_threaded, fetch_papers_main)
    schedule.every().saturday.at("00:10").do(run_threaded, update_all_papers)
    # Polling scheduler loop; 1-second resolution is ample for these intervals.
    while True:
        schedule.run_pending()
        time.sleep(1)
|
[
"schedule.run_pending",
"threading.Thread",
"time.sleep",
"logger.logger_config",
"schedule.every",
"logging.getLogger"
] |
[((293, 326), 'threading.Thread', 'threading.Thread', ([], {'target': 'job_func'}), '(target=job_func)\n', (309, 326), False, 'import threading\n'), ((382, 433), 'logger.logger_config', 'logger_config', ([], {'info_filename': '"""background_tasks.log"""'}), "(info_filename='background_tasks.log')\n", (395, 433), False, 'from logger import logger_config\n'), ((447, 474), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (464, 474), False, 'import logging\n'), ((756, 778), 'schedule.run_pending', 'schedule.run_pending', ([], {}), '()\n', (776, 778), False, 'import schedule\n'), ((787, 800), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (797, 800), False, 'import time\n'), ((523, 541), 'schedule.every', 'schedule.every', (['(30)'], {}), '(30)\n', (537, 541), False, 'import schedule\n'), ((593, 610), 'schedule.every', 'schedule.every', (['(3)'], {}), '(3)\n', (607, 610), False, 'import schedule\n'), ((657, 673), 'schedule.every', 'schedule.every', ([], {}), '()\n', (671, 673), False, 'import schedule\n')]
|
from benchmark import Benchmark, benchmark
import astropy.units as u
import pytest
# Regression benchmark: the Benchmark base class runs the simulation and
# compares the listed final log values against these reference numbers
# (units attached via astropy where applicable).
@benchmark(
    {
        "log.final.star.LXUVStellar": {"value": 3.120390e21, "unit": u.W},
        "log.final.b.EnvelopeMass": {"value": 0, "unit": u.Mearth},
        "log.final.b.Radius": {"value": 6.378100e06, "unit": u.m},
        "log.final.c.EnvelopeMass": {"value": 3.298788, "unit": u.Mearth},
        "log.final.c.Radius": {"value": 3.372667e+07, "unit": u.m},
        "log.final.c.KTide": {"value": 0.849139}
    }
)
class TestAtmEscKepler36(Benchmark):
    pass
|
[
"benchmark.benchmark"
] |
[((86, 469), 'benchmark.benchmark', 'benchmark', (["{'log.final.star.LXUVStellar': {'value': 3.12039e+21, 'unit': u.W},\n 'log.final.b.EnvelopeMass': {'value': 0, 'unit': u.Mearth},\n 'log.final.b.Radius': {'value': 6378100.0, 'unit': u.m},\n 'log.final.c.EnvelopeMass': {'value': 3.298788, 'unit': u.Mearth},\n 'log.final.c.Radius': {'value': 33726670.0, 'unit': u.m},\n 'log.final.c.KTide': {'value': 0.849139}}"], {}), "({'log.final.star.LXUVStellar': {'value': 3.12039e+21, 'unit': u.W\n }, 'log.final.b.EnvelopeMass': {'value': 0, 'unit': u.Mearth},\n 'log.final.b.Radius': {'value': 6378100.0, 'unit': u.m},\n 'log.final.c.EnvelopeMass': {'value': 3.298788, 'unit': u.Mearth},\n 'log.final.c.Radius': {'value': 33726670.0, 'unit': u.m},\n 'log.final.c.KTide': {'value': 0.849139}})\n", (95, 469), False, 'from benchmark import Benchmark, benchmark\n')]
|
# Generated by Django 3.2.1 on 2021-05-10 01:10
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds an integer 'distance' column (default 5000) to
    # VariantAnnotationVersion.
    dependencies = [
        ('annotation', '0030_auto_20210429_1528'),
    ]
    operations = [
        migrations.AddField(
            model_name='variantannotationversion',
            name='distance',
            field=models.IntegerField(default=5000),
        ),
    ]
|
[
"django.db.models.IntegerField"
] |
[((357, 390), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(5000)'}), '(default=5000)\n', (376, 390), False, 'from django.db import migrations, models\n')]
|
#!/usr/bin/env python
from flask import Blueprint, render_template, flash, redirect, url_for, request, abort
from flask_bootstrap import __version__ as FLASK_BOOTSTRAP_VERSION
from flask_login import (
LoginManager,
current_user,
login_required,
login_user,
logout_user,
)
from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator
from urllib.parse import urlparse, urljoin
from werkzeug.security import generate_password_hash, check_password_hash
# Local imports
from nav import nav
from user import User
def is_safe_url(target):
    """True when *target* resolves to this host over http(s) — safe to redirect to."""
    host = urlparse(request.host_url)
    candidate = urlparse(urljoin(request.host_url, target))
    same_host = host.netloc == candidate.netloc
    return candidate.scheme in ('http', 'https') and same_host
# All view functions below hang off this blueprint.
frontend = Blueprint('frontend', __name__)
# We're adding a navbar as well through flask-navbar. In our example, the
# navbar has an usual amount of Link-Elements, more commonly you will have a
# lot more View instances.
def create_frontend_top():
    """Build the top navbar; only the session entries depend on auth state.

    Deduplicated: the two branches previously rebuilt the whole Navbar and
    only differed in the login/logout entries.
    """
    if current_user is not None and current_user.is_authenticated:
        session_entries = [
            Text('Logged in as {}'.format(current_user.name)),
            View('Logout', '.logout'),
        ]
    else:
        session_entries = [View('Login', '.login')]
    return Navbar(
        View('Minecraft Monitor', '.index'),
        *session_entries,
        View('Player Details', '.players'),
        View('Server Details', '.server'),
        View('Server Admin', '.admin'))
# Register the navbar factory so templates can render it as 'frontend_top'.
nav.register_element('frontend_top', create_frontend_top)
# Our index-page just shows a quick explanation. Check out the template
# "templates/index.html" documentation for more details.
@frontend.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')
@frontend.route('/login', methods=['POST', 'GET'])
def login():
    """Show the login form and, on POST, attempt to authenticate.

    Flashes a message for unknown emails and bad passwords; on success the
    user is redirected to the validated 'next' URL (or re-shown the form
    when 'next' is absent or unsafe).
    """
    email = request.form.get('email')
    password = request.form.get('password')
    next = request.form.get('next')
    if email is not None:
        user = User.find(email)
        if user is None:
            flash('Unrecognised email', category='error')
        elif not user.check_password_hash(password):
            flash('Incorrect password', category='error')
        else:
            login_user(user)
            flash('Successfully logged in as {}'.format(current_user.name))
    # Only follow 'next' when it points back at this host.
    if next is not None and is_safe_url(next):
        return redirect(next or url_for('frontend.index'))
    return render_template('login.html')
@frontend.route('/logout')
@login_required
def logout():
    """Clear the session and show the login page again."""
    logout_user()
    flash('Logged out')
    return render_template('login.html')
@frontend.route('/server')
def server():
    """Server details page."""
    return render_template('server.html')
@frontend.route('/players')
def players():
    """Player details page."""
    return render_template('players.html')
@frontend.route('/admin', methods=['POST', 'GET'])
@login_required
def admin():
    """Admin page; requires an authenticated session."""
    return render_template('admin.html')
@frontend.route('/third_party')
def third_party():
    """Third-party acknowledgements page."""
    return render_template('third_party.html')
'''
@frontend.route('/start_server')
def start_server():
print('starting server...')
return 'starting...'
@frontend.route('/stop_server')
def stop_server():
print('stopping server...')
return 'stopping...'
'''
|
[
"flask.flash",
"flask.Blueprint",
"urllib.parse.urljoin",
"flask.request.form.get",
"flask_login.login_user",
"flask_nav.elements.View",
"flask_login.logout_user",
"flask.url_for",
"flask.render_template",
"user.User.find",
"nav.nav.register_element",
"urllib.parse.urlparse"
] |
[((774, 805), 'flask.Blueprint', 'Blueprint', (['"""frontend"""', '__name__'], {}), "('frontend', __name__)\n", (783, 805), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((1573, 1630), 'nav.nav.register_element', 'nav.register_element', (['"""frontend_top"""', 'create_frontend_top'], {}), "('frontend_top', create_frontend_top)\n", (1593, 1630), False, 'from nav import nav\n'), ((584, 610), 'urllib.parse.urlparse', 'urlparse', (['request.host_url'], {}), '(request.host_url)\n', (592, 610), False, 'from urllib.parse import urlparse, urljoin\n'), ((1804, 1833), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (1819, 1833), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((1909, 1934), 'flask.request.form.get', 'request.form.get', (['"""email"""'], {}), "('email')\n", (1925, 1934), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((1948, 1976), 'flask.request.form.get', 'request.form.get', (['"""password"""'], {}), "('password')\n", (1964, 1976), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((1986, 2010), 'flask.request.form.get', 'request.form.get', (['"""next"""'], {}), "('next')\n", (2002, 2010), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2550, 2563), 'flask_login.logout_user', 'logout_user', ([], {}), '()\n', (2561, 2563), False, 'from flask_login import LoginManager, current_user, login_required, login_user, logout_user\n'), ((2566, 2585), 'flask.flash', 'flash', (['"""Logged out"""'], {}), "('Logged out')\n", (2571, 2585), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2595, 2624), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (2610, 2624), False, 'from 
flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2676, 2706), 'flask.render_template', 'render_template', (['"""server.html"""'], {}), "('server.html')\n", (2691, 2706), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2760, 2791), 'flask.render_template', 'render_template', (['"""players.html"""'], {}), "('players.html')\n", (2775, 2791), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2882, 2911), 'flask.render_template', 'render_template', (['"""admin.html"""'], {}), "('admin.html')\n", (2897, 2911), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2973, 3008), 'flask.render_template', 'render_template', (['"""third_party.html"""'], {}), "('third_party.html')\n", (2988, 3008), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((633, 666), 'urllib.parse.urljoin', 'urljoin', (['request.host_url', 'target'], {}), '(request.host_url, target)\n', (640, 666), False, 'from urllib.parse import urlparse, urljoin\n'), ((2047, 2063), 'user.User.find', 'User.find', (['email'], {}), '(email)\n', (2056, 2063), False, 'from user import User\n'), ((2403, 2432), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (2418, 2432), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((1102, 1137), 'flask_nav.elements.View', 'View', (['"""Minecraft Monitor"""', '""".index"""'], {}), "('Minecraft Monitor', '.index')\n", (1106, 1137), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1202, 1227), 'flask_nav.elements.View', 'View', (['"""Logout"""', '""".logout"""'], {}), "('Logout', '.logout')\n", (1206, 1227), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1235, 1269), 
'flask_nav.elements.View', 'View', (['"""Player Details"""', '""".players"""'], {}), "('Player Details', '.players')\n", (1239, 1269), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1277, 1310), 'flask_nav.elements.View', 'View', (['"""Server Details"""', '""".server"""'], {}), "('Server Details', '.server')\n", (1281, 1310), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1318, 1348), 'flask_nav.elements.View', 'View', (['"""Server Admin"""', '""".admin"""'], {}), "('Server Admin', '.admin')\n", (1322, 1348), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1383, 1418), 'flask_nav.elements.View', 'View', (['"""Minecraft Monitor"""', '""".index"""'], {}), "('Minecraft Monitor', '.index')\n", (1387, 1418), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1426, 1449), 'flask_nav.elements.View', 'View', (['"""Login"""', '""".login"""'], {}), "('Login', '.login')\n", (1430, 1449), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1457, 1491), 'flask_nav.elements.View', 'View', (['"""Player Details"""', '""".players"""'], {}), "('Player Details', '.players')\n", (1461, 1491), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1499, 1532), 'flask_nav.elements.View', 'View', (['"""Server Details"""', '""".server"""'], {}), "('Server Details', '.server')\n", (1503, 1532), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((1540, 1570), 'flask_nav.elements.View', 'View', (['"""Server Admin"""', '""".admin"""'], {}), "('Server Admin', '.admin')\n", (1544, 1570), False, 'from flask_nav.elements import Navbar, View, Subgroup, Link, Text, Separator\n'), ((2091, 2136), 'flask.flash', 'flash', (['"""Unrecognised email"""'], {'category': '"""error"""'}), "('Unrecognised email', 
category='error')\n", (2096, 2136), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2460, 2485), 'flask.url_for', 'url_for', (['"""frontend.index"""'], {}), "('frontend.index')\n", (2467, 2485), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2192, 2237), 'flask.flash', 'flash', (['"""Incorrect password"""'], {'category': '"""error"""'}), "('Incorrect password', category='error')\n", (2197, 2237), False, 'from flask import Blueprint, render_template, flash, redirect, url_for, request, abort\n'), ((2254, 2270), 'flask_login.login_user', 'login_user', (['user'], {}), '(user)\n', (2264, 2270), False, 'from flask_login import LoginManager, current_user, login_required, login_user, logout_user\n')]
|
# Computes expected results for `testGRU()` in `Tests/TensorFlowTests/LayerTests.swift`.
# Requires 'tensorflow>=2.0.0a0' (e.g. "pip install tensorflow==2.2.0").
import sys
import numpy
import tensorflow as tf
# Set random seed for repetable results
tf.random.set_seed(0)
def indented(s):
    """Indent every line of *s* (including empty ones) by four spaces."""
    return '\n'.join('    ' + line for line in s.split('\n'))
def swift_tensor(name, tensor):
    """Format *tensor* as a Swift `let <name> = Tensor<Float>(...)` literal."""
    if hasattr(tensor, 'numpy'):
        # Accept TF eager tensors as well as plain numpy arrays.
        tensor = tensor.numpy()
    def format_float(x):
        # numpy may print '1.' for whole floats; Swift wants '1.0'.
        text = numpy.format_float_positional(x, unique=True)
        return text + '0' if text.endswith('.') else text
    body = numpy.array2string(
        tensor, separator=',', formatter={'float_kind': format_float})
    return 'let {} = Tensor<Float>(\n{}\n)'.format(name, indented(body))
# Fixture dimensions mirrored by the Swift-side test.
units = 4
input_dim = 3
input_length = 4
# Passing "go_backwards" on the command line flips the GRU's scan direction.
go_backwards = "go_backwards" in sys.argv
# Initialize the keras model with the GRU.
gru = tf.keras.layers.GRU(
    input_dim=input_dim,
    units=units,
    activation="tanh", recurrent_activation="sigmoid",
    return_sequences=True, return_state=True,
    go_backwards=go_backwards)
x_input = tf.keras.Input(shape=[input_length, input_dim])
initial_state = tf.keras.Input(shape=[units])
initial_state_input = [initial_state]
output = gru(x_input, initial_state=initial_state_input)
model = tf.keras.Model(inputs=[x_input, initial_state_input], outputs=[output])
# Keras concatenates the three gate kernels along the last axis in the
# order [update | reset | new]; slice them apart by `units`-wide columns.
[kernel, recurrent_kernel, bias] = gru.get_weights()
update_kernel = kernel[:, :units]
update_recurrent_kernel = recurrent_kernel[:, :units]
reset_kernel = kernel[:, units: units * 2]
reset_recurrent_kernel = recurrent_kernel[:, units: units * 2]
new_kernel = kernel[:, units * 2:]
new_recurrent_kernel = recurrent_kernel[:, units * 2:]
# bias is indexed [0]/[1] here — presumably input bias vs. recurrent bias
# (the two-row bias of keras's reset_after GRU variant); TODO confirm.
update_bias = bias[0][:units]
update_recurrent_bias = bias[1][:units]
reset_bias = bias[0][units: units * 2]
reset_recurrent_bias = bias[1][units: units * 2]
new_bias = bias[0][units * 2:]
new_recurrent_bias = bias[1][units * 2:]
# Print the GRU weights.
print(swift_tensor('updateKernel', update_kernel))
print(swift_tensor('resetKernel', reset_kernel))
print(swift_tensor('outputKernel', new_kernel))
print(swift_tensor('updateRecurrentKernel', update_recurrent_kernel))
print(swift_tensor('resetRecurrentKernel', reset_recurrent_kernel))
print(swift_tensor('outputRecurrentKernel', new_recurrent_kernel))
print(swift_tensor('updateBias', update_bias))
print(swift_tensor('resetBias', reset_bias))
print(swift_tensor('outputBias', new_bias))
print(swift_tensor('updateRecurrentBias', update_recurrent_bias))
print(swift_tensor('resetRecurrentBias', reset_recurrent_bias))
print(swift_tensor('outputRecurrentBias', new_recurrent_bias))
# Initialize input data (deterministic given the seed set above) and print it.
x = tf.keras.initializers.GlorotUniform()(shape=[1, input_length, input_dim])
initial_state = [
    tf.keras.initializers.GlorotUniform()(shape=[1, units]),
]
print(swift_tensor('x', x))
print(swift_tensor('initialState', initial_state[0]))
# Run forwards and backwards pass and print the results.
with tf.GradientTape() as tape:
    # x and initial_state are plain tensors (not tf.Variables), so they
    # must be watched explicitly to get gradients w.r.t. them.
    tape.watch(x)
    tape.watch(initial_state)
    [[states, final_state]] = model([x, initial_state])
    # Scalar loss: sum of the last time step's output.
    sum_output = tf.reduce_sum(states[0][-1])
[grad_model, grad_x, grad_initial_state] = tape.gradient(sum_output, [model.variables, x, initial_state])
[grad_kernel, grad_recurrent_kernel, grad_bias] = grad_model
[grad_initial_state] = grad_initial_state
# Split the concatenated gradients with the same [update | reset | new]
# column layout used for the weights above.
grad_update_kernel = grad_kernel[:, :units]
grad_update_recurrent_kernel = grad_recurrent_kernel[:, :units]
grad_reset_kernel = grad_kernel[:, units: units * 2]
grad_reset_recurrent_kernel = grad_recurrent_kernel[:, units: units * 2]
grad_new_kernel = grad_kernel[:, units * 2:]
grad_new_recurrent_kernel = grad_recurrent_kernel[:, units * 2:]
grad_update_bias = grad_bias[0][:units]
grad_update_recurrent_bias = grad_bias[1][:units]
grad_reset_bias = grad_bias[0][units: units * 2]
grad_reset_recurrent_bias = grad_bias[1][units: units * 2]
grad_new_bias = grad_bias[0][units * 2:]
grad_new_recurrent_bias = grad_bias[1][units * 2:]
# Emit the expected values consumed by the Swift-side GRU test.
print(swift_tensor('expectedSum', sum_output))
print(swift_tensor('expectedStates', states))
print(swift_tensor('expectedFinalState', final_state))
print(swift_tensor('expectedGradX', grad_x))
print(swift_tensor('expectedGradInitialState', grad_initial_state))
print(swift_tensor('expectedGradUpdateKernel', grad_update_kernel))
print(swift_tensor('expectedGradResetKernel', grad_reset_kernel))
print(swift_tensor('expectedGradOutputKernel', grad_new_kernel))
print(swift_tensor('expectedGradUpdateRecurrentKernel', grad_update_recurrent_kernel))
print(swift_tensor('expectedGradResetRecurrentKernel', grad_reset_recurrent_kernel))
print(swift_tensor('expectedGradOutputRecurrentKernel', grad_new_recurrent_kernel))
print(swift_tensor('expectedGradUpdateBias', grad_update_bias))
print(swift_tensor('expectedGradResetBias', grad_reset_bias))
print(swift_tensor('expectedGradOutputBias', grad_new_bias))
print(swift_tensor('expectedGradUpdateRecurrentBias', grad_update_recurrent_bias))
print(swift_tensor('expectedGradResetRecurrentBias', grad_reset_recurrent_bias))
print(swift_tensor('expectedGradOutputRecurrentBias', grad_new_recurrent_bias))
|
[
"tensorflow.random.set_seed",
"tensorflow.reduce_sum",
"numpy.format_float_positional",
"tensorflow.keras.layers.GRU",
"tensorflow.keras.Input",
"numpy.array2string",
"tensorflow.keras.Model",
"tensorflow.keras.initializers.GlorotUniform",
"tensorflow.GradientTape"
] |
[((252, 273), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['(0)'], {}), '(0)\n', (270, 273), True, 'import tensorflow as tf\n'), ((973, 1155), 'tensorflow.keras.layers.GRU', 'tf.keras.layers.GRU', ([], {'input_dim': 'input_dim', 'units': 'units', 'activation': '"""tanh"""', 'recurrent_activation': '"""sigmoid"""', 'return_sequences': '(True)', 'return_state': '(True)', 'go_backwards': 'go_backwards'}), "(input_dim=input_dim, units=units, activation='tanh',\n recurrent_activation='sigmoid', return_sequences=True, return_state=\n True, go_backwards=go_backwards)\n", (992, 1155), True, 'import tensorflow as tf\n'), ((1180, 1227), 'tensorflow.keras.Input', 'tf.keras.Input', ([], {'shape': '[input_length, input_dim]'}), '(shape=[input_length, input_dim])\n', (1194, 1227), True, 'import tensorflow as tf\n'), ((1245, 1274), 'tensorflow.keras.Input', 'tf.keras.Input', ([], {'shape': '[units]'}), '(shape=[units])\n', (1259, 1274), True, 'import tensorflow as tf\n'), ((1379, 1450), 'tensorflow.keras.Model', 'tf.keras.Model', ([], {'inputs': '[x_input, initial_state_input]', 'outputs': '[output]'}), '(inputs=[x_input, initial_state_input], outputs=[output])\n', (1393, 1450), True, 'import tensorflow as tf\n'), ((2771, 2808), 'tensorflow.keras.initializers.GlorotUniform', 'tf.keras.initializers.GlorotUniform', ([], {}), '()\n', (2806, 2808), True, 'import tensorflow as tf\n'), ((3071, 3088), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (3086, 3088), True, 'import tensorflow as tf\n'), ((3219, 3247), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['states[0][-1]'], {}), '(states[0][-1])\n', (3232, 3247), True, 'import tensorflow as tf\n'), ((493, 538), 'numpy.format_float_positional', 'numpy.format_float_positional', (['x'], {'unique': '(True)'}), '(x, unique=True)\n', (522, 538), False, 'import numpy\n'), ((2867, 2904), 'tensorflow.keras.initializers.GlorotUniform', 'tf.keras.initializers.GlorotUniform', ([], {}), '()\n', (2902, 2904), True, 'import 
tensorflow as tf\n'), ((774, 836), 'numpy.array2string', 'numpy.array2string', (['tensor'], {'separator': '""","""', 'formatter': 'formatter'}), "(tensor, separator=',', formatter=formatter)\n", (792, 836), False, 'import numpy\n')]
|
import matplotlib.pyplot as plt
from skimage import exposure
import numpy as np
def plot_img_and_mask(img, mask):
    """Show *img* (min-max normalized, CLAHE-equalized, grayscale) with
    *mask* overlaid as red contour lines."""
    image = np.array(img)
    # Min-max normalize to [0, 1] before adaptive histogram equalization.
    lo, hi = image.min(), image.max()
    image = (image - lo) / (hi - lo)
    image = exposure.equalize_adapthist(image)
    fig, ax = plt.subplots(1, 1, figsize=(10, 10))
    ax.set_title('Input image')
    ax.imshow(image, cmap="gray")
    ax.contour(mask, colors="red")
    # Hide both tick axes for a cleaner image view.
    plt.xticks([])
    plt.yticks([])
    plt.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.yticks",
"numpy.array",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.subplots",
"skimage.exposure.equalize_adapthist"
] |
[((126, 139), 'numpy.array', 'np.array', (['img'], {}), '(img)\n', (134, 139), True, 'import numpy as np\n'), ((204, 236), 'skimage.exposure.equalize_adapthist', 'exposure.equalize_adapthist', (['img'], {}), '(img)\n', (231, 236), False, 'from skimage import exposure\n'), ((252, 288), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(10, 10)'}), '(1, 1, figsize=(10, 10))\n', (264, 288), True, 'import matplotlib.pyplot as plt\n'), ((427, 437), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (435, 437), True, 'import matplotlib.pyplot as plt\n'), ((392, 406), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]'], {}), '([])\n', (402, 406), True, 'import matplotlib.pyplot as plt\n'), ((408, 422), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[]'], {}), '([])\n', (418, 422), True, 'import matplotlib.pyplot as plt\n')]
|
import chex
import jax.numpy as jnp
import jax.random as jr
import numpyro
import numpyro.distributions as dist
import pytest
from numpyro.contrib.tfp import distributions as tfd
from numpyro.distributions import constraints
from gpjax.gps import Prior
from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params
from gpjax.kernels import RBF
from gpjax.likelihoods import Gaussian
from gpjax.parameters import initialise
# TODO: test conjugate posterior
def _get_conjugate_posterior_params() -> dict:
    """Build an initialised parameter dict for an RBF-prior x Gaussian
    conjugate posterior (the fixture every test below starts from)."""
    posterior = Prior(kernel=RBF()) * Gaussian()
    return initialise(posterior)
def test_numpyro_dict_priors_defaults_numpyro():
    """numpyro_dict_params wraps numpyro priors into {prior, param_type} entries."""
    demo_priors = {
        name: dist.LogNormal(loc=0.0, scale=1.0)
        for name in ("lengthscale", "variance", "obs_noise")
    }
    numpyro_params = numpyro_dict_params(demo_priors)
    # Every parameter name survives the conversion.
    assert set(numpyro_params) == set(demo_priors.keys())
    for name, prior in demo_priors.items():
        entry = numpyro_params[name]
        # Each entry carries exactly the prior and its param type ...
        assert set(entry.keys()) == {"prior", "param_type"}
        # ... and the prior object is passed through unchanged.
        chex.assert_equal(entry["prior"], prior)
def test_numpyro_dict_priors_defaults_tfp():
    """numpyro_dict_params handles TFP-backed priors identically to numpyro ones."""
    demo_priors = {
        name: tfd.LogNormal(loc=0.0, scale=1.0)
        for name in ("lengthscale", "variance", "obs_noise")
    }
    numpyro_params = numpyro_dict_params(demo_priors)
    # Every parameter name survives the conversion.
    assert set(numpyro_params) == set(demo_priors.keys())
    for name, prior in demo_priors.items():
        entry = numpyro_params[name]
        # Each entry carries exactly the prior and its param type ...
        assert set(entry.keys()) == {"prior", "param_type"}
        # ... and the prior object is passed through unchanged.
        chex.assert_equal(entry["prior"], prior)
@pytest.mark.parametrize(
    "prior",
    [
        dist.Gamma(concentration=1.0, rate=1.0),
        dist.HalfCauchy(scale=1.0),
        dist.LogNormal(loc=0.0, scale=1.0),
        tfd.Gamma(concentration=1.0, rate=1.0),
        tfd.HalfCauchy(loc=0.0, scale=1.0),
        tfd.LogNormal(loc=0.0, scale=1.0),
    ],
)
def test_numpyro_add_priors_all(prior):
    """add_priors with a bare prior applies that prior to every parameter."""
    original_params = _get_conjugate_posterior_params()
    numpyro_params = numpyro_dict_params(original_params)
    updated = add_priors(numpyro_params, prior)
    for entry in updated.values():
        # Every entry is re-tagged as a prior and carries the new distribution.
        chex.assert_equal(entry["param_type"], "prior")
        chex.assert_equal(entry["prior"], prior)
    # The source gpjax parameter dict must be left untouched.
    chex.assert_equal(original_params, _get_conjugate_posterior_params())
@pytest.mark.parametrize("variable", ["lengthscale", "obs_noise", "variance"])
@pytest.mark.parametrize(
    "prior",
    [
        dist.Gamma(concentration=1.0, rate=1.0),
        dist.HalfCauchy(scale=1.0),
        dist.LogNormal(loc=0.0, scale=1.0),
        tfd.Gamma(concentration=1.0, rate=1.0),
        tfd.HalfCauchy(loc=0.0, scale=1.0),
        tfd.LogNormal(loc=0.0, scale=1.0),
    ],
)
def test_numpyro_add_priors_str(variable, prior):
    """add_priors(params, name, prior) attaches *prior* to the named parameter."""
    original_params = _get_conjugate_posterior_params()
    updated = add_priors(numpyro_dict_params(original_params), variable, prior)
    entry = updated[variable]
    # The targeted entry is re-tagged as a prior carrying the new distribution.
    chex.assert_equal(entry["param_type"], "prior")
    chex.assert_equal(entry["prior"], prior)
    # The source gpjax parameter dict must be left untouched.
    chex.assert_equal(original_params, _get_conjugate_posterior_params())
@pytest.mark.parametrize("variable", ["lengthscale", "obs_noise", "variance"])
@pytest.mark.parametrize(
    "prior",
    [
        dist.Gamma(concentration=1.0, rate=1.0),
        dist.HalfCauchy(scale=1.0),
        dist.LogNormal(loc=0.0, scale=1.0),
        tfd.Gamma(concentration=1.0, rate=1.0),
        tfd.HalfCauchy(loc=0.0, scale=1.0),
        tfd.LogNormal(loc=0.0, scale=1.0),
    ],
)
def test_numpyro_add_priors_dict(variable, prior):
    """add_priors also accepts a {name: prior} mapping."""
    original_params = _get_conjugate_posterior_params()
    numpyro_params = numpyro_dict_params(original_params)
    # Single-entry mapping targeting one parameter.
    updated = add_priors(numpyro_params, {str(variable): prior})
    entry = updated[variable]
    # The targeted entry is re-tagged as a prior carrying the new distribution.
    chex.assert_equal(entry["param_type"], "prior")
    chex.assert_equal(entry["prior"], prior)
    # The source gpjax parameter dict must be left untouched.
    chex.assert_equal(original_params, _get_conjugate_posterior_params())
|
[
"gpjax.interfaces.numpyro.add_priors",
"numpyro.contrib.tfp.distributions.LogNormal",
"numpyro.contrib.tfp.distributions.Gamma",
"numpyro.distributions.Gamma",
"numpyro.distributions.LogNormal",
"gpjax.parameters.initialise",
"gpjax.interfaces.numpyro.numpyro_dict_params",
"chex.assert_equal",
"numpyro.distributions.HalfCauchy",
"numpyro.contrib.tfp.distributions.HalfCauchy",
"pytest.mark.parametrize",
"gpjax.likelihoods.Gaussian",
"gpjax.gps.Prior",
"gpjax.kernels.RBF"
] |
[((2852, 2929), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""variable"""', "['lengthscale', 'obs_noise', 'variance']"], {}), "('variable', ['lengthscale', 'obs_noise', 'variance'])\n", (2875, 2929), False, 'import pytest\n'), ((3821, 3898), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""variable"""', "['lengthscale', 'obs_noise', 'variance']"], {}), "('variable', ['lengthscale', 'obs_noise', 'variance'])\n", (3844, 3898), False, 'import pytest\n'), ((544, 549), 'gpjax.kernels.RBF', 'RBF', ([], {}), '()\n', (547, 549), False, 'from gpjax.kernels import RBF\n'), ((562, 582), 'gpjax.gps.Prior', 'Prior', ([], {'kernel': 'kernel'}), '(kernel=kernel)\n', (567, 582), False, 'from gpjax.gps import Prior\n'), ((593, 603), 'gpjax.likelihoods.Gaussian', 'Gaussian', ([], {}), '()\n', (601, 603), False, 'from gpjax.likelihoods import Gaussian\n'), ((645, 666), 'gpjax.parameters.initialise', 'initialise', (['posterior'], {}), '(posterior)\n', (655, 666), False, 'from gpjax.parameters import initialise\n'), ((957, 989), 'gpjax.interfaces.numpyro.numpyro_dict_params', 'numpyro_dict_params', (['demo_priors'], {}), '(demo_priors)\n', (976, 989), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params\n'), ((1597, 1629), 'gpjax.interfaces.numpyro.numpyro_dict_params', 'numpyro_dict_params', (['demo_priors'], {}), '(demo_priors)\n', (1616, 1629), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params\n'), ((2407, 2440), 'gpjax.interfaces.numpyro.numpyro_dict_params', 'numpyro_dict_params', (['gpjax_params'], {}), '(gpjax_params)\n', (2426, 2440), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params\n'), ((2488, 2521), 'gpjax.interfaces.numpyro.add_priors', 'add_priors', (['numpyro_params', 'prior'], {}), '(numpyro_params, prior)\n', (2498, 2521), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, 
numpyro_dict_params\n'), ((3384, 3417), 'gpjax.interfaces.numpyro.numpyro_dict_params', 'numpyro_dict_params', (['gpjax_params'], {}), '(gpjax_params)\n', (3403, 3417), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params\n'), ((3465, 3508), 'gpjax.interfaces.numpyro.add_priors', 'add_priors', (['numpyro_params', 'variable', 'prior'], {}), '(numpyro_params, variable, prior)\n', (3475, 3508), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params\n'), ((3558, 3628), 'chex.assert_equal', 'chex.assert_equal', (["new_numpyro_params[variable]['param_type']", '"""prior"""'], {}), "(new_numpyro_params[variable]['param_type'], 'prior')\n", (3575, 3628), False, 'import chex\n'), ((3633, 3696), 'chex.assert_equal', 'chex.assert_equal', (["new_numpyro_params[variable]['prior']", 'prior'], {}), "(new_numpyro_params[variable]['prior'], prior)\n", (3650, 3696), False, 'import chex\n'), ((4354, 4387), 'gpjax.interfaces.numpyro.numpyro_dict_params', 'numpyro_dict_params', (['gpjax_params'], {}), '(gpjax_params)\n', (4373, 4387), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params\n'), ((4508, 4550), 'gpjax.interfaces.numpyro.add_priors', 'add_priors', (['numpyro_params', 'new_param_dict'], {}), '(numpyro_params, new_param_dict)\n', (4518, 4550), False, 'from gpjax.interfaces.numpyro import add_constraints, add_priors, numpyro_dict_params\n'), ((4600, 4670), 'chex.assert_equal', 'chex.assert_equal', (["new_numpyro_params[variable]['param_type']", '"""prior"""'], {}), "(new_numpyro_params[variable]['param_type'], 'prior')\n", (4617, 4670), False, 'import chex\n'), ((4675, 4738), 'chex.assert_equal', 'chex.assert_equal', (["new_numpyro_params[variable]['prior']", 'prior'], {}), "(new_numpyro_params[variable]['prior'], prior)\n", (4692, 4738), False, 'import chex\n'), ((780, 814), 'numpyro.distributions.LogNormal', 'dist.LogNormal', ([], {'loc': '(0.0)', 'scale': 
'(1.0)'}), '(loc=0.0, scale=1.0)\n', (794, 814), True, 'import numpyro.distributions as dist\n'), ((836, 870), 'numpyro.distributions.LogNormal', 'dist.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (850, 870), True, 'import numpyro.distributions as dist\n'), ((893, 927), 'numpyro.distributions.LogNormal', 'dist.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (907, 927), True, 'import numpyro.distributions as dist\n'), ((1275, 1331), 'chex.assert_equal', 'chex.assert_equal', (["numpyro_params[ikey]['prior']", 'iparam'], {}), "(numpyro_params[ikey]['prior'], iparam)\n", (1292, 1331), False, 'import chex\n'), ((1423, 1456), 'numpyro.contrib.tfp.distributions.LogNormal', 'tfd.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (1436, 1456), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((1478, 1511), 'numpyro.contrib.tfp.distributions.LogNormal', 'tfd.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (1491, 1511), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((1534, 1567), 'numpyro.contrib.tfp.distributions.LogNormal', 'tfd.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (1547, 1567), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((1915, 1971), 'chex.assert_equal', 'chex.assert_equal', (["numpyro_params[ikey]['prior']", 'iparam'], {}), "(numpyro_params[ikey]['prior'], iparam)\n", (1932, 1971), False, 'import chex\n'), ((2627, 2676), 'chex.assert_equal', 'chex.assert_equal', (["iparams['param_type']", '"""prior"""'], {}), "(iparams['param_type'], 'prior')\n", (2644, 2676), False, 'import chex\n'), ((2685, 2727), 'chex.assert_equal', 'chex.assert_equal', (["iparams['prior']", 'prior'], {}), "(iparams['prior'], prior)\n", (2702, 2727), False, 'import chex\n'), ((2027, 2066), 'numpyro.distributions.Gamma', 'dist.Gamma', ([], {'concentration': '(1.0)', 'rate': 
'(1.0)'}), '(concentration=1.0, rate=1.0)\n', (2037, 2066), True, 'import numpyro.distributions as dist\n'), ((2076, 2102), 'numpyro.distributions.HalfCauchy', 'dist.HalfCauchy', ([], {'scale': '(1.0)'}), '(scale=1.0)\n', (2091, 2102), True, 'import numpyro.distributions as dist\n'), ((2112, 2146), 'numpyro.distributions.LogNormal', 'dist.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (2126, 2146), True, 'import numpyro.distributions as dist\n'), ((2156, 2194), 'numpyro.contrib.tfp.distributions.Gamma', 'tfd.Gamma', ([], {'concentration': '(1.0)', 'rate': '(1.0)'}), '(concentration=1.0, rate=1.0)\n', (2165, 2194), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((2204, 2238), 'numpyro.contrib.tfp.distributions.HalfCauchy', 'tfd.HalfCauchy', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (2218, 2238), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((2248, 2281), 'numpyro.contrib.tfp.distributions.LogNormal', 'tfd.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (2261, 2281), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((2994, 3033), 'numpyro.distributions.Gamma', 'dist.Gamma', ([], {'concentration': '(1.0)', 'rate': '(1.0)'}), '(concentration=1.0, rate=1.0)\n', (3004, 3033), True, 'import numpyro.distributions as dist\n'), ((3043, 3069), 'numpyro.distributions.HalfCauchy', 'dist.HalfCauchy', ([], {'scale': '(1.0)'}), '(scale=1.0)\n', (3058, 3069), True, 'import numpyro.distributions as dist\n'), ((3079, 3113), 'numpyro.distributions.LogNormal', 'dist.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (3093, 3113), True, 'import numpyro.distributions as dist\n'), ((3123, 3161), 'numpyro.contrib.tfp.distributions.Gamma', 'tfd.Gamma', ([], {'concentration': '(1.0)', 'rate': '(1.0)'}), '(concentration=1.0, rate=1.0)\n', (3132, 3161), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), 
((3171, 3205), 'numpyro.contrib.tfp.distributions.HalfCauchy', 'tfd.HalfCauchy', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (3185, 3205), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((3215, 3248), 'numpyro.contrib.tfp.distributions.LogNormal', 'tfd.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (3228, 3248), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((3963, 4002), 'numpyro.distributions.Gamma', 'dist.Gamma', ([], {'concentration': '(1.0)', 'rate': '(1.0)'}), '(concentration=1.0, rate=1.0)\n', (3973, 4002), True, 'import numpyro.distributions as dist\n'), ((4012, 4038), 'numpyro.distributions.HalfCauchy', 'dist.HalfCauchy', ([], {'scale': '(1.0)'}), '(scale=1.0)\n', (4027, 4038), True, 'import numpyro.distributions as dist\n'), ((4048, 4082), 'numpyro.distributions.LogNormal', 'dist.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (4062, 4082), True, 'import numpyro.distributions as dist\n'), ((4092, 4130), 'numpyro.contrib.tfp.distributions.Gamma', 'tfd.Gamma', ([], {'concentration': '(1.0)', 'rate': '(1.0)'}), '(concentration=1.0, rate=1.0)\n', (4101, 4130), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((4140, 4174), 'numpyro.contrib.tfp.distributions.HalfCauchy', 'tfd.HalfCauchy', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (4154, 4174), True, 'from numpyro.contrib.tfp import distributions as tfd\n'), ((4184, 4217), 'numpyro.contrib.tfp.distributions.LogNormal', 'tfd.LogNormal', ([], {'loc': '(0.0)', 'scale': '(1.0)'}), '(loc=0.0, scale=1.0)\n', (4197, 4217), True, 'from numpyro.contrib.tfp import distributions as tfd\n')]
|
#!/usr/bin/env python3
# The MIT License (MIT)
# Copyright (c) 2016,2017 Massachusetts Institute of Technology
#
# Author: <NAME>
# This software has been created in projects supported by the US National
# Science Foundation and NASA (PI: Pankratius)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# import data fetcher and AutoParams
from skdaccess.geo.groundwater import DataFetcher as WDF
from skdaccess.framework.param_class import *
# Create a data fetcher of stations within
# 35 < Latitude < 38, and -119 < Longitude < -118
# in the time period 2007-01-01 to 2016-12-31.
# cutoff=0.0 disables the fetcher's completeness filtering — TODO confirm
# against skdaccess docs.
fullDF = WDF([AutoParam(35), AutoParam(38), AutoParam(-119), AutoParam(-118)],
            '2007-01-01','2016-12-31',cutoff=0.0)
# Access the data wrapper (triggers the actual data retrieval).
fullDW = fullDF.output()
# Access the station metadata table.
meta_data = WDF.getStationMetadata()
# Get an iterator over the per-station data.
dataIt = fullDW.getIterator()
# The iterator returns (label, data) pairs; take the first two stations.
label_1, data_1 = next(dataIt)
label_2, data_2 = next(dataIt)
# Try to plot the first two groundwater stations; silently skip plotting
# when matplotlib is not installed (the data above is still fetched).
try:
    import matplotlib.pyplot as plt
    plt.figure().set_size_inches(14,4)
    plt.ylabel('Median Depth to Water Level')
    plt.title(label_1)
    plt.plot(data_1['Median Depth to Water']);
    plt.figure().set_size_inches(14,4)
    plt.ylabel('Median Depth to Water Level')
    plt.title(label_2);
    plt.plot(data_2['Median Depth to Water'],color='red')
    plt.show()
except ImportError as e:
    pass
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.ylabel",
"skdaccess.geo.groundwater.DataFetcher.getStationMetadata"
] |
[((1799, 1823), 'skdaccess.geo.groundwater.DataFetcher.getStationMetadata', 'WDF.getStationMetadata', ([], {}), '()\n', (1821, 1823), True, 'from skdaccess.geo.groundwater import DataFetcher as WDF\n'), ((2140, 2181), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Median Depth to Water Level"""'], {}), "('Median Depth to Water Level')\n", (2150, 2181), True, 'import matplotlib.pyplot as plt\n'), ((2186, 2204), 'matplotlib.pyplot.title', 'plt.title', (['label_1'], {}), '(label_1)\n', (2195, 2204), True, 'import matplotlib.pyplot as plt\n'), ((2209, 2250), 'matplotlib.pyplot.plot', 'plt.plot', (["data_1['Median Depth to Water']"], {}), "(data_1['Median Depth to Water'])\n", (2217, 2250), True, 'import matplotlib.pyplot as plt\n'), ((2295, 2336), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Median Depth to Water Level"""'], {}), "('Median Depth to Water Level')\n", (2305, 2336), True, 'import matplotlib.pyplot as plt\n'), ((2341, 2359), 'matplotlib.pyplot.title', 'plt.title', (['label_2'], {}), '(label_2)\n', (2350, 2359), True, 'import matplotlib.pyplot as plt\n'), ((2365, 2419), 'matplotlib.pyplot.plot', 'plt.plot', (["data_2['Median Depth to Water']"], {'color': '"""red"""'}), "(data_2['Median Depth to Water'], color='red')\n", (2373, 2419), True, 'import matplotlib.pyplot as plt\n'), ((2424, 2434), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2432, 2434), True, 'import matplotlib.pyplot as plt\n'), ((2101, 2113), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2111, 2113), True, 'import matplotlib.pyplot as plt\n'), ((2256, 2268), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2266, 2268), True, 'import matplotlib.pyplot as plt\n')]
|
import os
# Absolute, symlink-resolved path of the directory containing this module.
STOCK_DIR = os.path.dirname(os.path.realpath(__file__))
|
[
"os.path.realpath"
] |
[((39, 65), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (55, 65), False, 'import os\n')]
|
# coding: utf-8
"""
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
import re
class ExperimentBillingEntryDTO(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, name=None, project_name=None, trashed=None, state=None, time_of_completion=None, time_of_creation=None, project_id=None, organization_name=None, group_id=None, is_notebook=None, id=None, short_id=None, time_of_entered_running_state=None, worker_type=None, environment=None, responding=None, project_deleted=None, organization_id=None, owner=None, deleted=None):
"""
ExperimentBillingEntryDTO - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'name': 'str',
'project_name': 'str',
'trashed': 'bool',
'state': 'ExperimentStateDTO',
'time_of_completion': 'datetime',
'time_of_creation': 'datetime',
'project_id': 'str',
'organization_name': 'str',
'group_id': 'str',
'is_notebook': 'bool',
'id': 'str',
'short_id': 'str',
'time_of_entered_running_state': 'datetime',
'worker_type': 'str',
'environment': 'str',
'responding': 'bool',
'project_deleted': 'bool',
'organization_id': 'str',
'owner': 'str',
'deleted': 'bool'
}
self.attribute_map = {
'name': 'name',
'project_name': 'projectName',
'trashed': 'trashed',
'state': 'state',
'time_of_completion': 'timeOfCompletion',
'time_of_creation': 'timeOfCreation',
'project_id': 'projectId',
'organization_name': 'organizationName',
'group_id': 'groupId',
'is_notebook': 'isNotebook',
'id': 'id',
'short_id': 'shortId',
'time_of_entered_running_state': 'timeOfEnteredRunningState',
'worker_type': 'workerType',
'environment': 'environment',
'responding': 'responding',
'project_deleted': 'projectDeleted',
'organization_id': 'organizationId',
'owner': 'owner',
'deleted': 'deleted'
}
self._name = name
self._project_name = project_name
self._trashed = trashed
self._state = state
self._time_of_completion = time_of_completion
self._time_of_creation = time_of_creation
self._project_id = project_id
self._organization_name = organization_name
self._group_id = group_id
self._is_notebook = is_notebook
self._id = id
self._short_id = short_id
self._time_of_entered_running_state = time_of_entered_running_state
self._worker_type = worker_type
self._environment = environment
self._responding = responding
self._project_deleted = project_deleted
self._organization_id = organization_id
self._owner = owner
self._deleted = deleted
@property
def name(self):
"""
Gets the name of this ExperimentBillingEntryDTO.
:return: The name of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this ExperimentBillingEntryDTO.
:param name: The name of this ExperimentBillingEntryDTO.
:type: str
"""
self._name = name
@property
def project_name(self):
"""
Gets the project_name of this ExperimentBillingEntryDTO.
:return: The project_name of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._project_name
@project_name.setter
def project_name(self, project_name):
"""
Sets the project_name of this ExperimentBillingEntryDTO.
:param project_name: The project_name of this ExperimentBillingEntryDTO.
:type: str
"""
self._project_name = project_name
@property
def trashed(self):
"""
Gets the trashed of this ExperimentBillingEntryDTO.
:return: The trashed of this ExperimentBillingEntryDTO.
:rtype: bool
"""
return self._trashed
@trashed.setter
def trashed(self, trashed):
"""
Sets the trashed of this ExperimentBillingEntryDTO.
:param trashed: The trashed of this ExperimentBillingEntryDTO.
:type: bool
"""
self._trashed = trashed
@property
def state(self):
"""
Gets the state of this ExperimentBillingEntryDTO.
:return: The state of this ExperimentBillingEntryDTO.
:rtype: ExperimentStateDTO
"""
return self._state
@state.setter
def state(self, state):
"""
Sets the state of this ExperimentBillingEntryDTO.
:param state: The state of this ExperimentBillingEntryDTO.
:type: ExperimentStateDTO
"""
self._state = state
@property
def time_of_completion(self):
"""
Gets the time_of_completion of this ExperimentBillingEntryDTO.
:return: The time_of_completion of this ExperimentBillingEntryDTO.
:rtype: datetime
"""
return self._time_of_completion
@time_of_completion.setter
def time_of_completion(self, time_of_completion):
"""
Sets the time_of_completion of this ExperimentBillingEntryDTO.
:param time_of_completion: The time_of_completion of this ExperimentBillingEntryDTO.
:type: datetime
"""
self._time_of_completion = time_of_completion
@property
def time_of_creation(self):
"""
Gets the time_of_creation of this ExperimentBillingEntryDTO.
:return: The time_of_creation of this ExperimentBillingEntryDTO.
:rtype: datetime
"""
return self._time_of_creation
@time_of_creation.setter
def time_of_creation(self, time_of_creation):
"""
Sets the time_of_creation of this ExperimentBillingEntryDTO.
:param time_of_creation: The time_of_creation of this ExperimentBillingEntryDTO.
:type: datetime
"""
self._time_of_creation = time_of_creation
@property
def project_id(self):
"""
Gets the project_id of this ExperimentBillingEntryDTO.
:return: The project_id of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._project_id
@project_id.setter
def project_id(self, project_id):
"""
Sets the project_id of this ExperimentBillingEntryDTO.
:param project_id: The project_id of this ExperimentBillingEntryDTO.
:type: str
"""
self._project_id = project_id
@property
def organization_name(self):
"""
Gets the organization_name of this ExperimentBillingEntryDTO.
:return: The organization_name of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._organization_name
@organization_name.setter
def organization_name(self, organization_name):
"""
Sets the organization_name of this ExperimentBillingEntryDTO.
:param organization_name: The organization_name of this ExperimentBillingEntryDTO.
:type: str
"""
self._organization_name = organization_name
@property
def group_id(self):
"""
Gets the group_id of this ExperimentBillingEntryDTO.
:return: The group_id of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._group_id
@group_id.setter
def group_id(self, group_id):
"""
Sets the group_id of this ExperimentBillingEntryDTO.
:param group_id: The group_id of this ExperimentBillingEntryDTO.
:type: str
"""
self._group_id = group_id
@property
def is_notebook(self):
"""
Gets the is_notebook of this ExperimentBillingEntryDTO.
:return: The is_notebook of this ExperimentBillingEntryDTO.
:rtype: bool
"""
return self._is_notebook
@is_notebook.setter
def is_notebook(self, is_notebook):
"""
Sets the is_notebook of this ExperimentBillingEntryDTO.
:param is_notebook: The is_notebook of this ExperimentBillingEntryDTO.
:type: bool
"""
self._is_notebook = is_notebook
@property
def id(self):
"""
Gets the id of this ExperimentBillingEntryDTO.
:return: The id of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this ExperimentBillingEntryDTO.
:param id: The id of this ExperimentBillingEntryDTO.
:type: str
"""
self._id = id
@property
def short_id(self):
"""
Gets the short_id of this ExperimentBillingEntryDTO.
:return: The short_id of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._short_id
@short_id.setter
def short_id(self, short_id):
"""
Sets the short_id of this ExperimentBillingEntryDTO.
:param short_id: The short_id of this ExperimentBillingEntryDTO.
:type: str
"""
self._short_id = short_id
@property
def time_of_entered_running_state(self):
"""
Gets the time_of_entered_running_state of this ExperimentBillingEntryDTO.
:return: The time_of_entered_running_state of this ExperimentBillingEntryDTO.
:rtype: datetime
"""
return self._time_of_entered_running_state
@time_of_entered_running_state.setter
def time_of_entered_running_state(self, time_of_entered_running_state):
"""
Sets the time_of_entered_running_state of this ExperimentBillingEntryDTO.
:param time_of_entered_running_state: The time_of_entered_running_state of this ExperimentBillingEntryDTO.
:type: datetime
"""
self._time_of_entered_running_state = time_of_entered_running_state
@property
def worker_type(self):
"""
Gets the worker_type of this ExperimentBillingEntryDTO.
:return: The worker_type of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._worker_type
@worker_type.setter
def worker_type(self, worker_type):
"""
Sets the worker_type of this ExperimentBillingEntryDTO.
:param worker_type: The worker_type of this ExperimentBillingEntryDTO.
:type: str
"""
self._worker_type = worker_type
@property
def environment(self):
"""
Gets the environment of this ExperimentBillingEntryDTO.
:return: The environment of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._environment
@environment.setter
def environment(self, environment):
"""
Sets the environment of this ExperimentBillingEntryDTO.
:param environment: The environment of this ExperimentBillingEntryDTO.
:type: str
"""
self._environment = environment
@property
def responding(self):
"""
Gets the responding of this ExperimentBillingEntryDTO.
:return: The responding of this ExperimentBillingEntryDTO.
:rtype: bool
"""
return self._responding
@responding.setter
def responding(self, responding):
"""
Sets the responding of this ExperimentBillingEntryDTO.
:param responding: The responding of this ExperimentBillingEntryDTO.
:type: bool
"""
self._responding = responding
@property
def project_deleted(self):
"""
Gets the project_deleted of this ExperimentBillingEntryDTO.
:return: The project_deleted of this ExperimentBillingEntryDTO.
:rtype: bool
"""
return self._project_deleted
@project_deleted.setter
def project_deleted(self, project_deleted):
"""
Sets the project_deleted of this ExperimentBillingEntryDTO.
:param project_deleted: The project_deleted of this ExperimentBillingEntryDTO.
:type: bool
"""
self._project_deleted = project_deleted
@property
def organization_id(self):
"""
Gets the organization_id of this ExperimentBillingEntryDTO.
:return: The organization_id of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._organization_id
@organization_id.setter
def organization_id(self, organization_id):
"""
Sets the organization_id of this ExperimentBillingEntryDTO.
:param organization_id: The organization_id of this ExperimentBillingEntryDTO.
:type: str
"""
self._organization_id = organization_id
@property
def owner(self):
"""
Gets the owner of this ExperimentBillingEntryDTO.
:return: The owner of this ExperimentBillingEntryDTO.
:rtype: str
"""
return self._owner
@owner.setter
def owner(self, owner):
"""
Sets the owner of this ExperimentBillingEntryDTO.
:param owner: The owner of this ExperimentBillingEntryDTO.
:type: str
"""
self._owner = owner
@property
def deleted(self):
"""
Gets the deleted of this ExperimentBillingEntryDTO.
:return: The deleted of this ExperimentBillingEntryDTO.
:rtype: bool
"""
return self._deleted
@deleted.setter
def deleted(self, deleted):
"""
Sets the deleted of this ExperimentBillingEntryDTO.
:param deleted: The deleted of this ExperimentBillingEntryDTO.
:type: bool
"""
self._deleted = deleted
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"six.iteritems"
] |
[((15395, 15424), 'six.iteritems', 'iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (15404, 15424), False, 'from six import iteritems\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9b1 on 2015-11-08 23:28
from django.conf import settings
from django.db import migrations, models
from django.utils.translation.trans_real import get_supported_language_variant
import pybb.util
from pybb.compat import get_image_field_class
class Migration(migrations.Migration):
dependencies = [
("pybb", "0004_slugs_required"),
]
operations = [
migrations.AlterField(
model_name="post",
name="user_ip",
field=models.GenericIPAddressField(
blank=True, default="0.0.0.0", null=True, verbose_name="User IP"
),
),
migrations.AlterField(
model_name="profile",
name="avatar",
field=get_image_field_class()(
blank=True,
null=True,
upload_to=pybb.util.FilePathGenerator(to="pybb/avatar"),
verbose_name="Avatar",
),
),
migrations.AlterField(
model_name="profile",
name="language",
field=models.CharField(
blank=True,
choices=settings.LANGUAGES,
default=get_supported_language_variant(
settings.LANGUAGE_CODE, strict=True
),
max_length=10,
verbose_name="Language",
),
),
migrations.AlterField(
model_name="profile",
name="time_zone",
field=models.FloatField(
choices=[
(-12.0, "-12"),
(-11.0, "-11"),
(-10.0, "-10"),
(-9.5, "-09.5"),
(-9.0, "-09"),
(-8.5, "-08.5"),
(-8.0, "-08 PST"),
(-7.0, "-07 MST"),
(-6.0, "-06 CST"),
(-5.0, "-05 EST"),
(-4.0, "-04 AST"),
(-3.5, "-03.5"),
(-3.0, "-03 ADT"),
(-2.0, "-02"),
(-1.0, "-01"),
(0.0, "00 GMT"),
(1.0, "+01 CET"),
(2.0, "+02"),
(3.0, "+03"),
(3.5, "+03.5"),
(4.0, "+04"),
(4.5, "+04.5"),
(5.0, "+05"),
(5.5, "+05.5"),
(6.0, "+06"),
(6.5, "+06.5"),
(7.0, "+07"),
(8.0, "+08"),
(9.0, "+09"),
(9.5, "+09.5"),
(10.0, "+10"),
(10.5, "+10.5"),
(11.0, "+11"),
(11.5, "+11.5"),
(12.0, "+12"),
(13.0, "+13"),
(14.0, "+14"),
],
default=3.0,
verbose_name="Time zone",
),
),
]
|
[
"pybb.compat.get_image_field_class",
"django.db.models.GenericIPAddressField",
"django.db.models.FloatField",
"django.utils.translation.trans_real.get_supported_language_variant"
] |
[((529, 627), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'blank': '(True)', 'default': '"""0.0.0.0"""', 'null': '(True)', 'verbose_name': '"""User IP"""'}), "(blank=True, default='0.0.0.0', null=True,\n verbose_name='User IP')\n", (557, 627), False, 'from django.db import migrations, models\n'), ((1538, 2238), 'django.db.models.FloatField', 'models.FloatField', ([], {'choices': "[(-12.0, '-12'), (-11.0, '-11'), (-10.0, '-10'), (-9.5, '-09.5'), (-9.0,\n '-09'), (-8.5, '-08.5'), (-8.0, '-08 PST'), (-7.0, '-07 MST'), (-6.0,\n '-06 CST'), (-5.0, '-05 EST'), (-4.0, '-04 AST'), (-3.5, '-03.5'), (-\n 3.0, '-03 ADT'), (-2.0, '-02'), (-1.0, '-01'), (0.0, '00 GMT'), (1.0,\n '+01 CET'), (2.0, '+02'), (3.0, '+03'), (3.5, '+03.5'), (4.0, '+04'), (\n 4.5, '+04.5'), (5.0, '+05'), (5.5, '+05.5'), (6.0, '+06'), (6.5,\n '+06.5'), (7.0, '+07'), (8.0, '+08'), (9.0, '+09'), (9.5, '+09.5'), (\n 10.0, '+10'), (10.5, '+10.5'), (11.0, '+11'), (11.5, '+11.5'), (12.0,\n '+12'), (13.0, '+13'), (14.0, '+14')]", 'default': '(3.0)', 'verbose_name': '"""Time zone"""'}), "(choices=[(-12.0, '-12'), (-11.0, '-11'), (-10.0, '-10'),\n (-9.5, '-09.5'), (-9.0, '-09'), (-8.5, '-08.5'), (-8.0, '-08 PST'), (-\n 7.0, '-07 MST'), (-6.0, '-06 CST'), (-5.0, '-05 EST'), (-4.0, '-04 AST'\n ), (-3.5, '-03.5'), (-3.0, '-03 ADT'), (-2.0, '-02'), (-1.0, '-01'), (\n 0.0, '00 GMT'), (1.0, '+01 CET'), (2.0, '+02'), (3.0, '+03'), (3.5,\n '+03.5'), (4.0, '+04'), (4.5, '+04.5'), (5.0, '+05'), (5.5, '+05.5'), (\n 6.0, '+06'), (6.5, '+06.5'), (7.0, '+07'), (8.0, '+08'), (9.0, '+09'),\n (9.5, '+09.5'), (10.0, '+10'), (10.5, '+10.5'), (11.0, '+11'), (11.5,\n '+11.5'), (12.0, '+12'), (13.0, '+13'), (14.0, '+14')], default=3.0,\n verbose_name='Time zone')\n", (1555, 2238), False, 'from django.db import migrations, models\n'), ((776, 799), 'pybb.compat.get_image_field_class', 'get_image_field_class', ([], {}), '()\n', (797, 799), False, 'from pybb.compat import get_image_field_class\n'), 
((1220, 1287), 'django.utils.translation.trans_real.get_supported_language_variant', 'get_supported_language_variant', (['settings.LANGUAGE_CODE'], {'strict': '(True)'}), '(settings.LANGUAGE_CODE, strict=True)\n', (1250, 1287), False, 'from django.utils.translation.trans_real import get_supported_language_variant\n')]
|
import msvcrt
import multiprocessing as mp
import os
import queue
import time
import psutil
from datetime import datetime, timedelta
from rlbot import version
from rlbot.botmanager.helper_process_manager import HelperProcessManager
from rlbot.base_extension import BaseExtension
from rlbot.botmanager.bot_manager_flatbuffer import BotManagerFlatbuffer
from rlbot.botmanager.bot_manager_independent import BotManagerIndependent
from rlbot.botmanager.bot_manager_struct import BotManagerStruct
from rlbot.parsing.rlbot_config_parser import create_bot_config_layout, parse_configurations, EXTENSION_PATH_KEY
from rlbot.utils.class_importer import import_class_with_base, import_agent
from rlbot.utils.logging_utils import get_logger, DEFAULT_LOGGER
from rlbot.utils import process_configuration
from rlbot.utils.structures.game_interface import GameInterface
from rlbot.utils.structures.quick_chats import QuickChatManager
from rlbot.utils.structures.start_match_structures import MatchSettings
# By default, look for rlbot.cfg in the current working directory.
DEFAULT_RLBOT_CONFIG_LOCATION = os.path.realpath('./rlbot.cfg')
RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration'
class SetupManager:
has_started = False
num_participants = None
names = None
teams = None
python_files = None
parameters = None
start_match_configuration = None
agent_metadata_queue = None
extension = None
sub_processes = []
def __init__(self):
self.logger = get_logger(DEFAULT_LOGGER)
self.game_interface = GameInterface(self.logger)
self.quick_chat_manager = QuickChatManager(self.game_interface)
self.quit_event = mp.Event()
self.helper_process_manager = HelperProcessManager(self.quit_event)
self.bot_quit_callbacks = []
self.agent_metadata_map = {}
def startup(self):
if self.has_started:
return
version.print_current_release_notes()
self.game_interface.inject_dll()
self.game_interface.load_interface()
self.agent_metadata_queue = mp.Queue()
self.has_started = True
def load_config(self, framework_config=None, config_location=DEFAULT_RLBOT_CONFIG_LOCATION, bot_configs=None, looks_configs=None):
"""
:param framework_config: A config object that indicates what bots to run. May come from parsing a rlbot.cfg.
:param config_location: The location of the rlbot.cfg file, which will be used to resolve relative paths.
:param bot_configs: Overrides for bot configurations.
:param looks_configs: Overrides for looks configurations.
"""
self.logger.debug('reading the configs')
# Set up RLBot.cfg
if framework_config is None:
framework_config = create_bot_config_layout()
framework_config.parse_file(config_location, max_index=10)
if bot_configs is None:
bot_configs = {}
if looks_configs is None:
looks_configs = {}
# Open anonymous shared memory for entire GameInputPacket and map buffer
self.start_match_configuration = MatchSettings()
self.num_participants, self.names, self.teams, self.python_files, self.parameters = parse_configurations(
self.start_match_configuration, framework_config, config_location, bot_configs, looks_configs)
self.game_interface.participants = self.num_participants
self.game_interface.start_match_configuration = self.start_match_configuration
extension_path = framework_config.get(RLBOT_CONFIGURATION_HEADER, EXTENSION_PATH_KEY)
if extension_path is not None and extension_path != "None":
self.load_extension(extension_path)
def launch_bot_processes(self):
self.logger.debug("Launching bot processes")
self.kill_sub_processes()
# Launch processes
for i in range(self.num_participants):
if self.start_match_configuration.player_configuration[i].rlbot_controlled:
queue_holder = self.quick_chat_manager.create_queue_for_bot(i, self.teams[i])
callback = mp.Event()
self.bot_quit_callbacks.append(callback)
process = mp.Process(target=SetupManager.run_agent,
args=(self.quit_event, callback, self.parameters[i],
str(self.start_match_configuration.player_configuration[i].name),
self.teams[i], i, self.python_files[i], self.agent_metadata_queue, queue_holder))
process.start()
self.sub_processes.append(process)
self.logger.debug("Successfully started bot processes")
def run(self):
self.quick_chat_manager.start_manager(self.quit_event)
self.logger.debug("Successfully started quick chat manager")
self.game_interface.start_match()
self.logger.info("Match has started")
self.logger.info("Press any character to exit")
while not self.quit_event.is_set():
if msvcrt.kbhit():
msvcrt.getch()
self.shut_down()
break
try:
single_agent_metadata = self.agent_metadata_queue.get(timeout=1)
self.helper_process_manager.start_or_update_helper_process(single_agent_metadata)
self.agent_metadata_map[single_agent_metadata.index] = single_agent_metadata
process_configuration.configure_processes(self.agent_metadata_map, self.logger)
except queue.Empty:
pass
except Exception as ex:
self.logger.error(ex)
pass
def shut_down(self, time_limit=5, kill_all_pids=False):
self.logger.info("Shutting Down")
self.quit_event.set()
end_time = datetime.now() + timedelta(seconds=time_limit)
# Wait for all processes to terminate before terminating main process
terminated = False
while not terminated:
terminated = True
for callback in self.bot_quit_callbacks:
if not callback.is_set():
terminated = False
time.sleep(0.1)
if datetime.now() > end_time:
self.logger.info("Taking too long to quit, trying harder...")
self.kill_sub_processes()
break
if kill_all_pids:
self.kill_process_ids()
self.logger.info("Shut down complete!")
def load_extension(self, extension_filename):
extension_class = import_class_with_base(extension_filename, BaseExtension).get_loaded_class()
self.extension = extension_class(self)
self.game_interface.set_extension(self.extension)
@staticmethod
def run_agent(terminate_event, callback_event, config_file, name, team, index, python_file,
agent_telemetry_queue, queue_holder):
agent_class_wrapper = import_agent(python_file)
if hasattr(agent_class_wrapper.get_loaded_class(), "run_independently"):
bm = BotManagerIndependent(terminate_event, callback_event, config_file, name, team,
index, agent_class_wrapper, agent_telemetry_queue, queue_holder)
elif hasattr(agent_class_wrapper.get_loaded_class(), "get_output_flatbuffer"):
bm = BotManagerFlatbuffer(terminate_event, callback_event, config_file, name, team,
index, agent_class_wrapper, agent_telemetry_queue, queue_holder)
else:
bm = BotManagerStruct(terminate_event, callback_event, config_file, name, team,
index, agent_class_wrapper, agent_telemetry_queue, queue_holder)
bm.run()
def kill_sub_processes(self):
for process in self.sub_processes:
process.terminate()
self.sub_processes = []
def kill_process_ids(self):
pids = process_configuration.extract_all_pids(self.agent_metadata_map)
for pid in pids:
try:
parent = psutil.Process(pid)
for child in parent.children(recursive=True): # or parent.children() for recursive=False
self.logger.info("Killing {} (child of {})".format(child.pid, pid))
try:
child.kill()
except psutil._exceptions.NoSuchProcess:
self.logger.info("Already dead.")
self.logger.info("Killing {}".format(pid))
try:
parent.kill()
except psutil._exceptions.NoSuchProcess:
self.logger.info("Already dead.")
except psutil.NoSuchProcess:
self.logger.info("Can't fetch parent process, already dead.")
|
[
"rlbot.utils.class_importer.import_class_with_base",
"multiprocessing.Queue",
"rlbot.parsing.rlbot_config_parser.parse_configurations",
"msvcrt.kbhit",
"rlbot.version.print_current_release_notes",
"rlbot.utils.process_configuration.configure_processes",
"rlbot.parsing.rlbot_config_parser.create_bot_config_layout",
"datetime.timedelta",
"rlbot.utils.structures.game_interface.GameInterface",
"multiprocessing.Event",
"rlbot.utils.structures.quick_chats.QuickChatManager",
"datetime.datetime.now",
"rlbot.botmanager.helper_process_manager.HelperProcessManager",
"os.path.realpath",
"time.sleep",
"rlbot.utils.class_importer.import_agent",
"rlbot.utils.structures.start_match_structures.MatchSettings",
"psutil.Process",
"rlbot.utils.logging_utils.get_logger",
"msvcrt.getch",
"rlbot.utils.process_configuration.extract_all_pids",
"rlbot.botmanager.bot_manager_struct.BotManagerStruct",
"rlbot.botmanager.bot_manager_flatbuffer.BotManagerFlatbuffer",
"rlbot.botmanager.bot_manager_independent.BotManagerIndependent"
] |
[((1093, 1124), 'os.path.realpath', 'os.path.realpath', (['"""./rlbot.cfg"""'], {}), "('./rlbot.cfg')\n", (1109, 1124), False, 'import os\n'), ((1490, 1516), 'rlbot.utils.logging_utils.get_logger', 'get_logger', (['DEFAULT_LOGGER'], {}), '(DEFAULT_LOGGER)\n', (1500, 1516), False, 'from rlbot.utils.logging_utils import get_logger, DEFAULT_LOGGER\n'), ((1547, 1573), 'rlbot.utils.structures.game_interface.GameInterface', 'GameInterface', (['self.logger'], {}), '(self.logger)\n', (1560, 1573), False, 'from rlbot.utils.structures.game_interface import GameInterface\n'), ((1608, 1645), 'rlbot.utils.structures.quick_chats.QuickChatManager', 'QuickChatManager', (['self.game_interface'], {}), '(self.game_interface)\n', (1624, 1645), False, 'from rlbot.utils.structures.quick_chats import QuickChatManager\n'), ((1672, 1682), 'multiprocessing.Event', 'mp.Event', ([], {}), '()\n', (1680, 1682), True, 'import multiprocessing as mp\n'), ((1721, 1758), 'rlbot.botmanager.helper_process_manager.HelperProcessManager', 'HelperProcessManager', (['self.quit_event'], {}), '(self.quit_event)\n', (1741, 1758), False, 'from rlbot.botmanager.helper_process_manager import HelperProcessManager\n'), ((1913, 1950), 'rlbot.version.print_current_release_notes', 'version.print_current_release_notes', ([], {}), '()\n', (1948, 1950), False, 'from rlbot import version\n'), ((2073, 2083), 'multiprocessing.Queue', 'mp.Queue', ([], {}), '()\n', (2081, 2083), True, 'import multiprocessing as mp\n'), ((3127, 3142), 'rlbot.utils.structures.start_match_structures.MatchSettings', 'MatchSettings', ([], {}), '()\n', (3140, 3142), False, 'from rlbot.utils.structures.start_match_structures import MatchSettings\n'), ((3236, 3355), 'rlbot.parsing.rlbot_config_parser.parse_configurations', 'parse_configurations', (['self.start_match_configuration', 'framework_config', 'config_location', 'bot_configs', 'looks_configs'], {}), '(self.start_match_configuration, framework_config,\n config_location, bot_configs, 
looks_configs)\n', (3256, 3355), False, 'from rlbot.parsing.rlbot_config_parser import create_bot_config_layout, parse_configurations, EXTENSION_PATH_KEY\n'), ((7020, 7045), 'rlbot.utils.class_importer.import_agent', 'import_agent', (['python_file'], {}), '(python_file)\n', (7032, 7045), False, 'from rlbot.utils.class_importer import import_class_with_base, import_agent\n'), ((8027, 8090), 'rlbot.utils.process_configuration.extract_all_pids', 'process_configuration.extract_all_pids', (['self.agent_metadata_map'], {}), '(self.agent_metadata_map)\n', (8065, 8090), False, 'from rlbot.utils import process_configuration\n'), ((2780, 2806), 'rlbot.parsing.rlbot_config_parser.create_bot_config_layout', 'create_bot_config_layout', ([], {}), '()\n', (2804, 2806), False, 'from rlbot.parsing.rlbot_config_parser import create_bot_config_layout, parse_configurations, EXTENSION_PATH_KEY\n'), ((5101, 5115), 'msvcrt.kbhit', 'msvcrt.kbhit', ([], {}), '()\n', (5113, 5115), False, 'import msvcrt\n'), ((5889, 5903), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5901, 5903), False, 'from datetime import datetime, timedelta\n'), ((5906, 5935), 'datetime.timedelta', 'timedelta', ([], {'seconds': 'time_limit'}), '(seconds=time_limit)\n', (5915, 5935), False, 'from datetime import datetime, timedelta\n'), ((6248, 6263), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (6258, 6263), False, 'import time\n'), ((7145, 7293), 'rlbot.botmanager.bot_manager_independent.BotManagerIndependent', 'BotManagerIndependent', (['terminate_event', 'callback_event', 'config_file', 'name', 'team', 'index', 'agent_class_wrapper', 'agent_telemetry_queue', 'queue_holder'], {}), '(terminate_event, callback_event, config_file, name,\n team, index, agent_class_wrapper, agent_telemetry_queue, queue_holder)\n', (7166, 7293), False, 'from rlbot.botmanager.bot_manager_independent import BotManagerIndependent\n'), ((4137, 4147), 'multiprocessing.Event', 'mp.Event', ([], {}), '()\n', (4145, 4147), 
True, 'import multiprocessing as mp\n'), ((5133, 5147), 'msvcrt.getch', 'msvcrt.getch', ([], {}), '()\n', (5145, 5147), False, 'import msvcrt\n'), ((5508, 5587), 'rlbot.utils.process_configuration.configure_processes', 'process_configuration.configure_processes', (['self.agent_metadata_map', 'self.logger'], {}), '(self.agent_metadata_map, self.logger)\n', (5549, 5587), False, 'from rlbot.utils import process_configuration\n'), ((6279, 6293), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (6291, 6293), False, 'from datetime import datetime, timedelta\n'), ((6636, 6693), 'rlbot.utils.class_importer.import_class_with_base', 'import_class_with_base', (['extension_filename', 'BaseExtension'], {}), '(extension_filename, BaseExtension)\n', (6658, 6693), False, 'from rlbot.utils.class_importer import import_class_with_base, import_agent\n'), ((7433, 7580), 'rlbot.botmanager.bot_manager_flatbuffer.BotManagerFlatbuffer', 'BotManagerFlatbuffer', (['terminate_event', 'callback_event', 'config_file', 'name', 'team', 'index', 'agent_class_wrapper', 'agent_telemetry_queue', 'queue_holder'], {}), '(terminate_event, callback_event, config_file, name,\n team, index, agent_class_wrapper, agent_telemetry_queue, queue_holder)\n', (7453, 7580), False, 'from rlbot.botmanager.bot_manager_flatbuffer import BotManagerFlatbuffer\n'), ((7646, 7789), 'rlbot.botmanager.bot_manager_struct.BotManagerStruct', 'BotManagerStruct', (['terminate_event', 'callback_event', 'config_file', 'name', 'team', 'index', 'agent_class_wrapper', 'agent_telemetry_queue', 'queue_holder'], {}), '(terminate_event, callback_event, config_file, name, team,\n index, agent_class_wrapper, agent_telemetry_queue, queue_holder)\n', (7662, 7789), False, 'from rlbot.botmanager.bot_manager_struct import BotManagerStruct\n'), ((8158, 8177), 'psutil.Process', 'psutil.Process', (['pid'], {}), '(pid)\n', (8172, 8177), False, 'import psutil\n')]
|
import sys
import os
import glob
import numpy as np
import pytest
from pyDeltaRCM.model import DeltaModel
from pyDeltaRCM import shared_tools
# utilities for file writing
def create_temporary_file(tmp_path, file_name):
d = tmp_path / 'configs'
d.mkdir(parents=True, exist_ok=True)
p = d / file_name
f = open(p, "w")
return p, f
def write_parameter_to_file(f, varname, varvalue):
f.write(varname + ': ' + str(varvalue) + '\n')
def write_matrix_to_file(f, keys, lists):
# assert len(keys) == len(lists)
f.write('matrix' + ': ' + '\n')
for i in range(len(keys)):
f.write(' ' + keys[i] + ': ' + '\n')
for j in range(len(lists[i])):
f.write(' ' + '- ' + str(lists[i][j]) + '\n')
def write_set_to_file(f, set_list):
f.write('set' + ': ' + '\n')
for i, _set in enumerate(set_list):
f.write(' - {')
for j, (k, v) in enumerate(_set.items()):
f.write(k + ': ' + str(v) + ', ')
f.write('}' + '\n')
def yaml_from_dict(tmp_path, file_name, _dict=None):
p, f = create_temporary_file(tmp_path, file_name)
if (_dict is None):
_dict = {'out_dir': tmp_path / 'out_dir'}
elif ('out_dir' not in _dict.keys()):
_dict['out_dir'] = tmp_path / 'out_dir'
for k in _dict.keys():
write_parameter_to_file(f, k, _dict[k])
f.close()
return p
@pytest.fixture(scope='function')
def test_DeltaModel(tmp_path):
file_name = 'user_parameters.yaml'
p, f = create_temporary_file(tmp_path, file_name)
write_parameter_to_file(f, 'out_dir', tmp_path / 'out_dir')
write_parameter_to_file(f, 'Length', 10.0)
write_parameter_to_file(f, 'Width', 10.0)
write_parameter_to_file(f, 'seed', 0)
write_parameter_to_file(f, 'dx', 1.0)
write_parameter_to_file(f, 'L0_meters', 1.0)
write_parameter_to_file(f, 'S0', 0.0002)
write_parameter_to_file(f, 'itermax', 1)
write_parameter_to_file(f, 'Np_water', 10)
write_parameter_to_file(f, 'u0', 1.0)
write_parameter_to_file(f, 'N0_meters', 2.0)
write_parameter_to_file(f, 'h0', 1.0)
write_parameter_to_file(f, 'H_SL', 0.0)
write_parameter_to_file(f, 'SLR', 0.001)
write_parameter_to_file(f, 'Np_sed', 10)
write_parameter_to_file(f, 'f_bedload', 0.5)
write_parameter_to_file(f, 'C0_percent', 0.1)
write_parameter_to_file(f, 'toggle_subsidence', False)
write_parameter_to_file(f, 'subsidence_rate', 0.0)
write_parameter_to_file(f, 'start_subsidence', 50.)
write_parameter_to_file(f, 'save_eta_figs', False)
write_parameter_to_file(f, 'save_stage_figs', False)
write_parameter_to_file(f, 'save_depth_figs', False)
write_parameter_to_file(f, 'save_discharge_figs', False)
write_parameter_to_file(f, 'save_velocity_figs', False)
write_parameter_to_file(f, 'save_eta_grids', False)
write_parameter_to_file(f, 'save_stage_grids', False)
write_parameter_to_file(f, 'save_depth_grids', False)
write_parameter_to_file(f, 'save_discharge_grids', False)
write_parameter_to_file(f, 'save_velocity_grids', False)
write_parameter_to_file(f, 'save_dt', 500)
f.close()
_delta = DeltaModel(input_file=p)
return _delta
class FastIteratingDeltaModel:
"""A Fast iterating DeltaModel
This class is useful in patching the DeltaModel for timing tests. The
patched DeltaModel uses the random number generation internally, so it
will verify functionality in any checkpointing scenarios, and overwriting
only the `solve_water_and_sediment_timestep` method removes most of the jitting compilation
time and much of the actual computation time.
"""
def solve_water_and_sediment_timestep(self):
"""PATCH"""
def _get_random_field(shp):
"""Get a field or randoms using the shared function.
It is critical to use the `shared_tools.get_random_uniform` for
reproducibility.
"""
field = np.zeros(shp, dtype=np.float32)
for i in range(shp[0]):
for j in range(shp[1]):
field[i, j] = shared_tools.get_random_uniform(1)
return field
shp = self.eta.shape
self.eta += _get_random_field(shp)
self.uw += _get_random_field(shp)
self.ux += _get_random_field(shp)
self.uy += _get_random_field(shp)
self.depth += _get_random_field(shp)
self.stage += _get_random_field(shp)
def read_endtime_from_log(log_folder):
_logs = glob.glob(os.path.join(log_folder, '*.log'))
assert len(_logs) == 1 # log file exists
with open(_logs[0], 'r') as _logfile:
_lines = _logfile.readlines()
_t = 0
for i, _line in enumerate(_lines):
if 'Time: ' in _line:
_t = _line.split(' ')[6]
_t = _t.strip(' ;')
return float(_t)
|
[
"pyDeltaRCM.model.DeltaModel",
"numpy.zeros",
"pytest.fixture",
"pyDeltaRCM.shared_tools.get_random_uniform",
"os.path.join"
] |
[((1390, 1422), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1404, 1422), False, 'import pytest\n'), ((3169, 3193), 'pyDeltaRCM.model.DeltaModel', 'DeltaModel', ([], {'input_file': 'p'}), '(input_file=p)\n', (3179, 3193), False, 'from pyDeltaRCM.model import DeltaModel\n'), ((4530, 4563), 'os.path.join', 'os.path.join', (['log_folder', '"""*.log"""'], {}), "(log_folder, '*.log')\n", (4542, 4563), False, 'import os\n'), ((3976, 4007), 'numpy.zeros', 'np.zeros', (['shp'], {'dtype': 'np.float32'}), '(shp, dtype=np.float32)\n', (3984, 4007), True, 'import numpy as np\n'), ((4118, 4152), 'pyDeltaRCM.shared_tools.get_random_uniform', 'shared_tools.get_random_uniform', (['(1)'], {}), '(1)\n', (4149, 4152), False, 'from pyDeltaRCM import shared_tools\n')]
|
#!/usr/bin/env python
# Сконвертировать .env файл в строку для gclou
import re
import sys
from pathlib import Path
# Repository root (two levels above this script), used as the default .env location.
BASE_DIR = Path(__file__).parent.parent.absolute()
def convert(filename):
    """Parse a .env file and render it as a gcloud-style env-var string.

    Lines of the form KEY=VALUE are collected (later duplicates win);
    comments, blank lines and anything else are ignored.  Returns e.g.
    'A="1",B="2"'.
    """
    result = {}
    # \w+ (not \w*) so a line starting with '=' is not treated as a pair
    pattern = re.compile(r"^\w+=.*")
    # `with` closes the file deterministically (the original leaked the handle)
    with open(filename, "r") as fh:
        for line in fh:
            if pattern.match(line):
                # split only on the first '=' so values may themselves contain '='
                key, value = line.strip().split("=", 1)
                result[key] = value
    return ",".join(f'{key}="{value}"' for key, value in result.items())
if __name__ == "__main__":
    # Fall back to <repo>/.env when no path is given on the command line.
    # (The original `sys.argv[1] or ...` raised IndexError with no args.)
    if len(sys.argv) > 1 and sys.argv[1]:
        _env = Path(sys.argv[1])
    else:
        _env = BASE_DIR.joinpath(".env")
    print(convert(_env))
    sys.exit(0)
|
[
"pathlib.Path",
"re.match",
"sys.exit",
"re.compile"
] |
[((217, 241), 're.compile', 're.compile', (['"""^[\\\\w]*=.*"""'], {}), "('^[\\\\w]*=.*')\n", (227, 241), False, 'import re\n'), ((593, 604), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (601, 604), False, 'import sys\n'), ((302, 319), 're.match', 're.match', (['r', 'line'], {}), '(r, line)\n', (310, 319), False, 'import re\n'), ((128, 142), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (132, 142), False, 'from pathlib import Path\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of OTC Tool released under MIT license.
# Copyright (C) 2016 T-systems <NAME>, <NAME>
from otcclient.core.OtcConfig import OtcConfig
import requests
# Suppress urllib3 InsecureRequestWarning noise: every call below uses verify=False.
requests.packages.urllib3.disable_warnings()  # @UndefinedVariable
def delete(requestUrl):
    """Issue an HTTP DELETE via httpcall and return the response body text.

    Returns None when the request fails (the error is printed), matching
    the behaviour of the other verb helpers in this module.
    """
    # Initialize up front: previously `ret` was unbound when httpcall raised,
    # turning the failure into an UnboundLocalError at `return ret`.
    ret = None
    try:
        response = httpcall(requestUrl, delete=True)
        ret = response.text
    except Exception as e:
        print(str(e))
    return ret
def get(requestUrl):
    """Perform an HTTP GET via httpcall.

    Side effect: when the response carries a non-empty 'X-Subject-Token'
    header, it is stored in OtcConfig.TOKEN for later requests.
    Returns the response body text, or None when anything fails (the
    error is printed).
    """
    body = None
    try:
        response = httpcall(requestUrl)
        subject_token = response.headers.get('X-Subject-Token')
        if subject_token is not None and len(subject_token) > 0:
            OtcConfig.TOKEN = subject_token
        body = response.text
    except Exception as err:
        print(str(err))
    return body
def post(requestUrl, postbody):
    """Perform an HTTP POST via httpcall; *postbody* is serialized with str().

    Side effect: a non-empty 'X-Subject-Token' response header is stored
    in OtcConfig.TOKEN.  Returns the response body text, or None on any
    failure (the error is printed).
    """
    body = None
    try:
        response = httpcall(requestUrl, datastr=str(postbody))
        subject_token = response.headers.get('X-Subject-Token')
        if subject_token is not None and len(subject_token) > 0:
            OtcConfig.TOKEN = subject_token
        body = response.text
    except Exception as err:
        print(str(err))
    return body
def put(requestUrl, postbody):
    """Perform an HTTP PUT via httpcall; *postbody* is serialized with str().

    Side effect: a non-empty 'X-Subject-Token' response header is stored
    in OtcConfig.TOKEN.  Returns the response body text, or None on any
    failure (the error is printed).
    """
    body = None
    try:
        response = httpcall(requestUrl, datastr=str(postbody), put=True)
        subject_token = response.headers.get('X-Subject-Token')
        if subject_token is not None and len(subject_token) > 0:
            OtcConfig.TOKEN = subject_token
        body = response.text
    except Exception as err:
        print(str(err))
    return body
def httpcall(url, datastr=None, delete=None, put=None):
    """Dispatch one HTTP request to *url* with JSON headers and return the
    raw requests response.

    Verb selection (first match wins): put=True -> PUT, datastr truthy ->
    POST, delete=True -> DELETE, otherwise GET.  When OtcConfig.TOKEN is
    non-empty it is sent as X-Auth-Token.  TLS verification is disabled
    (verify=False) for every call.
    """
    session = requests.session()
    headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
    if len(OtcConfig.TOKEN) > 0:
        headers['X-Auth-Token'] = OtcConfig.TOKEN
    if put:
        response = session.put(url, datastr, headers=headers, verify=False)
    elif datastr:
        response = session.post(url, datastr, headers=headers, verify=False)
    elif delete:
        response = session.delete(url, headers=headers, verify=False)
    else:
        response = session.get(url, headers=headers, verify=False)
    return response
|
[
"requests.session",
"requests.packages.urllib3.disable_warnings"
] |
[((218, 262), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (260, 262), False, 'import requests\n'), ((2053, 2071), 'requests.session', 'requests.session', ([], {}), '()\n', (2069, 2071), False, 'import requests\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
# Public API of this generated module.
__all__ = [
    'GetWebAppPremierAddOnSlotResult',
    'AwaitableGetWebAppPremierAddOnSlotResult',
    'get_web_app_premier_add_on_slot',
]
@pulumi.output_type
class GetWebAppPremierAddOnSlotResult:
    """
    Premier add-on.
    """
    def __init__(__self__, id=None, kind=None, location=None, marketplace_offer=None, marketplace_publisher=None, name=None, product=None, sku=None, tags=None, type=None, vendor=None):
        # Generated code: @pulumi.output_type introspects this class, so each
        # value is validated and stored via pulumi.set (not plain attributes).
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if kind and not isinstance(kind, str):
            raise TypeError("Expected argument 'kind' to be a str")
        pulumi.set(__self__, "kind", kind)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if marketplace_offer and not isinstance(marketplace_offer, str):
            raise TypeError("Expected argument 'marketplace_offer' to be a str")
        pulumi.set(__self__, "marketplace_offer", marketplace_offer)
        if marketplace_publisher and not isinstance(marketplace_publisher, str):
            raise TypeError("Expected argument 'marketplace_publisher' to be a str")
        pulumi.set(__self__, "marketplace_publisher", marketplace_publisher)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if product and not isinstance(product, str):
            raise TypeError("Expected argument 'product' to be a str")
        pulumi.set(__self__, "product", product)
        if sku and not isinstance(sku, str):
            raise TypeError("Expected argument 'sku' to be a str")
        pulumi.set(__self__, "sku", sku)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
        if vendor and not isinstance(vendor, str):
            raise TypeError("Expected argument 'vendor' to be a str")
        pulumi.set(__self__, "vendor", vendor)
    # Read-only accessors below: each property retrieves the stored value
    # through pulumi.get, mirroring the constructor's pulumi.set calls.
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource Id.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def kind(self) -> Optional[str]:
        """
        Kind of resource.
        """
        return pulumi.get(self, "kind")
    @property
    @pulumi.getter
    def location(self) -> str:
        """
        Resource Location.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter(name="marketplaceOffer")
    def marketplace_offer(self) -> Optional[str]:
        """
        Premier add on Marketplace offer.
        """
        return pulumi.get(self, "marketplace_offer")
    @property
    @pulumi.getter(name="marketplacePublisher")
    def marketplace_publisher(self) -> Optional[str]:
        """
        Premier add on Marketplace publisher.
        """
        return pulumi.get(self, "marketplace_publisher")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource Name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def product(self) -> Optional[str]:
        """
        Premier add on Product.
        """
        return pulumi.get(self, "product")
    @property
    @pulumi.getter
    def sku(self) -> Optional[str]:
        """
        Premier add on SKU.
        """
        return pulumi.get(self, "sku")
    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
    @property
    @pulumi.getter
    def vendor(self) -> Optional[str]:
        """
        Premier add on Vendor.
        """
        return pulumi.get(self, "vendor")
class AwaitableGetWebAppPremierAddOnSlotResult(GetWebAppPremierAddOnSlotResult):
    # Awaitable wrapper so the result can be used with `await`: the dead
    # `if False: yield` makes __await__ a generator that yields nothing and
    # immediately returns a plain copy of the result.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetWebAppPremierAddOnSlotResult(
            id=self.id,
            kind=self.kind,
            location=self.location,
            marketplace_offer=self.marketplace_offer,
            marketplace_publisher=self.marketplace_publisher,
            name=self.name,
            product=self.product,
            sku=self.sku,
            tags=self.tags,
            type=self.type,
            vendor=self.vendor)
def get_web_app_premier_add_on_slot(name: Optional[str] = None,
                                    premier_add_on_name: Optional[str] = None,
                                    resource_group_name: Optional[str] = None,
                                    slot: Optional[str] = None,
                                    opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWebAppPremierAddOnSlotResult:
    """
    Premier add-on.

    :param str name: Name of the app.
    :param str premier_add_on_name: Add-on name.
    :param str resource_group_name: Name of the resource group to which the resource belongs.
    :param str slot: Name of the deployment slot. If a slot is not specified, the API will get the named add-on for the production slot.
    """
    # Arguments are keyed by the provider's camelCase names.
    __args__ = {
        'name': name,
        'premierAddOnName': premier_add_on_name,
        'resourceGroupName': resource_group_name,
        'slot': slot,
    }
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    __ret__ = pulumi.runtime.invoke('azure-native:web/v20200601:getWebAppPremierAddOnSlot', __args__, opts=opts, typ=GetWebAppPremierAddOnSlotResult).value
    # Re-wrap the plain result in the awaitable subclass, field by field.
    return AwaitableGetWebAppPremierAddOnSlotResult(
        id=__ret__.id,
        kind=__ret__.kind,
        location=__ret__.location,
        marketplace_offer=__ret__.marketplace_offer,
        marketplace_publisher=__ret__.marketplace_publisher,
        name=__ret__.name,
        product=__ret__.product,
        sku=__ret__.sku,
        tags=__ret__.tags,
        type=__ret__.type,
        vendor=__ret__.vendor)
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] |
[((3141, 3179), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""marketplaceOffer"""'}), "(name='marketplaceOffer')\n", (3154, 3179), False, 'import pulumi\n'), ((3369, 3411), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""marketplacePublisher"""'}), "(name='marketplacePublisher')\n", (3382, 3411), False, 'import pulumi\n'), ((855, 885), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (865, 885), False, 'import pulumi\n'), ((1009, 1043), 'pulumi.set', 'pulumi.set', (['__self__', '"""kind"""', 'kind'], {}), "(__self__, 'kind', kind)\n", (1019, 1043), False, 'import pulumi\n'), ((1179, 1221), 'pulumi.set', 'pulumi.set', (['__self__', '"""location"""', 'location'], {}), "(__self__, 'location', location)\n", (1189, 1221), False, 'import pulumi\n'), ((1384, 1444), 'pulumi.set', 'pulumi.set', (['__self__', '"""marketplace_offer"""', 'marketplace_offer'], {}), "(__self__, 'marketplace_offer', marketplace_offer)\n", (1394, 1444), False, 'import pulumi\n'), ((1619, 1687), 'pulumi.set', 'pulumi.set', (['__self__', '"""marketplace_publisher"""', 'marketplace_publisher'], {}), "(__self__, 'marketplace_publisher', marketplace_publisher)\n", (1629, 1687), False, 'import pulumi\n'), ((1811, 1845), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (1821, 1845), False, 'import pulumi\n'), ((1978, 2018), 'pulumi.set', 'pulumi.set', (['__self__', '"""product"""', 'product'], {}), "(__self__, 'product', product)\n", (1988, 2018), False, 'import pulumi\n'), ((2139, 2171), 'pulumi.set', 'pulumi.set', (['__self__', '"""sku"""', 'sku'], {}), "(__self__, 'sku', sku)\n", (2149, 2171), False, 'import pulumi\n'), ((2297, 2331), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (2307, 2331), False, 'import pulumi\n'), ((2455, 2489), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (2465, 2489), 
False, 'import pulumi\n'), ((2619, 2657), 'pulumi.set', 'pulumi.set', (['__self__', '"""vendor"""', 'vendor'], {}), "(__self__, 'vendor', vendor)\n", (2629, 2657), False, 'import pulumi\n'), ((2777, 2799), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (2787, 2799), False, 'import pulumi\n'), ((2936, 2960), 'pulumi.get', 'pulumi.get', (['self', '"""kind"""'], {}), "(self, 'kind')\n", (2946, 2960), False, 'import pulumi\n'), ((3092, 3120), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (3102, 3120), False, 'import pulumi\n'), ((3311, 3348), 'pulumi.get', 'pulumi.get', (['self', '"""marketplace_offer"""'], {}), "(self, 'marketplace_offer')\n", (3321, 3348), False, 'import pulumi\n'), ((3551, 3592), 'pulumi.get', 'pulumi.get', (['self', '"""marketplace_publisher"""'], {}), "(self, 'marketplace_publisher')\n", (3561, 3592), False, 'import pulumi\n'), ((3716, 3740), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (3726, 3740), False, 'import pulumi\n'), ((3886, 3913), 'pulumi.get', 'pulumi.get', (['self', '"""product"""'], {}), "(self, 'product')\n", (3896, 3913), False, 'import pulumi\n'), ((4051, 4074), 'pulumi.get', 'pulumi.get', (['self', '"""sku"""'], {}), "(self, 'sku')\n", (4061, 4074), False, 'import pulumi\n'), ((4222, 4246), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (4232, 4246), False, 'import pulumi\n'), ((4370, 4394), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (4380, 4394), False, 'import pulumi\n'), ((4538, 4564), 'pulumi.get', 'pulumi.get', (['self', '"""vendor"""'], {}), "(self, 'vendor')\n", (4548, 4564), False, 'import pulumi\n'), ((6179, 6201), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (6199, 6201), False, 'import pulumi\n'), ((6293, 6432), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure-native:web/v20200601:getWebAppPremierAddOnSlot"""', 
'__args__'], {'opts': 'opts', 'typ': 'GetWebAppPremierAddOnSlotResult'}), "('azure-native:web/v20200601:getWebAppPremierAddOnSlot',\n __args__, opts=opts, typ=GetWebAppPremierAddOnSlotResult)\n", (6314, 6432), False, 'import pulumi\n')]
|
import json
import logging
import boto3
import botocore
from django.conf import settings
# Module-level logger named after this module.
logger = logging.getLogger(__name__)
def s3_resource():
    """Return a boto3 S3 resource authenticated with the AWS credentials
    configured in Django settings."""
    return boto3.resource(
        's3',
        aws_access_key_id=settings.AWS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_KEY,
    )
def dynamo_client():
    """Return a boto3 DynamoDB client for the region and credentials
    configured in Django settings."""
    return boto3.client(
        'dynamodb',
        region_name=settings.AWS_REGION,
        aws_access_key_id=settings.AWS_KEY_ID,
        aws_secret_access_key=settings.AWS_SECRET_KEY,
    )
def get_files(bucket, path):
    """List the file names recorded in <path>/directoryList.json in *bucket*.

    Returns the sorted file names, or None when the listing object does not
    exist ('NoSuchKey'); any other S3 client error is re-raised.
    """
    resource = s3_resource()
    try:
        listing = resource.Object(bucket,
                                  "{}/directoryList.json".format(path))
        sizes = json.loads(listing.get()['Body'].read())['fileSizes']
        return sorted(sizes.keys())
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == 'NoSuchKey':
            return None
        raise e
def get_report(run_id):
    """Fetch and parse the pipeline report JSON for *run_id* from S3.

    Returns the parsed report, or None when no report object exists
    ('NoSuchKey'); any other S3 client error is re-raised.
    """
    logger.info("Getting report at {}/{}.json".format(settings.PIPELINE_REPORT_BUCKET, run_id))
    resource = s3_resource()
    try:
        report_obj = resource.Object(settings.PIPELINE_REPORT_BUCKET,
                                      "{}.json".format(run_id))
        return json.loads(report_obj.get()['Body'].read())
    except botocore.exceptions.ClientError as e:
        if e.response['Error']['Code'] == 'NoSuchKey':
            return None
        raise e
def get_run(run_id, branch):
    """Look up a single pipeline run record in DynamoDB.

    The master branch uses the base table name; any other branch uses a
    '<table>-<branch>' suffixed table.
    """
    client = dynamo_client()
    table = ("{}".format(settings.DYNAMO_PIPELINE_TABLE)
             if branch == 'master'
             else "{}-{}".format(settings.DYNAMO_PIPELINE_TABLE, branch))
    record = client.get_item(TableName=table, Key={'run_id': {'S': run_id}})
    return record['Item']
def list_runs(branch):
    """Return every pipeline run record for *branch*, newest first
    (sorted by the 'run_start' string attribute, descending)."""
    client = dynamo_client()
    table = ("{}".format(settings.DYNAMO_PIPELINE_TABLE)
             if branch == 'master'
             else "{}-{}".format(settings.DYNAMO_PIPELINE_TABLE, branch))
    scanned = client.scan(TableName=table)
    runs = scanned['Items']
    runs.sort(key=lambda item: item['run_start']['S'], reverse=True)
    return runs
|
[
"boto3.resource",
"logging.getLogger",
"boto3.client"
] |
[((100, 127), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (117, 127), False, 'import logging\n'), ((159, 269), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {'aws_access_key_id': 'settings.AWS_KEY_ID', 'aws_secret_access_key': 'settings.AWS_SECRET_KEY'}), "('s3', aws_access_key_id=settings.AWS_KEY_ID,\n aws_secret_access_key=settings.AWS_SECRET_KEY)\n", (173, 269), False, 'import boto3\n'), ((322, 470), 'boto3.client', 'boto3.client', (['"""dynamodb"""'], {'region_name': 'settings.AWS_REGION', 'aws_access_key_id': 'settings.AWS_KEY_ID', 'aws_secret_access_key': 'settings.AWS_SECRET_KEY'}), "('dynamodb', region_name=settings.AWS_REGION, aws_access_key_id\n =settings.AWS_KEY_ID, aws_secret_access_key=settings.AWS_SECRET_KEY)\n", (334, 470), False, 'import boto3\n')]
|
import logging
from typing import Any, Callable, Dict
# Logger used to report provider-lookup problems in Injector.get().
logger = logging.getLogger('dependency injection')
class Injector:
    """Minimal service locator mapping tokens to provider callables.

    Fix: `providers` used to be a *class* attribute, so every Injector
    instance (and subclass) silently shared one registry.  It is now
    created per instance in __init__.
    """

    def __init__(self):
        # Per-instance registry; a class-level dict would be shared state.
        self.providers: Dict[Any, Callable] = {}

    def provide(self, token: Any, provider: Callable):
        """Register *provider* as the factory for *token*."""
        self.providers[token] = provider

    def get(self, token: Any):
        """Call the provider registered for *token* and return its value.

        An unknown token is logged and then raises KeyError (from the
        lookup); a provider returning None is logged but the None is
        still returned, matching the original behaviour.
        """
        if token not in self.providers:
            logger.error(f'Cannot find provider for {token}')
        value = self.providers[token]()
        if value is None:
            logger.error(f'Provider for {token} returned None')
        return value
|
[
"logging.getLogger"
] |
[((64, 105), 'logging.getLogger', 'logging.getLogger', (['"""dependency injection"""'], {}), "('dependency injection')\n", (81, 105), False, 'import logging\n')]
|
from app import settings
from app.utils.nessus import Batch
from sqlalchemy import create_engine
# Pull the Nessus connection details and target table names from app settings.
server = settings.NESSUS_SERVER
username = settings.NESSUS_USERNAME
password = settings.NESSUS_PASSWORD
folder_exclude = settings.NESSUS_FOLDER_EXCLUDE
scan_exclude = settings.NESSUS_SCAN_EXCLUDE
nessus_table = settings.OMNIANA_NESSUS_TABLE
nessus_history_table = settings.OMNIANA_HISTORY_TABLE
database = settings.NESSUS_SQLALCHEMY_PATH
# SQLite engine backing the import; echo=False keeps SQL statement logging quiet.
engine = create_engine('sqlite:///'+database, echo=False)
# Run the batch import of Nessus scan results into the configured tables.
Batch.run_batch(engine=engine,nessus_server=server,nessus_username=username,
                nessus_password=password,nessus_folder_exclude=folder_exclude,
                nessus_scan_exclude=scan_exclude,nessus_table=nessus_table,
                nessus_history_table=nessus_history_table)
|
[
"sqlalchemy.create_engine",
"app.utils.nessus.Batch.run_batch"
] |
[((447, 497), 'sqlalchemy.create_engine', 'create_engine', (["('sqlite:///' + database)"], {'echo': '(False)'}), "('sqlite:///' + database, echo=False)\n", (460, 497), False, 'from sqlalchemy import create_engine\n'), ((497, 758), 'app.utils.nessus.Batch.run_batch', 'Batch.run_batch', ([], {'engine': 'engine', 'nessus_server': 'server', 'nessus_username': 'username', 'nessus_password': 'password', 'nessus_folder_exclude': 'folder_exclude', 'nessus_scan_exclude': 'scan_exclude', 'nessus_table': 'nessus_table', 'nessus_history_table': 'nessus_history_table'}), '(engine=engine, nessus_server=server, nessus_username=\n username, nessus_password=password, nessus_folder_exclude=\n folder_exclude, nessus_scan_exclude=scan_exclude, nessus_table=\n nessus_table, nessus_history_table=nessus_history_table)\n', (512, 758), False, 'from app.utils.nessus import Batch\n')]
|
# for more up to date documentation go to https://automation.trendmicro.com/xdr/home
# Tested with XDR V2.0 API, Trend Micro XDR Product Manager Team, November 2nd 2020
import requests
import json
import tmconfig # tmconfig.py with your api keys / tokens
# Connection settings: regional API base URL and account token from tmconfig.py.
url_base = tmconfig.region['us'] # use the right region
token = tmconfig.xdr_token # get your account API token
# Common headers for every request; the token is sent as a Bearer credential.
header = {'Authorization': 'Bearer ' + token, 'Content-Type': 'application/json;charset=utf-8'}
# wrapper for XDR get requests
def callgetapi(url_path, query_params):
    """GET url_base+url_path with the shared auth headers.

    JSON responses are returned pretty-printed (indent=4); anything else
    is returned as raw text.  A non-200 status or any error is printed
    and the process exits with -1.
    """
    try:
        resp = requests.get(url_base + url_path, params=query_params, headers=header)
        if resp.status_code != 200:
            raise Exception(str(resp.status_code) + " " + resp.text)
        if 'application/json' in resp.headers.get('Content-Type', ''):
            return json.dumps(resp.json(), indent=4)
        return resp.text
    except Exception as err:
        print("callgetapi : " + str(err))
        exit(-1)
# Get a workbench detail
def getWorkbench(id):
    """Fetch the detail record for one workbench alert by its id.

    Errors are printed and terminate the process with -1, matching the
    other helpers in this script.
    """
    try:
        path = '/v2.0/xdr/workbench/workbenches/{workbenchId}'.format(workbenchId=id)
        return callgetapi(path, {})
    except Exception as err:
        print("getWorkbench : " + id + " " + str(err))
        exit(-1)
# Get a list of workbenches ids for a time range
def listWorkbenchIds(strstart, strend,intoffset, intlimit):
    """Collect workbench ids from the SIEM events endpoint over a time range.

    Pages through the results: `x` counts records consumed so far and is
    used as the next request's offset.  NOTE(review): the last loop
    iteration still issues one extra fetch whose records are never read —
    presumably harmless but wasteful; confirm before relying on call counts.
    """
    # lstWorkbenches = listWorkbenchIds('2020-08-29T13:52:30.000Z', '2020-10-14T13:52:40.000Z', offset, limit)
    try:
        ids = []
        url_path = '/v2.0/siem/events'
        query_params = {'source': 'all', 'startDateTime': strstart,
                        'endDateTime': strend,
                        'sortBy': '-createdTime',
                        'offset': intoffset,
                        'limit': intlimit }
        json2 = json.loads(callgetapi(url_path, query_params))
        if "totalCount" in json2['data']:
            #print("Key exist in JSON data")
            totalcount = json2['data']['totalCount']
            count_all = int(totalcount)
            # ceiling division: number of pages needed to cover all records
            countpages = int(float(count_all + (intlimit - 1)) / float(intlimit))
            x = 0
            for page in range(1,countpages + 1):
                for sub in json2['data']['workbenchRecords']:
                    # print(getWorkbench(sub['workbenchId']))
                    ids.append(sub['workbenchId'])
                    #print(str(x))
                    x = x + 1
                # re-query with the offset advanced past everything consumed
                query_params = {'source': 'all', 'startDateTime': strstart,
                                'endDateTime': strend,
                                'sortBy': '-createdTime',
                                'offset': x,
                                'limit': intlimit}
                json2 = json.loads(callgetapi(url_path, query_params))
        return ids
    except Exception as err:
        print("listWorkbenchIds from " + strstart + " to " + strend + ' ' + str(err))
        exit(-1)
# Get a list of workbenches for a time range
def listWorkbenches(strstart, strend, intoffset, intlimit):
    """Return the raw SIEM-event response pages for a time range.

    Example: listWorkbenches('2020-08-29T13:52:30.000Z',
    '2020-10-14T13:52:40.000Z', 0, 50)

    Fix: the paging offset used to advance by *intoffset* (0 in this
    script's caller), so every request re-fetched the same first page.
    It now advances by *intlimit*, the page size actually consumed.
    """
    try:
        pages = []
        url_path = '/v2.0/siem/events'
        # TODO(review): investigationStatus filtering is not implemented.
        # null array = all statuses; [0]: New; [1]: In Progress;
        # [2]: Resolved: True Positive; [3]: Resolved: False Positive
        query_params = {'source': 'all', 'startDateTime': strstart,
                        'endDateTime': strend,
                        'sortBy': '-createdTime',
                        'offset': intoffset,
                        'limit': intlimit}
        page = json.loads(callgetapi(url_path, query_params))
        if "totalCount" in page['data']:
            count_all = int(page['data']['totalCount'])
            # ceiling division: pages needed to cover all records
            countpages = int(float(count_all + (intlimit - 1)) / float(intlimit))
            offset = intoffset
            for _ in range(countpages):
                pages.append(page)
                offset += intlimit  # was `+= intoffset`: a no-op when offset starts at 0
                query_params['offset'] = offset
                page = json.loads(callgetapi(url_path, query_params))
        return pages
    except Exception as err:
        print("listWorkbenches from " + strstart + " to " + strend + ' ' + str(err))
        exit(-1)
# Test #1
# Smoke test: collect all workbench ids over a fixed two-month window.
print("List Workbench ID's ")
offset = 0
limit = 50
lstWorkbenches = listWorkbenchIds('2020-08-29T13:52:30.000Z', '2020-10-14T13:52:40.000Z', offset, limit)
print(str(len(lstWorkbenches)))
print(lstWorkbenches)
# Test #2
# Fetch the full detail record for every id collected above.
print("List Workbenches by going through WB Id list")
for wb in lstWorkbenches:
    print(getWorkbench(wb))
# test #3
# Fetch whole response pages (raw JSON) instead of individual records.
print("Test Call Workbenches List")
lstWorkbenches = listWorkbenches('2020-08-29T13:52:30.000Z', '2020-10-14T13:52:40.000Z', offset, 25)
print(str(len(lstWorkbenches)))
print(lstWorkbenches)
for wb in lstWorkbenches:
    print(json.dumps(wb, indent=4))
|
[
"requests.get",
"json.dumps"
] |
[((573, 643), 'requests.get', 'requests.get', (['(url_base + url_path)'], {'params': 'query_params', 'headers': 'header'}), '(url_base + url_path, params=query_params, headers=header)\n', (585, 643), False, 'import requests\n'), ((5530, 5554), 'json.dumps', 'json.dumps', (['wb'], {'indent': '(4)'}), '(wb, indent=4)\n', (5540, 5554), False, 'import json\n')]
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 CERN.
#
# Invenio-Records-Resources is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see LICENSE file for more
# details.
"""File schema."""
from marshmallow import Schema
from marshmallow.fields import UUID, Dict, Number, Str
from marshmallow_utils.fields import GenMethod, Links, SanitizedUnicode
class FileSchema(Schema):
    """Service schema for files."""
    # NOTE: marshmallow collects these declarations in order; all fields
    # below are serialized for output (most are dump_only: set by the service).
    key = SanitizedUnicode(dump_only=True)
    created = Str(dump_only=True)
    updated = Str(dump_only=True)
    # Computed by dump_status() below via GenMethod.
    status = GenMethod('dump_status')
    metadata = Dict(dump_only=True)
    # The remaining fields read through to the underlying `file` attribute.
    checksum = Str(dump_only=True, attribute='file.checksum')
    storage_class = Str(dump_only=True, attribute='file.storage_class')
    mimetype = Str(dump_only=True, attribute='file.mimetype')
    size = Number(attribute='file.size')
    version_id = UUID(attribute='file.version_id')
    file_id = UUID(attribute='file.file_id')
    bucket_id = UUID(attribute='file.bucket_id')
    links = Links()
    def dump_status(self, obj):
        """Dump file status: 'completed' once obj.file is set, else 'pending'."""
        return 'completed' if obj.file else 'pending'
|
[
"marshmallow_utils.fields.SanitizedUnicode",
"marshmallow_utils.fields.GenMethod",
"marshmallow.fields.Dict",
"marshmallow.fields.UUID",
"marshmallow.fields.Number",
"marshmallow_utils.fields.Links",
"marshmallow.fields.Str"
] |
[((471, 503), 'marshmallow_utils.fields.SanitizedUnicode', 'SanitizedUnicode', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (487, 503), False, 'from marshmallow_utils.fields import GenMethod, Links, SanitizedUnicode\n'), ((518, 537), 'marshmallow.fields.Str', 'Str', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (521, 537), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((552, 571), 'marshmallow.fields.Str', 'Str', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (555, 571), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((586, 610), 'marshmallow_utils.fields.GenMethod', 'GenMethod', (['"""dump_status"""'], {}), "('dump_status')\n", (595, 610), False, 'from marshmallow_utils.fields import GenMethod, Links, SanitizedUnicode\n'), ((627, 647), 'marshmallow.fields.Dict', 'Dict', ([], {'dump_only': '(True)'}), '(dump_only=True)\n', (631, 647), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((664, 710), 'marshmallow.fields.Str', 'Str', ([], {'dump_only': '(True)', 'attribute': '"""file.checksum"""'}), "(dump_only=True, attribute='file.checksum')\n", (667, 710), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((731, 782), 'marshmallow.fields.Str', 'Str', ([], {'dump_only': '(True)', 'attribute': '"""file.storage_class"""'}), "(dump_only=True, attribute='file.storage_class')\n", (734, 782), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((798, 844), 'marshmallow.fields.Str', 'Str', ([], {'dump_only': '(True)', 'attribute': '"""file.mimetype"""'}), "(dump_only=True, attribute='file.mimetype')\n", (801, 844), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((856, 885), 'marshmallow.fields.Number', 'Number', ([], {'attribute': '"""file.size"""'}), "(attribute='file.size')\n", (862, 885), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((903, 936), 'marshmallow.fields.UUID', 'UUID', ([], {'attribute': 
'"""file.version_id"""'}), "(attribute='file.version_id')\n", (907, 936), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((951, 981), 'marshmallow.fields.UUID', 'UUID', ([], {'attribute': '"""file.file_id"""'}), "(attribute='file.file_id')\n", (955, 981), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((998, 1030), 'marshmallow.fields.UUID', 'UUID', ([], {'attribute': '"""file.bucket_id"""'}), "(attribute='file.bucket_id')\n", (1002, 1030), False, 'from marshmallow.fields import UUID, Dict, Number, Str\n'), ((1044, 1051), 'marshmallow_utils.fields.Links', 'Links', ([], {}), '()\n', (1049, 1051), False, 'from marshmallow_utils.fields import GenMethod, Links, SanitizedUnicode\n')]
|
#!/usr/bin/env python
# coding: utf-8
# Many thanks for <EMAIL> & <EMAIL> for their orginal work and allowing me to share!
import argparse
import numpy as np
import torch
import torch.nn as nn
import joblib
from sklearn.metrics import roc_auc_score
from sklearn.preprocessing import RobustScaler
from torch.utils.data import DataLoader, TensorDataset
from uda_model import UDAModel
def write_epm_file(preds, truth, epm_fname):
    """Write tagger output to <epm_fname>.root with uproot3.

    Creates a 'DecayTree' with three branches: B_TRUEID (truth==0 -> -511,
    otherwise 511), tag (score > 0.5 -> +1, else -1) and eta (the score
    folded onto [0, 0.5], i.e. the per-event mistag estimate).
    """
    import uproot3
    scores = preds.squeeze()
    # +-1 tag decision from the thresholded classifier score
    decisions = np.where(scores > 0.5, 1, -1).astype(np.int32)
    # +-511 truth id from the binary truth labels
    true_ids = np.where(truth.squeeze() == 0, -511, 511).astype(np.int32)
    # distance of the score from the chosen side's certainty
    mistag = np.where(scores > 0.5, 1 - scores, scores)
    with uproot3.recreate(f"{epm_fname}.root", compression=None) as rootfile:
        rootfile["DecayTree"] = uproot3.newtree(
            {"B_TRUEID": np.int32, "tag": np.int32, "eta": np.float64})
        tree = rootfile["DecayTree"]
        tree["B_TRUEID"].newbasket(true_ids)
        tree["tag"].newbasket(decisions)
        tree["eta"].newbasket(mistag)
# logging utilities: pretty-printing of shapes and tag frequencies
def format_shapes(features, tags, idx, borders):
    """Render the shapes of the three tensors and the border list as one
    human-readable line for logging."""
    return "features {} tags {} idx {} borders ({},)".format(
        tuple(features.size()), tuple(tags.size()), tuple(idx.size()), len(borders))
def format_tag_frequencies(tags):
    """Return 'value(count)' for every distinct tag value, comma-separated,
    with values and counts rendered as integers."""
    values, counts = torch.unique(tags, return_counts=True)
    pairs = [f"{int(v)}({int(c)})" for v, c in zip(values, counts)]
    return ', '.join(pairs)
# like itertools.cycle but reshuffling a DataLoader instance in each cycle
class ShuffleCycle(object):
    """Endless iterator over a DataLoader.

    Like itertools.cycle, but instead of replaying cached items it builds a
    fresh iterator from the DataLoader each time it is exhausted, so a
    shuffling loader re-shuffles on every pass.
    """
    def __init__(self, dataloader):
        self.dataloader = dataloader
        self.iter = iter(self.dataloader)
    def __iter__(self):
        return self
    def __next__(self):
        try:
            return next(self.iter)
        except StopIteration:
            pass
        # exhausted: restart from a fresh iterator (reshuffles a DataLoader)
        self.iter = iter(self.dataloader)
        return next(self.iter)
def train_model(files, validation_files, model_out_name, scaler_out_name, n_epochs, train_frac, batch_size, make_epm_output, gamma=10, dc_weight=0.2):
    """Train the UDA flavour-tagging model with a domain-adversarial loss.

    Parameters
    ----------
    files : list
        Loaded .npz archives with source-domain (MC) data; each provides
        "features", "B_TRUEID" and "evt_borders" arrays.
    validation_files : list
        Loaded .npz archives with target-domain (real) data; same layout but
        the tag branch is named "B_ID" instead of "B_TRUEID".
    model_out_name : str
        Basename for the saved model weights ("<name>.pt").
    scaler_out_name : str
        Basename for the saved feature scaler ("<name>.bin").
    n_epochs : int
        Number of training epochs.
    train_frac : float
        Fraction of events used for training; the rest is used for validation.
    batch_size : int
        Number of events per batch.
    make_epm_output : bool
        If True, write tagged validation data to a ROOT file for the EPM.
    gamma : float, optional
        Steepness of the alpha schedule fed to the Gradient Reversal Layer.
    dc_weight : float, optional
        Weight of the domain-classifier loss terms.
    """
    print("Starting Training")
    # some torch setup
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    torch.backends.cudnn.benchmark = True
    torch.manual_seed(25031992)
    torch.cuda.manual_seed(25031992)
    features = np.concatenate([f["features"] for f in files])
    tags = np.concatenate([f["B_TRUEID"] for f in files]).reshape((-1, 1))
    # B+ (PDG id 521) -> tag 1, everything else -> tag 0
    tags = np.where(tags == 521, 1, 0).astype(np.int32)
    evt_borders = files[0]["evt_borders"]
    for f in files[1:]:
        # shift each subsequent file's event borders by the running track count
        evt_borders = np.concatenate((evt_borders, f["evt_borders"][1:] + evt_borders[-1]))
    assert evt_borders[-1] == len(features)
    # probnnmu has a bin at -1, for particles that don't have muon info
    # map that to 0
    features[features[:, 3] == -1, 3] = 0
    # scale data, and safe scaler for later use
    scaler = RobustScaler()
    features = scaler.fit_transform(features)
    joblib.dump(scaler, f"{scaler_out_name}.bin")
    # per-event (start, end) track ranges
    borders = np.array(list(zip(evt_borders[:-1], evt_borders[1:])))
    # idx_vec maps every track to the index of its event
    idx_vec = np.zeros(len(features), dtype=np.int64)
    for i, (b, e) in enumerate(borders):
        idx_vec[b:e] = i
    # split into train/test at an event boundary
    evt_split = int(len(borders) * train_frac)
    track_split = evt_borders[evt_split]
    train_tags = torch.tensor(tags[:evt_split], dtype=torch.float32).to(device)
    train_feat = torch.tensor(features[:track_split]).to(device)
    train_idx = torch.tensor(idx_vec[:track_split]).to(device)
    test_tags_np = tags[evt_split:]
    test_tags = torch.tensor(test_tags_np, dtype=torch.float32).to(device)
    test_feat = torch.tensor(features[track_split:]).to(device)
    test_idx = torch.tensor(idx_vec[track_split:]).to(device)
    # batch borders: (first track, last track) of each group of batch_size events
    train_borders = [
        (x[0, 0], x[-1, 1])
        for x in np.array_split(borders[:evt_split], len(borders[:evt_split]) // batch_size)
    ]
    test_borders = [
        (x[0, 0], x[-1, 1])
        for x in np.array_split(borders[evt_split:] - borders[evt_split][0], len(borders[evt_split:]) // batch_size)
    ]
    # UDA: process the validation_files equivalently; use "B_ID" instead of "B_TRUEID" and do not split
    val_features = np.concatenate([f["features"] for f in validation_files])
    val_tags = np.concatenate([f["B_ID"] for f in validation_files]).reshape((-1, 1))
    val_tags = np.where(val_tags == 521, 1, 0).astype(np.int32)
    val_evt_borders = validation_files[0]["evt_borders"]
    for f in validation_files[1:]:
        val_evt_borders = np.concatenate((val_evt_borders, f["evt_borders"][1:] + val_evt_borders[-1]))
    assert val_evt_borders[-1] == len(val_features)
    val_features[val_features[:, 3] == -1, 3] = 0  # probnnmu (see above)
    # reuse the scaler fitted on the source domain -- do NOT refit on real data
    val_features = scaler.transform(val_features)
    val_borders = np.array(list(zip(val_evt_borders[:-1], val_evt_borders[1:])))
    val_idx_vec = np.zeros(len(val_features), dtype=np.int64)
    for i, (b, e) in enumerate(val_borders):
        val_idx_vec[b:e] = i
    val_evt_split = int(len(val_borders) * train_frac)
    val_track_split = val_evt_borders[val_evt_split]
    val_train_tags = torch.tensor(val_tags[:val_evt_split], dtype=torch.float32).to(device)
    val_train_feat = torch.tensor(val_features[:val_track_split]).to(device)
    val_train_idx = torch.tensor(val_idx_vec[:val_track_split]).to(device)
    val_test_tags_np = val_tags[val_evt_split:]
    val_test_tags = torch.tensor(val_test_tags_np, dtype=torch.float32).to(device)
    val_test_feat = torch.tensor(val_features[val_track_split:]).to(device)
    val_test_idx = torch.tensor(val_idx_vec[val_track_split:]).to(device)
    val_train_borders = [
        (x[0, 0], x[-1, 1])
        for x in np.array_split(val_borders[:val_evt_split], len(val_borders[:val_evt_split]) // batch_size)
    ]
    val_test_borders = [
        (x[0, 0], x[-1, 1])
        for x in np.array_split(val_borders[val_evt_split:] - val_borders[val_evt_split][0], len(val_borders[val_evt_split:]) // batch_size)
    ]
    print(
        f"MC training shapes: {format_shapes(train_feat, train_tags, train_idx, train_borders)}",
        f"MC testing shapes: {format_shapes(test_feat, test_tags, test_idx, test_borders)}",
        f"Data training shapes: {format_shapes(val_train_feat, val_train_tags, val_train_idx, val_train_borders)}",
        f"Data testing shapes: {format_shapes(val_test_feat, val_test_tags, val_test_idx, val_test_borders)}",
        f"MC training tag frequencies: {format_tag_frequencies(train_tags)}",
        f"MC testing tag frequencies: {format_tag_frequencies(test_tags)}",
        f"Data training tag frequencies: {format_tag_frequencies(val_train_tags)}",
        f"Data testing tag frequencies: {format_tag_frequencies(val_test_tags)}",
        sep="\n"
    )  # log some general statistics about the data sources
    model = UDAModel().to(device)
    optimizer = torch.optim.Adam(model.parameters())
    scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, factor=0.5, min_lr=1e-5, patience=5)
    all_train_loss = []
    all_test_loss = []
    all_test_acc = []
    mypreds = np.zeros((len(test_tags), 1))
    all_val_loss = []
    all_val_acc = []
    valpreds = np.zeros((len(val_test_tags), 1))
    all_train_domain_loss = []
    all_test_domain_loss = []
    # torch data loaders reshuffle the data in each epoch
    train_dl = DataLoader(
        TensorDataset(torch.tensor(train_borders, dtype=torch.int)),
        shuffle = True,
        batch_size = None
    )
    val_train_dl = DataLoader(
        TensorDataset(torch.tensor(val_train_borders, dtype=torch.int)),
        shuffle = True,
        batch_size = None
    )
    for epoch in range(n_epochs):
        model.train()
        trainloss = 0
        fullloss = 0
        # progress and alpha value for the Gradient Reversal Layer
        p = float(epoch) / n_epochs
        alpha = 2. / (1. + np.exp(-gamma * p)) - 1
        for batch_idx, (batch_border, val_batch_border) in enumerate(zip(train_dl, ShuffleCycle(val_train_dl))):
            beg, end = batch_border[0].numpy()  # unpack the borders from train_dl
            val_beg, val_end = val_batch_border[0].numpy()
            optimizer.zero_grad()
            data = train_feat[beg:end]
            idx = train_idx[beg:end] - train_idx[beg]
            e_beg, e_end = train_idx[[beg, end - 1]]
            # one past the last event is the boundary
            e_end += 1
            target = train_tags[e_beg:e_end]
            output, uda_output = model(data, idx, alpha)
            loss = nn.functional.binary_cross_entropy_with_logits(output, target)
            trainloss += loss.detach().cpu().numpy()
            # UDA: feed the real data into the model
            data = val_train_feat[val_beg:val_end]
            idx = val_train_idx[val_beg:val_end] - val_train_idx[val_beg]
            _, val_uda_output = model(data, idx, alpha)
            # UDA: add the domain loss
            loss += nn.functional.binary_cross_entropy_with_logits(
                uda_output,
                torch.zeros_like(uda_output)  # expect zeros
            ) * dc_weight / 2
            loss += nn.functional.binary_cross_entropy_with_logits(
                val_uda_output,
                torch.ones_like(val_uda_output)  # expect ones
            ) * dc_weight / 2
            fullloss += loss.detach().cpu().numpy()
            loss.backward()
            optimizer.step()
        # averaged trainloss of epoch
        all_train_loss.append(trainloss / (batch_idx + 1))
        all_train_domain_loss.append((fullloss - trainloss) / (batch_idx + 1) / dc_weight)
        model.eval()
        test_loss = 0  # validation loss on source domain (= MC) data
        domain_loss = 0  # validation loss of the domain classifier
        for batch_idx, (beg, end) in enumerate(test_borders):
            data = test_feat[beg:end]
            # indices for the index_add inside the forward()
            idx = test_idx[beg:end] - test_idx[beg]
            # minus to make the test_idx start at 0 since we are indexing into
            # the split off test_tags array
            e_beg, e_end = test_idx[[beg, end - 1]] - test_idx[0]
            # one past the last event is the boundary
            e_end += 1
            target = test_tags[e_beg:e_end]
            with torch.no_grad():
                output, uda_output = model(data, idx, alpha)
            mypreds[e_beg:e_end] = torch.sigmoid(output.detach()).cpu().numpy()
            test_loss += nn.functional.binary_cross_entropy_with_logits(output, target).detach().cpu().numpy()
            domain_loss += nn.functional.binary_cross_entropy_with_logits(
                uda_output,
                torch.zeros_like(uda_output)  # expect zeros
            ).detach().cpu().numpy()
        test_acc = np.mean((mypreds > 0.5) == test_tags_np)
        all_test_loss.append(test_loss / (batch_idx + 1))
        all_test_acc.append(test_acc)
        # process the validation_files equivalently
        val_loss = 0  # validation loss on target domain (= real) data
        for val_batch_idx, (beg, end) in enumerate(val_test_borders):
            data = val_test_feat[beg:end]
            idx = val_test_idx[beg:end] - val_test_idx[beg]
            e_beg, e_end = val_test_idx[[beg, end - 1]] - val_test_idx[0]
            e_end += 1
            target = val_test_tags[e_beg:e_end]
            with torch.no_grad():
                output, uda_output = model(data, idx, alpha)
            valpreds[e_beg:e_end] = torch.sigmoid(output.detach()).cpu().numpy()
            val_loss += nn.functional.binary_cross_entropy_with_logits(output, target).detach().cpu().numpy()
            domain_loss += nn.functional.binary_cross_entropy_with_logits(
                uda_output,
                torch.ones_like(uda_output)  # expect ones
            ).detach().cpu().numpy()
        val_acc = np.mean((valpreds > 0.5) == val_test_tags_np)
        all_val_loss.append(val_loss / (val_batch_idx + 1))
        all_val_acc.append(val_acc)
        all_test_domain_loss.append(domain_loss / (len(test_borders) + len(val_test_borders)))
        # schedule the learning rate on the source-domain validation loss
        scheduler.step(test_loss / (batch_idx + 1))
        print(
            f"Epoch: {epoch}/{n_epochs} | MC loss {test_loss/(batch_idx+1):.5f} | MC AUC: {roc_auc_score(test_tags_np, mypreds):.5f} | MC ACC: {test_acc:.5f}",
            f"| data loss {val_loss/(val_batch_idx+1):.5f} | data AUC: {roc_auc_score(val_test_tags_np, valpreds):.5f} | data ACC: {val_acc:.5f}",
            end="\r",
        )
    print("Training complete")
    print(f"Minimum MC testing loss: {min(all_test_loss):.5f} in epoch: {np.argmin(all_test_loss)}")
    print(f"Maximum MC testing ACC: {max(all_test_acc):.5f} in epoch: {np.argmax(all_test_acc)}")
    print(f"Minimum data loss: {min(all_val_loss):.5f} in epoch: {np.argmin(all_val_loss)}")
    print(f"Maximum data ACC: {max(all_val_acc):.5f} in epoch: {np.argmax(all_val_acc)}")
    # done training so let's set it to eval
    model.eval()
    torch.save(model.state_dict(), f"{model_out_name}.pt")
    if make_epm_output:
        print("Writing output for EPM")
        try:
            write_epm_file(mypreds, test_tags_np, f"{model_out_name}_epm")
        except ImportError:
            print("Option make-epm-output requires uproot3 package to be available.\n Writing of EPM output skipped!")
    print("Making plots.")
    import matplotlib
    import matplotlib.pyplot as plt
    matplotlib.rcParams.update({"font.size": 22})
    plt.figure(figsize=(16, 9))
    plt.plot(all_train_loss, label="MC training loss")
    plt.plot(all_test_loss, label="MC validation loss")
    plt.plot(all_val_loss, label="data validation loss")
    plt.plot(all_train_domain_loss, label="domain training loss", linestyle="dashed")
    plt.plot(all_test_domain_loss, label="domain validation loss", linestyle="dashed")
    plt.legend()
    plt.xlabel("Epoch")
    plt.ylim(0.6, 0.8)
    plt.grid()
    plt.savefig("uda_Loss_vs_Epoch.png")
    plt.figure(figsize=(16, 9))
    plt.plot(all_test_acc, label="MC validation accuracy")
    plt.plot(all_val_acc, label="data validation accuracy")
    plt.legend()
    plt.xlabel("Epoch")
    plt.ylim(0.48, 0.64)
    plt.grid()
    plt.savefig("uda_Accuracy_vs_Epoch.png")
def restricted_float(x):
    """argparse type check: parse *x* as a float and require 0.0 < x <= 1.0."""
    try:
        value = float(x)
    except ValueError:
        raise argparse.ArgumentTypeError(f"{x} not a floating-point literal")
    if not 0.0 < value <= 1.0:
        raise argparse.ArgumentTypeError(f"{value} not in range (0.0, 1.0]")
    return value
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Train Model for Flavour Tagging.")
    parser.add_argument("filenames", nargs="+", help="Files that contain training data. *.npz files expected)")
    # required=True on an optional-style arg: show it under its own group
    parser.add_argument_group("required named arguments").add_argument(
        "-validate",
        nargs="+",
        help="Files that contain validation data of the target domain",
        required=True
    )  # https://stackoverflow.com/a/24181138/11567260
    parser.add_argument(
        "-model-out-name",
        default="uda_model",
        help="File name to save weights into. Default is model.pt",
    )
    parser.add_argument(
        "-scaler-out-name",
        default=None,
        help="File name to save scaler into. Default is MODELNAME_scaler.bin",
    )
    # BUG FIX: help text said "Batch size" (copy-paste from -batch-size)
    parser.add_argument("-epochs", dest="n_epochs", default=300, type=int, help="Number of training epochs")
    parser.add_argument(
        "-train-frac",
        default=0.75,
        type=restricted_float,
        help="Fraction of data to use for training",
    )
    parser.add_argument("-batch-size", default=1000, type=int, help="Batch size")
    # BUG FIX: this was action="store_false", which made EPM output *on* by
    # default and passing --make-epm-output turned it *off* -- the opposite
    # of what the flag name and help text say.
    parser.add_argument("--make-epm-output", action="store_true", help="Write tagged validation data into root file for EPM")
    args = parser.parse_args()
    files = [np.load(f) for f in args.filenames]
    validation_files = [np.load(f) for f in args.validate]
    # derive the scaler file name from the model name unless given explicitly
    if args.scaler_out_name is None:
        args.scaler_out_name = args.model_out_name + "_scaler"
    train_model(
        files, validation_files, args.model_out_name, args.scaler_out_name, args.n_epochs, args.train_frac, args.batch_size, args.make_epm_output
    )
|
[
"numpy.load",
"uda_model.UDAModel",
"argparse.ArgumentParser",
"numpy.argmax",
"joblib.dump",
"numpy.argmin",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.exp",
"uproot3.newtree",
"torch.no_grad",
"argparse.ArgumentTypeError",
"matplotlib.rcParams.update",
"torch.optim.lr_scheduler.ReduceLROnPlateau",
"uproot3.recreate",
"torch.nn.functional.binary_cross_entropy_with_logits",
"torch.unique",
"torch.zeros_like",
"matplotlib.pyplot.ylim",
"torch.manual_seed",
"matplotlib.pyplot.legend",
"torch.cuda.manual_seed",
"sklearn.metrics.roc_auc_score",
"torch.cuda.is_available",
"matplotlib.pyplot.grid",
"numpy.concatenate",
"torch.ones_like",
"matplotlib.pyplot.plot",
"sklearn.preprocessing.RobustScaler",
"numpy.where",
"torch.tensor",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.savefig"
] |
[((631, 682), 'numpy.where', 'np.where', (['(pred_tags > 0.5)', '(1 - pred_tags)', 'pred_tags'], {}), '(pred_tags > 0.5, 1 - pred_tags, pred_tags)\n', (639, 682), True, 'import numpy as np\n'), ((2200, 2227), 'torch.manual_seed', 'torch.manual_seed', (['(25031992)'], {}), '(25031992)\n', (2217, 2227), False, 'import torch\n'), ((2232, 2264), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['(25031992)'], {}), '(25031992)\n', (2254, 2264), False, 'import torch\n'), ((2281, 2327), 'numpy.concatenate', 'np.concatenate', (["[f['features'] for f in files]"], {}), "([f['features'] for f in files])\n", (2295, 2327), True, 'import numpy as np\n'), ((2859, 2873), 'sklearn.preprocessing.RobustScaler', 'RobustScaler', ([], {}), '()\n', (2871, 2873), False, 'from sklearn.preprocessing import RobustScaler\n'), ((2924, 2969), 'joblib.dump', 'joblib.dump', (['scaler', 'f"""{scaler_out_name}.bin"""'], {}), "(scaler, f'{scaler_out_name}.bin')\n", (2935, 2969), False, 'import joblib\n'), ((4144, 4201), 'numpy.concatenate', 'np.concatenate', (["[f['features'] for f in validation_files]"], {}), "([f['features'] for f in validation_files])\n", (4158, 4201), True, 'import numpy as np\n'), ((6881, 6977), 'torch.optim.lr_scheduler.ReduceLROnPlateau', 'torch.optim.lr_scheduler.ReduceLROnPlateau', (['optimizer'], {'factor': '(0.5)', 'min_lr': '(1e-05)', 'patience': '(5)'}), '(optimizer, factor=0.5, min_lr=\n 1e-05, patience=5)\n', (6923, 6977), False, 'import torch\n'), ((13427, 13472), 'matplotlib.rcParams.update', 'matplotlib.rcParams.update', (["{'font.size': 22}"], {}), "({'font.size': 22})\n", (13453, 13472), False, 'import matplotlib\n'), ((13478, 13505), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 9)'}), '(figsize=(16, 9))\n', (13488, 13505), True, 'import matplotlib.pyplot as plt\n'), ((13510, 13560), 'matplotlib.pyplot.plot', 'plt.plot', (['all_train_loss'], {'label': '"""MC training loss"""'}), "(all_train_loss, label='MC training loss')\n", (13518, 
13560), True, 'import matplotlib.pyplot as plt\n'), ((13565, 13616), 'matplotlib.pyplot.plot', 'plt.plot', (['all_test_loss'], {'label': '"""MC validation loss"""'}), "(all_test_loss, label='MC validation loss')\n", (13573, 13616), True, 'import matplotlib.pyplot as plt\n'), ((13621, 13673), 'matplotlib.pyplot.plot', 'plt.plot', (['all_val_loss'], {'label': '"""data validation loss"""'}), "(all_val_loss, label='data validation loss')\n", (13629, 13673), True, 'import matplotlib.pyplot as plt\n'), ((13678, 13764), 'matplotlib.pyplot.plot', 'plt.plot', (['all_train_domain_loss'], {'label': '"""domain training loss"""', 'linestyle': '"""dashed"""'}), "(all_train_domain_loss, label='domain training loss', linestyle=\n 'dashed')\n", (13686, 13764), True, 'import matplotlib.pyplot as plt\n'), ((13764, 13851), 'matplotlib.pyplot.plot', 'plt.plot', (['all_test_domain_loss'], {'label': '"""domain validation loss"""', 'linestyle': '"""dashed"""'}), "(all_test_domain_loss, label='domain validation loss', linestyle=\n 'dashed')\n", (13772, 13851), True, 'import matplotlib.pyplot as plt\n'), ((13851, 13863), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (13861, 13863), True, 'import matplotlib.pyplot as plt\n'), ((13868, 13887), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {}), "('Epoch')\n", (13878, 13887), True, 'import matplotlib.pyplot as plt\n'), ((13892, 13910), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0.6)', '(0.8)'], {}), '(0.6, 0.8)\n', (13900, 13910), True, 'import matplotlib.pyplot as plt\n'), ((13915, 13925), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (13923, 13925), True, 'import matplotlib.pyplot as plt\n'), ((13930, 13966), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""uda_Loss_vs_Epoch.png"""'], {}), "('uda_Loss_vs_Epoch.png')\n", (13941, 13966), True, 'import matplotlib.pyplot as plt\n'), ((13972, 13999), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 9)'}), '(figsize=(16, 9))\n', (13982, 13999), 
True, 'import matplotlib.pyplot as plt\n'), ((14004, 14058), 'matplotlib.pyplot.plot', 'plt.plot', (['all_test_acc'], {'label': '"""MC validation accuracy"""'}), "(all_test_acc, label='MC validation accuracy')\n", (14012, 14058), True, 'import matplotlib.pyplot as plt\n'), ((14063, 14118), 'matplotlib.pyplot.plot', 'plt.plot', (['all_val_acc'], {'label': '"""data validation accuracy"""'}), "(all_val_acc, label='data validation accuracy')\n", (14071, 14118), True, 'import matplotlib.pyplot as plt\n'), ((14123, 14135), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (14133, 14135), True, 'import matplotlib.pyplot as plt\n'), ((14140, 14159), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epoch"""'], {}), "('Epoch')\n", (14150, 14159), True, 'import matplotlib.pyplot as plt\n'), ((14164, 14184), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0.48)', '(0.64)'], {}), '(0.48, 0.64)\n', (14172, 14184), True, 'import matplotlib.pyplot as plt\n'), ((14189, 14199), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (14197, 14199), True, 'import matplotlib.pyplot as plt\n'), ((14204, 14244), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""uda_Accuracy_vs_Epoch.png"""'], {}), "('uda_Accuracy_vs_Epoch.png')\n", (14215, 14244), True, 'import matplotlib.pyplot as plt\n'), ((14562, 14633), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Train Model for Flavour Tagging."""'}), "(description='Train Model for Flavour Tagging.')\n", (14585, 14633), False, 'import argparse\n'), ((693, 748), 'uproot3.recreate', 'uproot3.recreate', (['f"""{epm_fname}.root"""'], {'compression': 'None'}), "(f'{epm_fname}.root', compression=None)\n", (709, 748), False, 'import uproot3\n'), ((786, 861), 'uproot3.newtree', 'uproot3.newtree', (["{'B_TRUEID': np.int32, 'tag': np.int32, 'eta': np.float64}"], {}), "({'B_TRUEID': np.int32, 'tag': np.int32, 'eta': np.float64})\n", (801, 861), False, 'import uproot3\n'), ((2548, 2617), 'numpy.concatenate', 
'np.concatenate', (["(evt_borders, f['evt_borders'][1:] + evt_borders[-1])"], {}), "((evt_borders, f['evt_borders'][1:] + evt_borders[-1]))\n", (2562, 2617), True, 'import numpy as np\n'), ((4471, 4548), 'numpy.concatenate', 'np.concatenate', (["(val_evt_borders, f['evt_borders'][1:] + val_evt_borders[-1])"], {}), "((val_evt_borders, f['evt_borders'][1:] + val_evt_borders[-1]))\n", (4485, 4548), True, 'import numpy as np\n'), ((10770, 10810), 'numpy.mean', 'np.mean', (['((mypreds > 0.5) == test_tags_np)'], {}), '((mypreds > 0.5) == test_tags_np)\n', (10777, 10810), True, 'import numpy as np\n'), ((11851, 11896), 'numpy.mean', 'np.mean', (['((valpreds > 0.5) == val_test_tags_np)'], {}), '((valpreds > 0.5) == val_test_tags_np)\n', (11858, 11896), True, 'import numpy as np\n'), ((14446, 14504), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['f"""{x} not in range (0.0, 1.0]"""'], {}), "(f'{x} not in range (0.0, 1.0]')\n", (14472, 14504), False, 'import argparse\n'), ((15831, 15841), 'numpy.load', 'np.load', (['f'], {}), '(f)\n', (15838, 15841), True, 'import numpy as np\n'), ((15891, 15901), 'numpy.load', 'np.load', (['f'], {}), '(f)\n', (15898, 15901), True, 'import numpy as np\n'), ((498, 530), 'numpy.where', 'np.where', (['(pred_tags > 0.5)', '(1)', '(-1)'], {}), '(pred_tags > 0.5, 1, -1)\n', (506, 530), True, 'import numpy as np\n'), ((2116, 2141), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (2139, 2141), False, 'import torch\n'), ((2339, 2385), 'numpy.concatenate', 'np.concatenate', (["[f['B_TRUEID'] for f in files]"], {}), "([f['B_TRUEID'] for f in files])\n", (2353, 2385), True, 'import numpy as np\n'), ((2414, 2441), 'numpy.where', 'np.where', (['(tags == 521)', '(1)', '(0)'], {}), '(tags == 521, 1, 0)\n', (2422, 2441), True, 'import numpy as np\n'), ((3267, 3318), 'torch.tensor', 'torch.tensor', (['tags[:evt_split]'], {'dtype': 'torch.float32'}), '(tags[:evt_split], dtype=torch.float32)\n', (3279, 3318), False, 
'import torch\n'), ((3347, 3383), 'torch.tensor', 'torch.tensor', (['features[:track_split]'], {}), '(features[:track_split])\n', (3359, 3383), False, 'import torch\n'), ((3411, 3446), 'torch.tensor', 'torch.tensor', (['idx_vec[:track_split]'], {}), '(idx_vec[:track_split])\n', (3423, 3446), False, 'import torch\n'), ((3511, 3558), 'torch.tensor', 'torch.tensor', (['test_tags_np'], {'dtype': 'torch.float32'}), '(test_tags_np, dtype=torch.float32)\n', (3523, 3558), False, 'import torch\n'), ((3586, 3622), 'torch.tensor', 'torch.tensor', (['features[track_split:]'], {}), '(features[track_split:])\n', (3598, 3622), False, 'import torch\n'), ((3649, 3684), 'torch.tensor', 'torch.tensor', (['idx_vec[track_split:]'], {}), '(idx_vec[track_split:])\n', (3661, 3684), False, 'import torch\n'), ((4217, 4270), 'numpy.concatenate', 'np.concatenate', (["[f['B_ID'] for f in validation_files]"], {}), "([f['B_ID'] for f in validation_files])\n", (4231, 4270), True, 'import numpy as np\n'), ((4303, 4334), 'numpy.where', 'np.where', (['(val_tags == 521)', '(1)', '(0)'], {}), '(val_tags == 521, 1, 0)\n', (4311, 4334), True, 'import numpy as np\n'), ((5074, 5133), 'torch.tensor', 'torch.tensor', (['val_tags[:val_evt_split]'], {'dtype': 'torch.float32'}), '(val_tags[:val_evt_split], dtype=torch.float32)\n', (5086, 5133), False, 'import torch\n'), ((5166, 5210), 'torch.tensor', 'torch.tensor', (['val_features[:val_track_split]'], {}), '(val_features[:val_track_split])\n', (5178, 5210), False, 'import torch\n'), ((5242, 5285), 'torch.tensor', 'torch.tensor', (['val_idx_vec[:val_track_split]'], {}), '(val_idx_vec[:val_track_split])\n', (5254, 5285), False, 'import torch\n'), ((5366, 5417), 'torch.tensor', 'torch.tensor', (['val_test_tags_np'], {'dtype': 'torch.float32'}), '(val_test_tags_np, dtype=torch.float32)\n', (5378, 5417), False, 'import torch\n'), ((5449, 5493), 'torch.tensor', 'torch.tensor', (['val_features[val_track_split:]'], {}), '(val_features[val_track_split:])\n', (5461, 
5493), False, 'import torch\n'), ((5524, 5567), 'torch.tensor', 'torch.tensor', (['val_idx_vec[val_track_split:]'], {}), '(val_idx_vec[val_track_split:])\n', (5536, 5567), False, 'import torch\n'), ((6790, 6800), 'uda_model.UDAModel', 'UDAModel', ([], {}), '()\n', (6798, 6800), False, 'from uda_model import UDAModel\n'), ((7352, 7396), 'torch.tensor', 'torch.tensor', (['train_borders'], {'dtype': 'torch.int'}), '(train_borders, dtype=torch.int)\n', (7364, 7396), False, 'import torch\n'), ((7508, 7556), 'torch.tensor', 'torch.tensor', (['val_train_borders'], {'dtype': 'torch.int'}), '(val_train_borders, dtype=torch.int)\n', (7520, 7556), False, 'import torch\n'), ((8507, 8569), 'torch.nn.functional.binary_cross_entropy_with_logits', 'nn.functional.binary_cross_entropy_with_logits', (['output', 'target'], {}), '(output, target)\n', (8553, 8569), True, 'import torch.nn as nn\n'), ((14339, 14402), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['f"""{x} not a floating-point literal"""'], {}), "(f'{x} not a floating-point literal')\n", (14365, 14402), False, 'import argparse\n'), ((10280, 10295), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (10293, 10295), False, 'import torch\n'), ((11366, 11381), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (11379, 11381), False, 'import torch\n'), ((12603, 12627), 'numpy.argmin', 'np.argmin', (['all_test_loss'], {}), '(all_test_loss)\n', (12612, 12627), True, 'import numpy as np\n'), ((12703, 12726), 'numpy.argmax', 'np.argmax', (['all_test_acc'], {}), '(all_test_acc)\n', (12712, 12726), True, 'import numpy as np\n'), ((12796, 12819), 'numpy.argmin', 'np.argmin', (['all_val_loss'], {}), '(all_val_loss)\n', (12805, 12819), True, 'import numpy as np\n'), ((12888, 12910), 'numpy.argmax', 'np.argmax', (['all_val_acc'], {}), '(all_val_acc)\n', (12897, 12910), True, 'import numpy as np\n'), ((7848, 7866), 'numpy.exp', 'np.exp', (['(-gamma * p)'], {}), '(-gamma * p)\n', (7854, 7866), True, 'import numpy as 
np\n'), ((12250, 12286), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['test_tags_np', 'mypreds'], {}), '(test_tags_np, mypreds)\n', (12263, 12286), False, 'from sklearn.metrics import roc_auc_score\n'), ((12391, 12432), 'sklearn.metrics.roc_auc_score', 'roc_auc_score', (['val_test_tags_np', 'valpreds'], {}), '(val_test_tags_np, valpreds)\n', (12404, 12432), False, 'from sklearn.metrics import roc_auc_score\n'), ((9012, 9040), 'torch.zeros_like', 'torch.zeros_like', (['uda_output'], {}), '(uda_output)\n', (9028, 9040), False, 'import torch\n'), ((9202, 9233), 'torch.ones_like', 'torch.ones_like', (['val_uda_output'], {}), '(val_uda_output)\n', (9217, 9233), False, 'import torch\n'), ((1344, 1382), 'torch.unique', 'torch.unique', (['tags'], {'return_counts': '(True)'}), '(tags, return_counts=True)\n', (1356, 1382), False, 'import torch\n'), ((10464, 10526), 'torch.nn.functional.binary_cross_entropy_with_logits', 'nn.functional.binary_cross_entropy_with_logits', (['output', 'target'], {}), '(output, target)\n', (10510, 10526), True, 'import torch.nn as nn\n'), ((11549, 11611), 'torch.nn.functional.binary_cross_entropy_with_logits', 'nn.functional.binary_cross_entropy_with_logits', (['output', 'target'], {}), '(output, target)\n', (11595, 11611), True, 'import torch.nn as nn\n'), ((10669, 10697), 'torch.zeros_like', 'torch.zeros_like', (['uda_output'], {}), '(uda_output)\n', (10685, 10697), False, 'import torch\n'), ((11754, 11781), 'torch.ones_like', 'torch.ones_like', (['uda_output'], {}), '(uda_output)\n', (11769, 11781), False, 'import torch\n')]
|
"""
The ComputationalBasisPOVMEffect class and supporting functionality.
"""
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
import functools as _functools
import itertools as _itertools
import numpy as _np
from pygsti.modelmembers.povms.effect import POVMEffect as _POVMEffect
from pygsti.modelmembers import term as _term
from pygsti.evotypes import Evotype as _Evotype
from pygsti.baseobjs import statespace as _statespace
from pygsti.baseobjs.basis import Basis as _Basis
from pygsti.baseobjs.polynomial import Polynomial as _Polynomial
try:
from pygsti.tools import fastcalc as _fastcalc
except ImportError:
_fastcalc = None
class ComputationalBasisPOVMEffect(_POVMEffect):
"""
A static POVM effect that is tensor product of 1-qubit Z-eigenstates.
This is called a "computational basis state" in many contexts.
Parameters
----------
zvals : iterable
A list or other iterable of integer 0 or 1 outcomes specifying
which computational basis element this object represents. The
length of `zvals` gives the total number of qubits.
basis : Basis or {'pp','gm','std'}, optional
The basis used to construct the Hilbert-Schmidt space representation
of this state as a super-ket.
evotype : Evotype or str, optional
The evolution type. The special value `"default"` is equivalent
to specifying the value of `pygsti.evotypes.Evotype.default_evotype`.
state_space : StateSpace, optional
The state space for this operation. If `None` a default state space
with the appropriate number of qubits is used.
"""
@classmethod
def from_state_vector(cls, vec, basis='pp', evotype="default", state_space=None):
"""
Create a new ComputationalBasisPOVMEffect from a dense vector.
Parameters
----------
vec : numpy.ndarray
A state vector specifying a computational basis state in the
standard basis. This vector has length 4^n for n qubits.
basis : Basis or {'pp','gm','std'}, optional
The basis of `vec` as a super-ket.
evotype : Evotype or str, optional
The evolution type of the resulting effect vector. The special
value `"default"` is equivalent to specifying the value of
`pygsti.evotypes.Evotype.default_evotype`.
state_space : StateSpace, optional
The state space for this operation. If `None` a default state space
with the appropriate number of qubits is used.
Returns
-------
ComputationalBasisPOVMEffect
"""
#if evotype in ('stabilizer', 'statevec'):
# nqubits = int(round(_np.log2(len(vec))))
# v0 = _np.array((1, 0), complex) # '0' qubit state as complex state vec
# v1 = _np.array((0, 1), complex) # '1' qubit state as complex state vec
#else:
nqubits = int(round(_np.log2(len(vec)) / 2))
v0 = 1.0 / _np.sqrt(2) * _np.array((1, 0, 0, 1), 'd') # '0' qubit state as Pauli dmvec
v1 = 1.0 / _np.sqrt(2) * _np.array((1, 0, 0, -1), 'd') # '1' qubit state as Pauli dmvec
v = (v0, v1)
for zvals in _itertools.product(*([(0, 1)] * nqubits)):
testvec = _functools.reduce(_np.kron, [v[i] for i in zvals])
if _np.allclose(testvec, vec.flat):
return cls(zvals, basis, evotype, state_space)
raise ValueError(("Given `vec` is not a z-basis product state - "
"cannot construct ComputationalBasisPOVMEffect"))
@classmethod
def from_pure_vector(cls, purevec, basis='pp', evotype="default", state_space=None):
"""
TODO: update docstring
Create a new StabilizerEffectVec from a pure-state vector.
Currently, purevec must be a single computational basis state (it
cannot be a superpostion of multiple of them).
Parameters
----------
purevec : numpy.ndarray
A complex-valued state vector specifying a pure state in the
standard computational basis. This vector has length 2^n for
n qubits.
basis : Basis or {'pp','gm','std'}, optional
The basis of `vec` as a super-ket.
evotype : Evotype or str, optional
The evolution type of the resulting effect vector. The special
value `"default"` is equivalent to specifying the value of
`pygsti.evotypes.Evotype.default_evotype`.
state_space : StateSpace, optional
The state space for this operation. If `None` a default state space
with the appropriate number of qubits is used.
Returns
-------
ComputationalBasisPOVMEffect
"""
nqubits = int(round(_np.log2(len(purevec))))
v = (_np.array([1, 0], 'd'), _np.array([0, 1], 'd')) # (v0,v1)
for zvals in _itertools.product(*([(0, 1)] * nqubits)):
testvec = _functools.reduce(_np.kron, [v[i] for i in zvals])
if _np.allclose(testvec, purevec.flat):
return cls(zvals, basis, evotype, state_space)
raise ValueError(("Given `purevec` must be a z-basis product state - "
"cannot construct StabilizerEffectVec"))
def __init__(self, zvals, basis='pp', evotype="default", state_space=None):
zvals = _np.ascontiguousarray(_np.array(zvals, _np.int64))
state_space = _statespace.default_space_for_num_qubits(len(zvals)) if (state_space is None) \
else _statespace.StateSpace.cast(state_space)
basis = _Basis.cast(basis, state_space.dim) # basis for Hilbert-Schmidt (superop) space
evotype = _Evotype.cast(evotype)
self._evotype = evotype # set this before call to _State.__init__ so self.to_dense() can work...
rep = evotype.create_computational_effect_rep(zvals, basis, state_space)
_POVMEffect.__init__(self, rep, evotype)
def to_dense(self, on_space='minimal', scratch=None):
"""
Return this POVM effect vector as a (dense) numpy array.
The memory in `scratch` maybe used when it is not-None.
Parameters
----------
on_space : {'minimal', 'Hilbert', 'HilbertSchmidt'}
The space that the returned dense operation acts upon. For unitary matrices and bra/ket vectors,
use `'Hilbert'`. For superoperator matrices and super-bra/super-ket vectors use `'HilbertSchmidt'`.
`'minimal'` means that `'Hilbert'` is used if possible given this operator's evolution type, and
otherwise `'HilbertSchmidt'` is used.
scratch : numpy.ndarray, optional
scratch space available for use.
Returns
-------
numpy.ndarray
"""
return self._rep.to_dense(on_space)
def taylor_order_terms(self, order, max_polynomial_vars=100, return_coeff_polys=False):
"""
Get the `order`-th order Taylor-expansion terms of this POVM effect vector.
This function either constructs or returns a cached list of the terms at
the given order. Each term is "rank-1", meaning that it is a state
preparation followed by or POVM effect preceded by actions on a
density matrix `rho` of the form:
`rho -> A rho B`
The coefficients of these terms are typically polynomials of the
POVMEffect's parameters, where the polynomial's variable indices index the
*global* parameters of the POVMEffect's parent (usually a :class:`Model`)
, not the POVMEffect's local parameter array (i.e. that returned from
`to_vector`).
Parameters
----------
order : int
The order of terms to get.
max_polynomial_vars : int, optional
maximum number of variables the created polynomials can have.
return_coeff_polys : bool
Whether a parallel list of locally-indexed (using variable indices
corresponding to *this* object's parameters rather than its parent's)
polynomial coefficients should be returned as well.
Returns
-------
terms : list
A list of :class:`RankOneTerm` objects.
coefficients : list
Only present when `return_coeff_polys == True`.
A list of *compact* polynomial objects, meaning that each element
is a `(vtape,ctape)` 2-tuple formed by concatenating together the
output of :method:`Polynomial.compact`.
"""
if order == 0: # only 0-th order term exists
coeff = _Polynomial({(): 1.0}, max_polynomial_vars)
terms = [_term.RankOnePolynomialEffectTerm.create_from(coeff, self, self,
self._evotype, self.state_space)]
if return_coeff_polys:
coeffs_as_compact_polys = coeff.compact(complex_coeff_tape=True)
return terms, coeffs_as_compact_polys
else:
return terms # Cache terms in FUTURE?
else:
if return_coeff_polys:
vtape = _np.empty(0, _np.int64)
ctape = _np.empty(0, complex)
return [], (vtape, ctape)
else:
return []
@property
def num_params(self):
"""
Get the number of independent parameters which specify this POVM effect vector.
Returns
-------
int
the number of independent parameters.
"""
return 0 # no parameters
def to_vector(self):
"""
Get the POVM effect vector parameters as an array of values.
Returns
-------
numpy array
The parameters as a 1D array with length num_params().
"""
return _np.array([], 'd') # no parameters
def from_vector(self, v, close=False, dirty_value=True):
"""
Initialize the POVM effect vector using a 1D array of parameters.
Parameters
----------
v : numpy array
The 1D vector of POVM effect vector parameters. Length
must == num_params()
close : bool, optional
Whether `v` is close to this POVM effect vector's current
set of parameters. Under some circumstances, when this
is true this call can be completed more quickly.
dirty_value : bool, optional
The value to set this object's "dirty flag" to before exiting this
call. This is passed as an argument so it can be updated *recursively*.
Leave this set to `True` unless you know what you're doing.
Returns
-------
None
"""
assert(len(v) == 0) # should be no parameters, and nothing to do
def __str__(self):
nQubits = len(self._rep.zvals)
s = "Computational Z-basis POVM effect vec for %d qubits w/z-values: %s" % (nQubits, str(self._rep.zvals))
return s
|
[
"pygsti.modelmembers.povms.effect.POVMEffect.__init__",
"numpy.empty",
"numpy.allclose",
"pygsti.baseobjs.basis.Basis.cast",
"pygsti.baseobjs.polynomial.Polynomial",
"pygsti.evotypes.Evotype.cast",
"pygsti.baseobjs.statespace.StateSpace.cast",
"numpy.array",
"pygsti.modelmembers.term.RankOnePolynomialEffectTerm.create_from",
"itertools.product",
"functools.reduce",
"numpy.sqrt"
] |
[((3858, 3899), 'itertools.product', '_itertools.product', (['*([(0, 1)] * nqubits)'], {}), '(*([(0, 1)] * nqubits))\n', (3876, 3899), True, 'import itertools as _itertools\n'), ((5581, 5622), 'itertools.product', '_itertools.product', (['*([(0, 1)] * nqubits)'], {}), '(*([(0, 1)] * nqubits))\n', (5599, 5622), True, 'import itertools as _itertools\n'), ((6283, 6318), 'pygsti.baseobjs.basis.Basis.cast', '_Basis.cast', (['basis', 'state_space.dim'], {}), '(basis, state_space.dim)\n', (6294, 6318), True, 'from pygsti.baseobjs.basis import Basis as _Basis\n'), ((6383, 6405), 'pygsti.evotypes.Evotype.cast', '_Evotype.cast', (['evotype'], {}), '(evotype)\n', (6396, 6405), True, 'from pygsti.evotypes import Evotype as _Evotype\n'), ((6601, 6641), 'pygsti.modelmembers.povms.effect.POVMEffect.__init__', '_POVMEffect.__init__', (['self', 'rep', 'evotype'], {}), '(self, rep, evotype)\n', (6621, 6641), True, 'from pygsti.modelmembers.povms.effect import POVMEffect as _POVMEffect\n'), ((10554, 10572), 'numpy.array', '_np.array', (['[]', '"""d"""'], {}), "([], 'd')\n", (10563, 10572), True, 'import numpy as _np\n'), ((3655, 3683), 'numpy.array', '_np.array', (['(1, 0, 0, 1)', '"""d"""'], {}), "((1, 0, 0, 1), 'd')\n", (3664, 3683), True, 'import numpy as _np\n'), ((3751, 3780), 'numpy.array', '_np.array', (['(1, 0, 0, -1)', '"""d"""'], {}), "((1, 0, 0, -1), 'd')\n", (3760, 3780), True, 'import numpy as _np\n'), ((3923, 3973), 'functools.reduce', '_functools.reduce', (['_np.kron', '[v[i] for i in zvals]'], {}), '(_np.kron, [v[i] for i in zvals])\n', (3940, 3973), True, 'import functools as _functools\n'), ((3989, 4020), 'numpy.allclose', '_np.allclose', (['testvec', 'vec.flat'], {}), '(testvec, vec.flat)\n', (4001, 4020), True, 'import numpy as _np\n'), ((5501, 5523), 'numpy.array', '_np.array', (['[1, 0]', '"""d"""'], {}), "([1, 0], 'd')\n", (5510, 5523), True, 'import numpy as _np\n'), ((5525, 5547), 'numpy.array', '_np.array', (['[0, 1]', '"""d"""'], {}), "([0, 1], 'd')\n", 
(5534, 5547), True, 'import numpy as _np\n'), ((5646, 5696), 'functools.reduce', '_functools.reduce', (['_np.kron', '[v[i] for i in zvals]'], {}), '(_np.kron, [v[i] for i in zvals])\n', (5663, 5696), True, 'import functools as _functools\n'), ((5712, 5747), 'numpy.allclose', '_np.allclose', (['testvec', 'purevec.flat'], {}), '(testvec, purevec.flat)\n', (5724, 5747), True, 'import numpy as _np\n'), ((6077, 6104), 'numpy.array', '_np.array', (['zvals', '_np.int64'], {}), '(zvals, _np.int64)\n', (6086, 6104), True, 'import numpy as _np\n'), ((6226, 6266), 'pygsti.baseobjs.statespace.StateSpace.cast', '_statespace.StateSpace.cast', (['state_space'], {}), '(state_space)\n', (6253, 6266), True, 'from pygsti.baseobjs import statespace as _statespace\n'), ((9314, 9357), 'pygsti.baseobjs.polynomial.Polynomial', '_Polynomial', (['{(): 1.0}', 'max_polynomial_vars'], {}), '({(): 1.0}, max_polynomial_vars)\n', (9325, 9357), True, 'from pygsti.baseobjs.polynomial import Polynomial as _Polynomial\n'), ((3641, 3652), 'numpy.sqrt', '_np.sqrt', (['(2)'], {}), '(2)\n', (3649, 3652), True, 'import numpy as _np\n'), ((3737, 3748), 'numpy.sqrt', '_np.sqrt', (['(2)'], {}), '(2)\n', (3745, 3748), True, 'import numpy as _np\n'), ((9379, 9481), 'pygsti.modelmembers.term.RankOnePolynomialEffectTerm.create_from', '_term.RankOnePolynomialEffectTerm.create_from', (['coeff', 'self', 'self', 'self._evotype', 'self.state_space'], {}), '(coeff, self, self, self.\n _evotype, self.state_space)\n', (9424, 9481), True, 'from pygsti.modelmembers import term as _term\n'), ((9862, 9885), 'numpy.empty', '_np.empty', (['(0)', '_np.int64'], {}), '(0, _np.int64)\n', (9871, 9885), True, 'import numpy as _np\n'), ((9910, 9931), 'numpy.empty', '_np.empty', (['(0)', 'complex'], {}), '(0, complex)\n', (9919, 9931), True, 'import numpy as _np\n')]
|
"""2. Predict with pre-trained Faster RCNN models
==============================================
This article shows how to play with pre-trained Faster RCNN model.
First let's import some necessary libraries:
"""
from matplotlib import pyplot as plt
import gluoncv
from gluoncv import model_zoo, data, utils
######################################################################
# Load a pretrained model
# -------------------------
#
# Let's get an Faster RCNN model trained on Pascal VOC
# dataset with ResNet-50 backbone. By specifying
# ``pretrained=True``, it will automatically download the model from the model
# zoo if necessary. For more pretrained models, please refer to
# :doc:`../../model_zoo/index`.
net = model_zoo.get_model('faster_rcnn_resnet50_v2a_voc', pretrained=True)
######################################################################
# Pre-process an image
# --------------------
#
# Next we download an image, and pre-process with preset data transforms. Here we
# specify that we resize the short edge of the image to 512 px. But you can
# feed an arbitrarily sized image.
#
# You can provide a list of image file names, such as ``[im_fname1, im_fname2,
# ...]`` to :py:func:`gluoncv.data.transforms.presets.ssd.load_test` if you
# want to load multiple image together.
#
# This function returns two results. The first is a NDArray with shape
# `(batch_size, RGB_channels, height, width)`. It can be fed into the
# model directly. The second one contains the images in numpy format to
# easy to be plotted. Since we only loaded a single image, the first dimension
# of `x` is 1.
im_fname = utils.download('https://github.com/dmlc/web-data/blob/master/' +
'gluoncv/detection/biking.jpg?raw=true',
path='biking.jpg')
x, orig_img = data.transforms.presets.rcnn.load_test(im_fname)
######################################################################
# Inference and display
# ---------------------
#
# The Faster RCNN model
#
# We can use :py:func:`gluoncv.utils.viz.plot_bbox` to visualize the
# results. We slice the results for the first image and feed them into `plot_bbox`:
box_ids, scores, bboxes = net(x)
ax = utils.viz.plot_bbox(orig_img, bboxes, scores, box_ids, class_names=net.classes)
plt.show()
|
[
"gluoncv.model_zoo.get_model",
"matplotlib.pyplot.show",
"gluoncv.utils.viz.plot_bbox",
"gluoncv.utils.download",
"gluoncv.data.transforms.presets.rcnn.load_test"
] |
[((725, 793), 'gluoncv.model_zoo.get_model', 'model_zoo.get_model', (['"""faster_rcnn_resnet50_v2a_voc"""'], {'pretrained': '(True)'}), "('faster_rcnn_resnet50_v2a_voc', pretrained=True)\n", (744, 793), False, 'from gluoncv import model_zoo, data, utils\n'), ((1625, 1753), 'gluoncv.utils.download', 'utils.download', (["('https://github.com/dmlc/web-data/blob/master/' +\n 'gluoncv/detection/biking.jpg?raw=true')"], {'path': '"""biking.jpg"""'}), "('https://github.com/dmlc/web-data/blob/master/' +\n 'gluoncv/detection/biking.jpg?raw=true', path='biking.jpg')\n", (1639, 1753), False, 'from gluoncv import model_zoo, data, utils\n'), ((1816, 1864), 'gluoncv.data.transforms.presets.rcnn.load_test', 'data.transforms.presets.rcnn.load_test', (['im_fname'], {}), '(im_fname)\n', (1854, 1864), False, 'from gluoncv import model_zoo, data, utils\n'), ((2205, 2284), 'gluoncv.utils.viz.plot_bbox', 'utils.viz.plot_bbox', (['orig_img', 'bboxes', 'scores', 'box_ids'], {'class_names': 'net.classes'}), '(orig_img, bboxes, scores, box_ids, class_names=net.classes)\n', (2224, 2284), False, 'from gluoncv import model_zoo, data, utils\n'), ((2286, 2296), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2294, 2296), True, 'from matplotlib import pyplot as plt\n')]
|
#!/usr/bin/env python3
"""
Class ManageOutput
"""
import logging
import utils.load_yaml
from language.translate import Translate
import jsonpickle
import utils.logging
logger = logging.getLogger(utils.logging.getLoggerName(__name__))
# from jinja2 import Environment, FileSystemLoader, select_autoescape
# env = Environment(
# loader = FileSystemLoader(searchpath="./"),
# autoescape=select_autoescape()
# )
class ManageOutput:
    """Builds the structured output payloads of the tool and serialises them."""

    def __init__(self, config):
        """Load the localised output-text templates named in `config`.

        `config` must expose ``get("template-text-file")`` pointing at a YAML
        file whose ``output-texts`` key holds the templated messages.
        """
        template_text_file = config.get("template-text-file")
        loaded = utils.load_yaml.yaml_file_to_dict(template_text_file)
        self.templated_texts = loaded.get("output-texts")

    def _getOutput(self, result, description, details=None):
        """Assemble the common ``{result, description[, details]}`` payload."""
        output = {"result": result, "description": description}
        if details is not None:
            output["details"] = details
        return output

    def setInformation(self, text_key: str, template_values: dict, details=None) -> dict:
        """Return an 'Information' payload with a localised description."""
        localised = Translate.localise(self.templated_texts, text_key, template_values)
        return self._getOutput("Information", localised, details)

    def setSuccess(self, text_key: str, template_values: dict) -> dict:
        """Return a 'Success' payload; details carry ``template_values['tm_version']``."""
        localised = Translate.localise(self.templated_texts, text_key, template_values)
        return self._getOutput("Success", localised, template_values["tm_version"])

    def setError(self, text_key: str, template_values: dict) -> dict:
        """Return an 'Error' payload with a localised description (no details)."""
        localised = Translate.localise(self.templated_texts, text_key, template_values)
        return self._getOutput("Error", localised)

    def tojson(self, output: dict):
        """Serialise `output` to JSON, flattening objects (no py/object tags)."""
        return jsonpickle.encode(output, unpicklable=False)
|
[
"language.translate.Translate.localise",
"jsonpickle.encode"
] |
[((1012, 1079), 'language.translate.Translate.localise', 'Translate.localise', (['self.templated_texts', 'text_key', 'template_values'], {}), '(self.templated_texts, text_key, template_values)\n', (1030, 1079), False, 'from language.translate import Translate\n'), ((1242, 1309), 'language.translate.Translate.localise', 'Translate.localise', (['self.templated_texts', 'text_key', 'template_values'], {}), '(self.templated_texts, text_key, template_values)\n', (1260, 1309), False, 'from language.translate import Translate\n'), ((1489, 1556), 'language.translate.Translate.localise', 'Translate.localise', (['self.templated_texts', 'text_key', 'template_values'], {}), '(self.templated_texts, text_key, template_values)\n', (1507, 1556), False, 'from language.translate import Translate\n'), ((1661, 1705), 'jsonpickle.encode', 'jsonpickle.encode', (['output'], {'unpicklable': '(False)'}), '(output, unpicklable=False)\n', (1678, 1705), False, 'import jsonpickle\n')]
|
# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import typing as t
import warnings
from elastic_transport import (
BaseNode,
BinaryApiResponse,
HeadApiResponse,
NodeConfig,
NodePool,
NodeSelector,
ObjectApiResponse,
Serializer,
Transport,
)
from elastic_transport.client_utils import DEFAULT, DefaultType
from ...exceptions import ApiError, TransportError
from ...serializer import DEFAULT_SERIALIZERS
from ._base import (
BaseClient,
create_sniff_callback,
default_sniff_callback,
resolve_auth_headers,
)
from .async_search import AsyncSearchClient
from .autoscaling import AutoscalingClient
from .cat import CatClient
from .ccr import CcrClient
from .cluster import ClusterClient
from .dangling_indices import DanglingIndicesClient
from .enrich import EnrichClient
from .eql import EqlClient
from .features import FeaturesClient
from .fleet import FleetClient
from .graph import GraphClient
from .ilm import IlmClient
from .indices import IndicesClient
from .ingest import IngestClient
from .license import LicenseClient
from .logstash import LogstashClient
from .migration import MigrationClient
from .ml import MlClient
from .monitoring import MonitoringClient
from .nodes import NodesClient
from .rollup import RollupClient
from .searchable_snapshots import SearchableSnapshotsClient
from .security import SecurityClient
from .shutdown import ShutdownClient
from .slm import SlmClient
from .snapshot import SnapshotClient
from .sql import SqlClient
from .ssl import SslClient
from .tasks import TasksClient
from .text_structure import TextStructureClient
from .transform import TransformClient
from .utils import (
_TYPE_HOSTS,
CLIENT_META_SERVICE,
SKIP_IN_PATH,
_quote,
_rewrite_parameters,
client_node_configs,
)
from .watcher import WatcherClient
from .xpack import XPackClient
logger = logging.getLogger("elasticsearch")
SelfType = t.TypeVar("SelfType", bound="Elasticsearch")
class Elasticsearch(BaseClient):
"""
Elasticsearch low-level client. Provides a straightforward mapping from
Python to Elasticsearch REST APIs.
The client instance has additional attributes to update APIs in different
namespaces such as ``async_search``, ``indices``, ``security``, and more:
.. code-block:: python
client = Elasticsearch("http://localhost:9200")
# Get Document API
client.get(index="*", id="1")
# Get Index API
client.indices.get(index="*")
Transport options can be set on the client constructor or using
the :meth:`~elasticsearch.Elasticsearch.options` method:
.. code-block:: python
# Set 'api_key' on the constructor
client = Elasticsearch(
"http://localhost:9200",
api_key=("id", "api_key")
)
client.search(...)
# Set 'api_key' per request
client.options(api_key=("id", "api_key")).search(...)
"""
def __init__(
self,
hosts: t.Optional[_TYPE_HOSTS] = None,
*,
# API
cloud_id: t.Optional[str] = None,
api_key: t.Optional[t.Union[str, t.Tuple[str, str]]] = None,
basic_auth: t.Optional[t.Union[str, t.Tuple[str, str]]] = None,
bearer_auth: t.Optional[str] = None,
opaque_id: t.Optional[str] = None,
# Node
headers: t.Union[DefaultType, t.Mapping[str, str]] = DEFAULT,
connections_per_node: t.Union[DefaultType, int] = DEFAULT,
http_compress: t.Union[DefaultType, bool] = DEFAULT,
verify_certs: t.Union[DefaultType, bool] = DEFAULT,
ca_certs: t.Union[DefaultType, str] = DEFAULT,
client_cert: t.Union[DefaultType, str] = DEFAULT,
client_key: t.Union[DefaultType, str] = DEFAULT,
ssl_assert_hostname: t.Union[DefaultType, str] = DEFAULT,
ssl_assert_fingerprint: t.Union[DefaultType, str] = DEFAULT,
ssl_version: t.Union[DefaultType, int] = DEFAULT,
ssl_context: t.Union[DefaultType, t.Any] = DEFAULT,
ssl_show_warn: t.Union[DefaultType, bool] = DEFAULT,
# Transport
transport_class: t.Type[Transport] = Transport,
request_timeout: t.Union[DefaultType, None, float] = DEFAULT,
node_class: t.Union[DefaultType, t.Type[BaseNode]] = DEFAULT,
node_pool_class: t.Union[DefaultType, t.Type[NodePool]] = DEFAULT,
randomize_nodes_in_pool: t.Union[DefaultType, bool] = DEFAULT,
node_selector_class: t.Union[DefaultType, t.Type[NodeSelector]] = DEFAULT,
dead_node_backoff_factor: t.Union[DefaultType, float] = DEFAULT,
max_dead_node_backoff: t.Union[DefaultType, float] = DEFAULT,
serializer: t.Optional[Serializer] = None,
serializers: t.Union[DefaultType, t.Mapping[str, Serializer]] = DEFAULT,
default_mimetype: str = "application/json",
max_retries: t.Union[DefaultType, int] = DEFAULT,
retry_on_status: t.Union[DefaultType, int, t.Collection[int]] = DEFAULT,
retry_on_timeout: t.Union[DefaultType, bool] = DEFAULT,
sniff_on_start: t.Union[DefaultType, bool] = DEFAULT,
sniff_before_requests: t.Union[DefaultType, bool] = DEFAULT,
sniff_on_node_failure: t.Union[DefaultType, bool] = DEFAULT,
sniff_timeout: t.Union[DefaultType, None, float] = DEFAULT,
min_delay_between_sniffing: t.Union[DefaultType, None, float] = DEFAULT,
sniffed_node_callback: t.Optional[
t.Callable[[t.Dict[str, t.Any], NodeConfig], t.Optional[NodeConfig]]
] = None,
meta_header: t.Union[DefaultType, bool] = DEFAULT,
timeout: t.Union[DefaultType, None, float] = DEFAULT,
randomize_hosts: t.Union[DefaultType, bool] = DEFAULT,
host_info_callback: t.Optional[
t.Callable[
[t.Dict[str, t.Any], t.Dict[str, t.Union[str, int]]],
t.Optional[t.Dict[str, t.Union[str, int]]],
]
] = None,
sniffer_timeout: t.Union[DefaultType, None, float] = DEFAULT,
sniff_on_connection_fail: t.Union[DefaultType, bool] = DEFAULT,
http_auth: t.Union[DefaultType, t.Any] = DEFAULT,
maxsize: t.Union[DefaultType, int] = DEFAULT,
# Internal use only
_transport: t.Optional[Transport] = None,
) -> None:
if hosts is None and cloud_id is None and _transport is None:
raise ValueError("Either 'hosts' or 'cloud_id' must be specified")
if timeout is not DEFAULT:
if request_timeout is not DEFAULT:
raise ValueError(
"Can't specify both 'timeout' and 'request_timeout', "
"instead only specify 'request_timeout'"
)
warnings.warn(
"The 'timeout' parameter is deprecated in favor of 'request_timeout'",
category=DeprecationWarning,
stacklevel=2,
)
request_timeout = timeout
if serializer is not None:
if serializers is not DEFAULT:
raise ValueError(
"Can't specify both 'serializer' and 'serializers' parameters "
"together. Instead only specify one of the other."
)
serializers = {default_mimetype: serializer}
if randomize_hosts is not DEFAULT:
if randomize_nodes_in_pool is not DEFAULT:
raise ValueError(
"Can't specify both 'randomize_hosts' and 'randomize_nodes_in_pool', "
"instead only specify 'randomize_nodes_in_pool'"
)
warnings.warn(
"The 'randomize_hosts' parameter is deprecated in favor of 'randomize_nodes_in_pool'",
category=DeprecationWarning,
stacklevel=2,
)
randomize_nodes_in_pool = randomize_hosts
if sniffer_timeout is not DEFAULT:
if min_delay_between_sniffing is not DEFAULT:
raise ValueError(
"Can't specify both 'sniffer_timeout' and 'min_delay_between_sniffing', "
"instead only specify 'min_delay_between_sniffing'"
)
warnings.warn(
"The 'sniffer_timeout' parameter is deprecated in favor of 'min_delay_between_sniffing'",
category=DeprecationWarning,
stacklevel=2,
)
min_delay_between_sniffing = sniffer_timeout
if sniff_on_connection_fail is not DEFAULT:
if sniff_on_node_failure is not DEFAULT:
raise ValueError(
"Can't specify both 'sniff_on_connection_fail' and 'sniff_on_node_failure', "
"instead only specify 'sniff_on_node_failure'"
)
warnings.warn(
"The 'sniff_on_connection_fail' parameter is deprecated in favor of 'sniff_on_node_failure'",
category=DeprecationWarning,
stacklevel=2,
)
sniff_on_node_failure = sniff_on_connection_fail
if maxsize is not DEFAULT:
if connections_per_node is not DEFAULT:
raise ValueError(
"Can't specify both 'maxsize' and 'connections_per_node', "
"instead only specify 'connections_per_node'"
)
warnings.warn(
"The 'maxsize' parameter is deprecated in favor of 'connections_per_node'",
category=DeprecationWarning,
stacklevel=2,
)
connections_per_node = maxsize
# Setting min_delay_between_sniffing=True implies sniff_before_requests=True
if min_delay_between_sniffing is not DEFAULT:
sniff_before_requests = True
sniffing_options = (
sniff_timeout,
sniff_on_start,
sniff_before_requests,
sniff_on_node_failure,
sniffed_node_callback,
min_delay_between_sniffing,
sniffed_node_callback,
)
if cloud_id is not None and any(
x is not DEFAULT and x is not None for x in sniffing_options
):
raise ValueError(
"Sniffing should not be enabled when connecting to Elastic Cloud"
)
sniff_callback = None
if host_info_callback is not None:
if sniffed_node_callback is not None:
raise ValueError(
"Can't specify both 'host_info_callback' and 'sniffed_node_callback', "
"instead only specify 'sniffed_node_callback'"
)
warnings.warn(
"The 'host_info_callback' parameter is deprecated in favor of 'sniffed_node_callback'",
category=DeprecationWarning,
stacklevel=2,
)
sniff_callback = create_sniff_callback(
host_info_callback=host_info_callback
)
elif sniffed_node_callback is not None:
sniff_callback = create_sniff_callback(
sniffed_node_callback=sniffed_node_callback
)
elif (
sniff_on_start is True
or sniff_before_requests is True
or sniff_on_node_failure is True
):
sniff_callback = default_sniff_callback
if _transport is None:
node_configs = client_node_configs(
hosts,
cloud_id=cloud_id,
connections_per_node=connections_per_node,
http_compress=http_compress,
verify_certs=verify_certs,
ca_certs=ca_certs,
client_cert=client_cert,
client_key=client_key,
ssl_assert_hostname=ssl_assert_hostname,
ssl_assert_fingerprint=ssl_assert_fingerprint,
ssl_version=ssl_version,
ssl_context=ssl_context,
ssl_show_warn=ssl_show_warn,
)
transport_kwargs: t.Dict[str, t.Any] = {}
if node_class is not DEFAULT:
transport_kwargs["node_class"] = node_class
if node_pool_class is not DEFAULT:
transport_kwargs["node_pool_class"] = node_class
if randomize_nodes_in_pool is not DEFAULT:
transport_kwargs["randomize_nodes_in_pool"] = randomize_nodes_in_pool
if node_selector_class is not DEFAULT:
transport_kwargs["node_selector_class"] = node_selector_class
if dead_node_backoff_factor is not DEFAULT:
transport_kwargs["dead_node_backoff_factor"] = dead_node_backoff_factor
if max_dead_node_backoff is not DEFAULT:
transport_kwargs["max_dead_node_backoff"] = max_dead_node_backoff
if meta_header is not DEFAULT:
transport_kwargs["meta_header"] = meta_header
transport_serializers = DEFAULT_SERIALIZERS.copy()
if serializers is not DEFAULT:
transport_serializers.update(serializers)
# Override compatibility serializers from their non-compat mimetypes too.
# So we use the same serializer for requests and responses.
for mime_subtype in ("json", "x-ndjson"):
if f"application/{mime_subtype}" in serializers:
compat_mimetype = (
f"application/vnd.elasticsearch+{mime_subtype}"
)
if compat_mimetype not in serializers:
transport_serializers[compat_mimetype] = serializers[
f"application/{mime_subtype}"
]
transport_kwargs["serializers"] = transport_serializers
transport_kwargs["default_mimetype"] = default_mimetype
if sniff_on_start is not DEFAULT:
transport_kwargs["sniff_on_start"] = sniff_on_start
if sniff_before_requests is not DEFAULT:
transport_kwargs["sniff_before_requests"] = sniff_before_requests
if sniff_on_node_failure is not DEFAULT:
transport_kwargs["sniff_on_node_failure"] = sniff_on_node_failure
if sniff_timeout is not DEFAULT:
transport_kwargs["sniff_timeout"] = sniff_timeout
if min_delay_between_sniffing is not DEFAULT:
transport_kwargs[
"min_delay_between_sniffing"
] = min_delay_between_sniffing
_transport = transport_class(
node_configs,
client_meta_service=CLIENT_META_SERVICE,
sniff_callback=sniff_callback,
**transport_kwargs,
)
super().__init__(_transport)
# These are set per-request so are stored separately.
self._request_timeout = request_timeout
self._max_retries = max_retries
self._retry_on_timeout = retry_on_timeout
if isinstance(retry_on_status, int):
retry_on_status = (retry_on_status,)
self._retry_on_status = retry_on_status
else:
super().__init__(_transport)
if headers is not DEFAULT and headers is not None:
self._headers.update(headers)
if opaque_id is not DEFAULT and opaque_id is not None: # type: ignore[comparison-overlap]
self._headers["x-opaque-id"] = opaque_id
self._headers = resolve_auth_headers(
self._headers,
http_auth=http_auth,
api_key=api_key,
basic_auth=basic_auth,
bearer_auth=bearer_auth,
)
# namespaced clients for compatibility with API names
self.async_search = AsyncSearchClient(self)
self.autoscaling = AutoscalingClient(self)
self.cat = CatClient(self)
self.cluster = ClusterClient(self)
self.fleet = FleetClient(self)
self.features = FeaturesClient(self)
self.indices = IndicesClient(self)
self.ingest = IngestClient(self)
self.nodes = NodesClient(self)
self.snapshot = SnapshotClient(self)
self.tasks = TasksClient(self)
self.xpack = XPackClient(self)
self.ccr = CcrClient(self)
self.dangling_indices = DanglingIndicesClient(self)
self.enrich = EnrichClient(self)
self.eql = EqlClient(self)
self.graph = GraphClient(self)
self.ilm = IlmClient(self)
self.license = LicenseClient(self)
self.logstash = LogstashClient(self)
self.migration = MigrationClient(self)
self.ml = MlClient(self)
self.monitoring = MonitoringClient(self)
self.rollup = RollupClient(self)
self.searchable_snapshots = SearchableSnapshotsClient(self)
self.security = SecurityClient(self)
self.slm = SlmClient(self)
self.shutdown = ShutdownClient(self)
self.sql = SqlClient(self)
self.ssl = SslClient(self)
self.text_structure = TextStructureClient(self)
self.transform = TransformClient(self)
self.watcher = WatcherClient(self)
def __repr__(self) -> str:
try:
# get a list of all connections
nodes = [node.base_url for node in self.transport.node_pool.all()]
# truncate to 5 if there are too many
if len(nodes) > 5:
nodes = nodes[:5] + ["..."]
return f"<{self.__class__.__name__}({nodes})>"
except Exception:
# probably operating on custom transport and connection_pool, ignore
return super().__repr__()
def __enter__(self) -> "Elasticsearch":
try:
# All this to avoid a Mypy error when using unasync.
getattr(self.transport, "_async_call")()
except AttributeError:
pass
return self
def __exit__(self, *_: t.Any) -> None:
self.close()
    def options(
        self: SelfType,
        *,
        opaque_id: t.Union[DefaultType, str] = DEFAULT,
        api_key: t.Union[DefaultType, str, t.Tuple[str, str]] = DEFAULT,
        basic_auth: t.Union[DefaultType, str, t.Tuple[str, str]] = DEFAULT,
        bearer_auth: t.Union[DefaultType, str] = DEFAULT,
        headers: t.Union[DefaultType, t.Mapping[str, str]] = DEFAULT,
        request_timeout: t.Union[DefaultType, t.Optional[float]] = DEFAULT,
        ignore_status: t.Union[DefaultType, int, t.Collection[int]] = DEFAULT,
        max_retries: t.Union[DefaultType, int] = DEFAULT,
        retry_on_status: t.Union[DefaultType, int, t.Collection[int]] = DEFAULT,
        retry_on_timeout: t.Union[DefaultType, bool] = DEFAULT,
    ) -> SelfType:
        """Return a new client sharing this client's transport but with
        per-request settings (auth, headers, timeouts, retry behavior)
        overridden by the given keyword arguments.  The original client is
        not modified; options left at DEFAULT are inherited from it.
        """
        # Share the transport: the copy differs only in per-request state.
        client = type(self)(_transport=self.transport)

        resolved_headers = headers if headers is not DEFAULT else None
        # NOTE(review): assumes resolve_auth_headers returns a mutable mapping
        # even when given headers=None -- the "x-opaque-id" assignment below
        # relies on it; confirm against its implementation.
        resolved_headers = resolve_auth_headers(
            headers=resolved_headers,
            api_key=api_key,
            basic_auth=basic_auth,
            bearer_auth=bearer_auth,
        )
        resolved_opaque_id = opaque_id if opaque_id is not DEFAULT else None
        if resolved_opaque_id:
            resolved_headers["x-opaque-id"] = resolved_opaque_id

        if resolved_headers:
            # Overlay the new headers on a copy of the current ones.
            new_headers = self._headers.copy()
            new_headers.update(resolved_headers)
            client._headers = new_headers
        else:
            client._headers = self._headers.copy()

        if request_timeout is not DEFAULT:
            client._request_timeout = request_timeout

        if ignore_status is not DEFAULT:
            if isinstance(ignore_status, int):
                # Normalize a single status code to a 1-tuple.
                ignore_status = (ignore_status,)
            client._ignore_status = ignore_status

        if max_retries is not DEFAULT:
            if not isinstance(max_retries, int):
                raise TypeError("'max_retries' must be of type 'int'")
            client._max_retries = max_retries

        if retry_on_status is not DEFAULT:
            if isinstance(retry_on_status, int):
                # Normalize a single status code to a 1-tuple.
                retry_on_status = (retry_on_status,)
            client._retry_on_status = retry_on_status

        if retry_on_timeout is not DEFAULT:
            if not isinstance(retry_on_timeout, bool):
                raise TypeError("'retry_on_timeout' must be of type 'bool'")
            client._retry_on_timeout = retry_on_timeout

        return client
def close(self) -> None:
"""Closes the Transport and all internal connections"""
self.transport.close()
@_rewrite_parameters()
def ping(
self,
*,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[t.Union[t.List[str], str]] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> bool:
"""
Returns True if a successful response returns from the info() API,
otherwise returns False. This API call can fail either at the transport
layer (due to connection errors or timeouts) or from a non-2XX HTTP response
(due to authentication or authorization issues).
If you want to discover why the request failed you should use the ``info()`` API.
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html>`_
"""
__path = "/"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
try:
self.perform_request("HEAD", __path, params=__query, headers=__headers)
return True
except (ApiError, TransportError):
return False
# AUTO-GENERATED-API-DEFINITIONS #
@_rewrite_parameters(
body_name="operations",
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def bulk(
self,
*,
operations: t.Union[
t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]
],
index: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pipeline: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
refresh: t.Optional[
t.Union["t.Literal['false', 'true', 'wait_for']", bool, str]
] = None,
require_alias: t.Optional[bool] = None,
routing: t.Optional[str] = None,
source: t.Optional[
t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]]
] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
timeout: t.Optional[t.Union[int, str]] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to perform multiple index/update/delete operations in a single request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-bulk.html>`_
:param operations:
:param index: Default index for items which don't provide one
:param pipeline: The pipeline id to preprocess incoming documents with
:param refresh: If `true` then refresh the affected shards to make this operation
visible to search, if `wait_for` then wait for a refresh to make this operation
visible to search, if `false` (the default) then do nothing with refreshes.
:param require_alias: Sets require_alias for all incoming documents. Defaults
to unset (false)
:param routing: Specific routing value
:param source: True or false to return the _source field or not, or default list
of fields to return, can be overridden on each sub-request
:param source_excludes: Default list of fields to exclude from the returned _source
field, can be overridden on each sub-request
:param source_includes: Default list of fields to extract and return from the
_source field, can be overridden on each sub-request
:param timeout: Explicit operation timeout
:param wait_for_active_shards: Sets the number of shard copies that must be active
before proceeding with the bulk operation. Defaults to 1, meaning the primary
shard only. Set to `all` for all shard copies, otherwise set to any non-negative
value less than or equal to the total number of copies for the shard (number
of replicas + 1)
"""
if operations is None:
raise ValueError("Empty value passed for parameter 'operations'")
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_bulk"
else:
__path = "/_bulk"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pipeline is not None:
__query["pipeline"] = pipeline
if pretty is not None:
__query["pretty"] = pretty
if refresh is not None:
__query["refresh"] = refresh
if require_alias is not None:
__query["require_alias"] = require_alias
if routing is not None:
__query["routing"] = routing
if source is not None:
__query["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if timeout is not None:
__query["timeout"] = timeout
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
__body = operations
__headers = {
"accept": "application/json",
"content-type": "application/x-ndjson",
}
return self.perform_request( # type: ignore[return-value]
"PUT", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def clear_scroll(
self,
*,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
scroll_id: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Explicitly clears the search context for a scroll.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/clear-scroll-api.html>`_
:param scroll_id:
"""
__path = "/_search/scroll"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if scroll_id is not None:
__body["scroll_id"] = scroll_id
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"DELETE", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def close_point_in_time(
self,
*,
id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Close a point in time
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/point-in-time-api.html>`_
:param id:
"""
if id is None:
raise ValueError("Empty value passed for parameter 'id'")
__path = "/_pit"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if id is not None:
__body["id"] = id
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"DELETE", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def count(
self,
*,
index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
allow_no_indices: t.Optional[bool] = None,
analyze_wildcard: t.Optional[bool] = None,
analyzer: t.Optional[str] = None,
default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
df: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
ignore_throttled: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
lenient: t.Optional[bool] = None,
min_score: t.Optional[float] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
q: t.Optional[str] = None,
query: t.Optional[t.Mapping[str, t.Any]] = None,
routing: t.Optional[str] = None,
terminate_after: t.Optional[int] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns number of documents matching a query.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-count.html>`_
:param index: A comma-separated list of indices to restrict the results
:param allow_no_indices: Whether to ignore if a wildcard indices expression resolves
into no concrete indices. (This includes `_all` string or when no indices
have been specified)
:param analyze_wildcard: Specify whether wildcard and prefix queries should be
analyzed (default: false)
:param analyzer: The analyzer to use for the query string
:param default_operator: The default operator for query string query (AND or
OR)
:param df: The field to use as default where no field prefix is given in the
query string
:param expand_wildcards: Whether to expand wildcard expression to concrete indices
that are open, closed or both.
:param ignore_throttled: Whether specified concrete, expanded or aliased indices
should be ignored when throttled
:param ignore_unavailable: Whether specified concrete indices should be ignored
when unavailable (missing or closed)
:param lenient: Specify whether format-based query failures (such as providing
text to a numeric field) should be ignored
:param min_score: Include only documents with a specific `_score` value in the
result
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param q: Query in the Lucene query string syntax
:param query:
:param routing: A comma-separated list of specific routing values
:param terminate_after: The maximum count for each shard, upon reaching which
the query execution will terminate early
"""
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_count"
else:
__path = "/_count"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if analyze_wildcard is not None:
__query["analyze_wildcard"] = analyze_wildcard
if analyzer is not None:
__query["analyzer"] = analyzer
if default_operator is not None:
__query["default_operator"] = default_operator
if df is not None:
__query["df"] = df
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if ignore_throttled is not None:
__query["ignore_throttled"] = ignore_throttled
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if lenient is not None:
__query["lenient"] = lenient
if min_score is not None:
__query["min_score"] = min_score
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if q is not None:
__query["q"] = q
if query is not None:
__body["query"] = query
if routing is not None:
__query["routing"] = routing
if terminate_after is not None:
__query["terminate_after"] = terminate_after
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_name="document",
)
def create(
self,
*,
index: str,
id: str,
document: t.Mapping[str, t.Any],
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pipeline: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
refresh: t.Optional[
t.Union["t.Literal['false', 'true', 'wait_for']", bool, str]
] = None,
routing: t.Optional[str] = None,
timeout: t.Optional[t.Union[int, str]] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Creates a new document in the index. Returns a 409 response when a document with
a same ID already exists in the index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html>`_
:param index: The name of the index
:param id: Document ID
:param document:
:param pipeline: The pipeline id to preprocess incoming documents with
:param refresh: If `true` then refresh the affected shards to make this operation
visible to search, if `wait_for` then wait for a refresh to make this operation
visible to search, if `false` (the default) then do nothing with refreshes.
:param routing: Specific routing value
:param timeout: Explicit operation timeout
:param version: Explicit version number for concurrency control
:param version_type: Specific version type
:param wait_for_active_shards: Sets the number of shard copies that must be active
before proceeding with the index operation. Defaults to 1, meaning the primary
shard only. Set to `all` for all shard copies, otherwise set to any non-negative
value less than or equal to the total number of copies for the shard (number
of replicas + 1)
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
if document is None:
raise ValueError("Empty value passed for parameter 'document'")
__path = f"/{_quote(index)}/_create/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pipeline is not None:
__query["pipeline"] = pipeline
if pretty is not None:
__query["pretty"] = pretty
if refresh is not None:
__query["refresh"] = refresh
if routing is not None:
__query["routing"] = routing
if timeout is not None:
__query["timeout"] = timeout
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
__body = document
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"PUT", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters()
def delete(
self,
*,
index: str,
id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
if_primary_term: t.Optional[int] = None,
if_seq_no: t.Optional[int] = None,
pretty: t.Optional[bool] = None,
refresh: t.Optional[
t.Union["t.Literal['false', 'true', 'wait_for']", bool, str]
] = None,
routing: t.Optional[str] = None,
timeout: t.Optional[t.Union[int, str]] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Removes a document from the index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete.html>`_
:param index: The name of the index
:param id: The document ID
:param if_primary_term: only perform the delete operation if the last operation
that has changed the document has the specified primary term
:param if_seq_no: only perform the delete operation if the last operation that
has changed the document has the specified sequence number
:param refresh: If `true` then refresh the affected shards to make this operation
visible to search, if `wait_for` then wait for a refresh to make this operation
visible to search, if `false` (the default) then do nothing with refreshes.
:param routing: Specific routing value
:param timeout: Explicit operation timeout
:param version: Explicit version number for concurrency control
:param version_type: Specific version type
:param wait_for_active_shards: Sets the number of shard copies that must be active
before proceeding with the delete operation. Defaults to 1, meaning the primary
shard only. Set to `all` for all shard copies, otherwise set to any non-negative
value less than or equal to the total number of copies for the shard (number
of replicas + 1)
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/{_quote(index)}/_doc/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if if_primary_term is not None:
__query["if_primary_term"] = if_primary_term
if if_seq_no is not None:
__query["if_seq_no"] = if_seq_no
if pretty is not None:
__query["pretty"] = pretty
if refresh is not None:
__query["refresh"] = refresh
if routing is not None:
__query["routing"] = routing
if timeout is not None:
__query["timeout"] = timeout
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"DELETE", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
body_fields=True,
parameter_aliases={"from": "from_"},
)
def delete_by_query(
self,
*,
index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
allow_no_indices: t.Optional[bool] = None,
analyze_wildcard: t.Optional[bool] = None,
analyzer: t.Optional[str] = None,
conflicts: t.Optional[t.Union["t.Literal['abort', 'proceed']", str]] = None,
default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
df: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
from_: t.Optional[int] = None,
human: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
lenient: t.Optional[bool] = None,
max_docs: t.Optional[int] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
q: t.Optional[str] = None,
query: t.Optional[t.Mapping[str, t.Any]] = None,
refresh: t.Optional[bool] = None,
request_cache: t.Optional[bool] = None,
requests_per_second: t.Optional[int] = None,
routing: t.Optional[str] = None,
scroll: t.Optional[t.Union[int, str]] = None,
scroll_size: t.Optional[int] = None,
search_timeout: t.Optional[t.Union[int, str]] = None,
search_type: t.Optional[
t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]
] = None,
slice: t.Optional[t.Mapping[str, t.Any]] = None,
slices: t.Optional[int] = None,
sort: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
terminate_after: t.Optional[int] = None,
timeout: t.Optional[t.Union[int, str]] = None,
version: t.Optional[bool] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
wait_for_completion: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Deletes documents matching the provided query.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html>`_
:param index: A comma-separated list of index names to search; use `_all` or
empty string to perform the operation on all indices
:param allow_no_indices: Whether to ignore if a wildcard indices expression resolves
into no concrete indices. (This includes `_all` string or when no indices
have been specified)
:param analyze_wildcard: Specify whether wildcard and prefix queries should be
analyzed (default: false)
:param analyzer: The analyzer to use for the query string
:param conflicts: What to do when the delete by query hits version conflicts?
:param default_operator: The default operator for query string query (AND or
OR)
:param df: The field to use as default where no field prefix is given in the
query string
:param expand_wildcards: Whether to expand wildcard expression to concrete indices
that are open, closed or both.
:param from_: Starting offset (default: 0)
:param ignore_unavailable: Whether specified concrete indices should be ignored
when unavailable (missing or closed)
:param lenient: Specify whether format-based query failures (such as providing
text to a numeric field) should be ignored
:param max_docs:
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param q: Query in the Lucene query string syntax
:param query:
:param refresh: Should the affected indexes be refreshed?
:param request_cache: Specify if request cache should be used for this request
or not, defaults to index level setting
:param requests_per_second: The throttle for this request in sub-requests per
second. -1 means no throttle.
:param routing: A comma-separated list of specific routing values
:param scroll: Specify how long a consistent view of the index should be maintained
for scrolled search
:param scroll_size: Size on the scroll request powering the delete by query
:param search_timeout: Explicit timeout for each search request. Defaults to
no timeout.
:param search_type: Search operation type
:param slice:
:param slices: The number of slices this task should be divided into. Defaults
to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`.
:param sort: A comma-separated list of <field>:<direction> pairs
:param stats: Specific 'tag' of the request for logging and statistical purposes
:param terminate_after: The maximum number of documents to collect for each shard,
upon reaching which the query execution will terminate early.
:param timeout: Time each individual bulk request should wait for shards that
are unavailable.
:param version: Specify whether to return document version as part of a hit
:param wait_for_active_shards: Sets the number of shard copies that must be active
before proceeding with the delete by query operation. Defaults to 1, meaning
the primary shard only. Set to `all` for all shard copies, otherwise set
to any non-negative value less than or equal to the total number of copies
for the shard (number of replicas + 1)
:param wait_for_completion: Should the request should block until the delete
by query is complete.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
__path = f"/{_quote(index)}/_delete_by_query"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
# The 'sort' parameter with a colon can't be encoded to the body.
if sort is not None and (
(isinstance(sort, str) and ":" in sort)
or (
isinstance(sort, (list, tuple))
and all(isinstance(_x, str) for _x in sort)
and any(":" in _x for _x in sort)
)
):
__query["sort"] = sort
sort = None
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if analyze_wildcard is not None:
__query["analyze_wildcard"] = analyze_wildcard
if analyzer is not None:
__query["analyzer"] = analyzer
if conflicts is not None:
__query["conflicts"] = conflicts
if default_operator is not None:
__query["default_operator"] = default_operator
if df is not None:
__query["df"] = df
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if filter_path is not None:
__query["filter_path"] = filter_path
if from_ is not None:
__query["from"] = from_
if human is not None:
__query["human"] = human
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if lenient is not None:
__query["lenient"] = lenient
if max_docs is not None:
__body["max_docs"] = max_docs
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if q is not None:
__query["q"] = q
if query is not None:
__body["query"] = query
if refresh is not None:
__query["refresh"] = refresh
if request_cache is not None:
__query["request_cache"] = request_cache
if requests_per_second is not None:
__query["requests_per_second"] = requests_per_second
if routing is not None:
__query["routing"] = routing
if scroll is not None:
__query["scroll"] = scroll
if scroll_size is not None:
__query["scroll_size"] = scroll_size
if search_timeout is not None:
__query["search_timeout"] = search_timeout
if search_type is not None:
__query["search_type"] = search_type
if slice is not None:
__body["slice"] = slice
if slices is not None:
__query["slices"] = slices
if sort is not None:
__query["sort"] = sort
if stats is not None:
__query["stats"] = stats
if terminate_after is not None:
__query["terminate_after"] = terminate_after
if timeout is not None:
__query["timeout"] = timeout
if version is not None:
__query["version"] = version
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
if wait_for_completion is not None:
__query["wait_for_completion"] = wait_for_completion
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters()
def delete_by_query_rethrottle(
self,
*,
task_id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
requests_per_second: t.Optional[int] = None,
) -> ObjectApiResponse[t.Any]:
"""
Changes the number of requests per second for a particular Delete By Query operation.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html>`_
:param task_id: The task id to rethrottle
:param requests_per_second: The throttle to set on this request in floating sub-requests
per second. -1 means set no throttle.
"""
if task_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'task_id'")
__path = f"/_delete_by_query/{_quote(task_id)}/_rethrottle"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if requests_per_second is not None:
__query["requests_per_second"] = requests_per_second
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers
)
@_rewrite_parameters()
def delete_script(
self,
*,
id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
master_timeout: t.Optional[t.Union[int, str]] = None,
pretty: t.Optional[bool] = None,
timeout: t.Optional[t.Union[int, str]] = None,
) -> ObjectApiResponse[t.Any]:
"""
Deletes a script.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html>`_
:param id: Script ID
:param master_timeout: Specify timeout for connection to master
:param timeout: Explicit operation timeout
"""
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/_scripts/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if master_timeout is not None:
__query["master_timeout"] = master_timeout
if pretty is not None:
__query["pretty"] = pretty
if timeout is not None:
__query["timeout"] = timeout
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"DELETE", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def exists(
self,
*,
index: str,
id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
realtime: t.Optional[bool] = None,
refresh: t.Optional[bool] = None,
routing: t.Optional[str] = None,
source: t.Optional[
t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]]
] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
stored_fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
) -> HeadApiResponse:
"""
Returns information about whether a document exists in an index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:param index: The name of the index
:param id: The document ID
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param realtime: Specify whether to perform the operation in realtime or search
mode
:param refresh: Refresh the shard containing the document before performing the
operation
:param routing: Specific routing value
:param source: True or false to return the _source field or not, or a list of
fields to return
:param source_excludes: A list of fields to exclude from the returned _source
field
:param source_includes: A list of fields to extract and return from the _source
field
:param stored_fields: A comma-separated list of stored fields to return in the
response
:param version: Explicit version number for concurrency control
:param version_type: Specific version type
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/{_quote(index)}/_doc/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if realtime is not None:
__query["realtime"] = realtime
if refresh is not None:
__query["refresh"] = refresh
if routing is not None:
__query["routing"] = routing
if source is not None:
__query["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if stored_fields is not None:
__query["stored_fields"] = stored_fields
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"HEAD", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def exists_source(
self,
*,
index: str,
id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
realtime: t.Optional[bool] = None,
refresh: t.Optional[bool] = None,
routing: t.Optional[str] = None,
source: t.Optional[
t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]]
] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
) -> HeadApiResponse:
"""
Returns information about whether a document source exists in an index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:param index: The name of the index
:param id: The document ID
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param realtime: Specify whether to perform the operation in realtime or search
mode
:param refresh: Refresh the shard containing the document before performing the
operation
:param routing: Specific routing value
:param source: True or false to return the _source field or not, or a list of
fields to return
:param source_excludes: A list of fields to exclude from the returned _source
field
:param source_includes: A list of fields to extract and return from the _source
field
:param version: Explicit version number for concurrency control
:param version_type: Specific version type
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/{_quote(index)}/_source/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if realtime is not None:
__query["realtime"] = realtime
if refresh is not None:
__query["refresh"] = refresh
if routing is not None:
__query["routing"] = routing
if source is not None:
__query["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"HEAD", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
body_fields=True,
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def explain(
self,
*,
index: str,
id: str,
analyze_wildcard: t.Optional[bool] = None,
analyzer: t.Optional[str] = None,
default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
df: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
lenient: t.Optional[bool] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
q: t.Optional[str] = None,
query: t.Optional[t.Mapping[str, t.Any]] = None,
routing: t.Optional[str] = None,
source: t.Optional[
t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]]
] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
stored_fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns information about why a specific matches (or doesn't match) a query.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-explain.html>`_
:param index: The name of the index
:param id: The document ID
:param analyze_wildcard: Specify whether wildcards and prefix queries in the
query string query should be analyzed (default: false)
:param analyzer: The analyzer for the query string query
:param default_operator: The default operator for query string query (AND or
OR)
:param df: The default field for query string query (default: _all)
:param lenient: Specify whether format-based query failures (such as providing
text to a numeric field) should be ignored
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param q: Query in the Lucene query string syntax
:param query:
:param routing: Specific routing value
:param source: True or false to return the _source field or not, or a list of
fields to return
:param source_excludes: A list of fields to exclude from the returned _source
field
:param source_includes: A list of fields to extract and return from the _source
field
:param stored_fields: A comma-separated list of stored fields to return in the
response
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/{_quote(index)}/_explain/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
if analyze_wildcard is not None:
__query["analyze_wildcard"] = analyze_wildcard
if analyzer is not None:
__query["analyzer"] = analyzer
if default_operator is not None:
__query["default_operator"] = default_operator
if df is not None:
__query["df"] = df
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if lenient is not None:
__query["lenient"] = lenient
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if q is not None:
__query["q"] = q
if query is not None:
__body["query"] = query
if routing is not None:
__query["routing"] = routing
if source is not None:
__query["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if stored_fields is not None:
__query["stored_fields"] = stored_fields
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def field_caps(
self,
*,
fields: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
allow_no_indices: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
filters: t.Optional[str] = None,
human: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
include_unmapped: t.Optional[bool] = None,
index_filter: t.Optional[t.Mapping[str, t.Any]] = None,
pretty: t.Optional[bool] = None,
runtime_mappings: t.Optional[
t.Mapping[
str,
t.Union[
t.Mapping[str, t.Any],
t.Union[
t.List[t.Mapping[str, t.Any]],
t.Tuple[t.Mapping[str, t.Any], ...],
],
],
]
] = None,
types: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns the information about the capabilities of fields among multiple indices.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-field-caps.html>`_
:param fields: Comma-separated list of fields to retrieve capabilities for. Wildcard
(`*`) expressions are supported.
:param index: Comma-separated list of data streams, indices, and aliases used
to limit the request. Supports wildcards (*). To target all data streams
and indices, omit this parameter or use * or _all.
:param allow_no_indices: If false, the request returns an error if any wildcard
expression, index alias, or `_all` value targets only missing or closed indices.
This behavior applies even if the request targets other open indices. For
example, a request targeting `foo*,bar*` returns an error if an index starts
with foo but no index starts with bar.
:param expand_wildcards: Type of index that wildcard patterns can match. If the
request can target data streams, this argument determines whether wildcard
expressions match hidden data streams. Supports comma-separated values, such
as `open,hidden`.
:param filters: An optional set of filters: can include +metadata,-metadata,-nested,-multifield,-parent
:param ignore_unavailable: If `true`, missing or closed indices are not included
in the response.
:param include_unmapped: If true, unmapped fields are included in the response.
:param index_filter: Allows to filter indices if the provided query rewrites
to match_none on every shard.
:param runtime_mappings: Defines ad-hoc runtime fields in the request similar
to the way it is done in search requests. These fields exist only as part
of the query and take precedence over fields defined with the same name in
the index mappings.
:param types: Only return results for fields that have one of the types in the
list
"""
if fields is None:
raise ValueError("Empty value passed for parameter 'fields'")
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_field_caps"
else:
__path = "/_field_caps"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
if fields is not None:
__query["fields"] = fields
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if filter_path is not None:
__query["filter_path"] = filter_path
if filters is not None:
__query["filters"] = filters
if human is not None:
__query["human"] = human
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if include_unmapped is not None:
__query["include_unmapped"] = include_unmapped
if index_filter is not None:
__body["index_filter"] = index_filter
if pretty is not None:
__query["pretty"] = pretty
if runtime_mappings is not None:
__body["runtime_mappings"] = runtime_mappings
if types is not None:
__query["types"] = types
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def get(
self,
*,
index: str,
id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
realtime: t.Optional[bool] = None,
refresh: t.Optional[bool] = None,
routing: t.Optional[str] = None,
source: t.Optional[
t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]]
] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
stored_fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns a document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:param index: Name of the index that contains the document.
:param id: Unique identifier of the document.
:param preference: Specifies the node or shard the operation should be performed
on. Random by default.
:param realtime: Boolean) If true, the request is real-time as opposed to near-real-time.
:param refresh: If true, Elasticsearch refreshes the affected shards to make
this operation visible to search. If false, do nothing with refreshes.
:param routing: Target the specified primary shard.
:param source: True or false to return the _source field or not, or a list of
fields to return.
:param source_excludes: A comma-separated list of source fields to exclude in
the response.
:param source_includes: A comma-separated list of source fields to include in
the response.
:param stored_fields: A comma-separated list of stored fields to return in the
response
:param version: Explicit version number for concurrency control. The specified
version must match the current version of the document for the request to
succeed.
:param version_type: Specific version type: internal, external, external_gte.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/{_quote(index)}/_doc/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if realtime is not None:
__query["realtime"] = realtime
if refresh is not None:
__query["refresh"] = refresh
if routing is not None:
__query["routing"] = routing
if source is not None:
__query["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if stored_fields is not None:
__query["stored_fields"] = stored_fields
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"GET", __path, params=__query, headers=__headers
)
@_rewrite_parameters()
def get_script(
self,
*,
id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
master_timeout: t.Optional[t.Union[int, str]] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns a script.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html>`_
:param id: Script ID
:param master_timeout: Specify timeout for connection to master
"""
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/_scripts/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if master_timeout is not None:
__query["master_timeout"] = master_timeout
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"GET", __path, params=__query, headers=__headers
)
@_rewrite_parameters()
def get_script_context(
self,
*,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns all script contexts.
`<https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-contexts.html>`_
"""
__path = "/_script_context"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"GET", __path, params=__query, headers=__headers
)
@_rewrite_parameters()
def get_script_languages(
self,
*,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns available script types, languages and contexts
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html>`_
"""
__path = "/_script_language"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"GET", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def get_source(
self,
*,
index: str,
id: str,
preference: t.Optional[str] = None,
realtime: t.Optional[bool] = None,
refresh: t.Optional[bool] = None,
routing: t.Optional[str] = None,
source: t.Optional[
t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]]
] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
stored_fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns the source of a document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html>`_
:param index: Name of the index that contains the document.
:param id: Unique identifier of the document.
:param preference: Specifies the node or shard the operation should be performed
on. Random by default.
:param realtime: Boolean) If true, the request is real-time as opposed to near-real-time.
:param refresh: If true, Elasticsearch refreshes the affected shards to make
this operation visible to search. If false, do nothing with refreshes.
:param routing: Target the specified primary shard.
:param source: True or false to return the _source field or not, or a list of
fields to return.
:param source_excludes: A comma-separated list of source fields to exclude in
the response.
:param source_includes: A comma-separated list of source fields to include in
the response.
:param stored_fields:
:param version: Explicit version number for concurrency control. The specified
version must match the current version of the document for the request to
succeed.
:param version_type: Specific version type: internal, external, external_gte.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/{_quote(index)}/_source/{_quote(id)}"
__query: t.Dict[str, t.Any] = {}
if preference is not None:
__query["preference"] = preference
if realtime is not None:
__query["realtime"] = realtime
if refresh is not None:
__query["refresh"] = refresh
if routing is not None:
__query["routing"] = routing
if source is not None:
__query["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if stored_fields is not None:
__query["stored_fields"] = stored_fields
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"GET", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
body_name="document",
)
def index(
self,
*,
index: str,
document: t.Mapping[str, t.Any],
id: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
if_primary_term: t.Optional[int] = None,
if_seq_no: t.Optional[int] = None,
op_type: t.Optional[t.Union["t.Literal['create', 'index']", str]] = None,
pipeline: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
refresh: t.Optional[
t.Union["t.Literal['false', 'true', 'wait_for']", bool, str]
] = None,
require_alias: t.Optional[bool] = None,
routing: t.Optional[str] = None,
timeout: t.Optional[t.Union[int, str]] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Creates or updates a document in an index.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html>`_
:param index: The name of the index
:param document:
:param id: Document ID
:param if_primary_term: only perform the index operation if the last operation
that has changed the document has the specified primary term
:param if_seq_no: only perform the index operation if the last operation that
has changed the document has the specified sequence number
:param op_type: Explicit operation type. Defaults to `index` for requests with
an explicit document ID, and to `create`for requests without an explicit
document ID
:param pipeline: The pipeline id to preprocess incoming documents with
:param refresh: If `true` then refresh the affected shards to make this operation
visible to search, if `wait_for` then wait for a refresh to make this operation
visible to search, if `false` (the default) then do nothing with refreshes.
:param require_alias: When true, requires destination to be an alias. Default
is false
:param routing: Specific routing value
:param timeout: Explicit operation timeout
:param version: Explicit version number for concurrency control
:param version_type: Specific version type
:param wait_for_active_shards: Sets the number of shard copies that must be active
before proceeding with the index operation. Defaults to 1, meaning the primary
shard only. Set to `all` for all shard copies, otherwise set to any non-negative
value less than or equal to the total number of copies for the shard (number
of replicas + 1)
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if document is None:
raise ValueError("Empty value passed for parameter 'document'")
if index not in SKIP_IN_PATH and id not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_doc/{_quote(id)}"
__method = "PUT"
elif index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_doc"
__method = "POST"
else:
raise ValueError("Couldn't find a path for the given parameters")
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if if_primary_term is not None:
__query["if_primary_term"] = if_primary_term
if if_seq_no is not None:
__query["if_seq_no"] = if_seq_no
if op_type is not None:
__query["op_type"] = op_type
if pipeline is not None:
__query["pipeline"] = pipeline
if pretty is not None:
__query["pretty"] = pretty
if refresh is not None:
__query["refresh"] = refresh
if require_alias is not None:
__query["require_alias"] = require_alias
if routing is not None:
__query["routing"] = routing
if timeout is not None:
__query["timeout"] = timeout
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
__body = document
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
__method, __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters()
def info(
self,
*,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns basic information about the cluster.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/index.html>`_
"""
__path = "/"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"GET", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
body_fields=True,
parameter_aliases={"_source": "source"},
)
def knn_search(
self,
*,
index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
knn: t.Mapping[str, t.Any],
docvalue_fields: t.Optional[
t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]]
] = None,
error_trace: t.Optional[bool] = None,
fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
routing: t.Optional[str] = None,
source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
stored_fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Performs a kNN search.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-search.html>`_
:param index: A comma-separated list of index names to search; use `_all` or
to perform the operation on all indices
:param knn: kNN query to execute
:param docvalue_fields: The request returns doc values for field names matching
these patterns in the hits.fields property of the response. Accepts wildcard
(*) patterns.
:param fields: The request returns values for field names matching these patterns
in the hits.fields property of the response. Accepts wildcard (*) patterns.
:param routing: A comma-separated list of specific routing values
:param source: Indicates which source fields are returned for matching documents.
These fields are returned in the hits._source property of the search response.
:param stored_fields: List of stored fields to return as part of a hit. If no
fields are specified, no stored fields are included in the response. If this
field is specified, the _source parameter defaults to false. You can pass
_source: true to return both source fields and stored fields in the search
response.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if knn is None:
raise ValueError("Empty value passed for parameter 'knn'")
__path = f"/{_quote(index)}/_knn_search"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if knn is not None:
__body["knn"] = knn
if docvalue_fields is not None:
__body["docvalue_fields"] = docvalue_fields
if error_trace is not None:
__query["error_trace"] = error_trace
if fields is not None:
__body["fields"] = fields
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if routing is not None:
__query["routing"] = routing
if source is not None:
__body["_source"] = source
if stored_fields is not None:
__body["stored_fields"] = stored_fields
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def mget(
self,
*,
index: t.Optional[str] = None,
docs: t.Optional[
t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]]
] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
ids: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
realtime: t.Optional[bool] = None,
refresh: t.Optional[bool] = None,
routing: t.Optional[str] = None,
source: t.Optional[
t.Union[bool, t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]]
] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
stored_fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to get multiple documents in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-multi-get.html>`_
:param index: Name of the index to retrieve documents from when `ids` are specified,
or when a document in the `docs` array does not specify an index.
:param docs: The documents you want to retrieve. Required if no index is specified
in the request URI.
:param ids: The IDs of the documents you want to retrieve. Allowed when the index
is specified in the request URI.
:param preference: Specifies the node or shard the operation should be performed
on. Random by default.
:param realtime: If `true`, the request is real-time as opposed to near-real-time.
:param refresh: If `true`, the request refreshes relevant shards before retrieving
documents.
:param routing: Custom value used to route operations to a specific shard.
:param source: True or false to return the `_source` field or not, or a list
of fields to return.
:param source_excludes: A comma-separated list of source fields to exclude from
the response. You can also use this parameter to exclude fields from the
subset specified in `_source_includes` query parameter.
:param source_includes: A comma-separated list of source fields to include in
the response. If this parameter is specified, only these source fields are
returned. You can exclude fields from this subset using the `_source_excludes`
query parameter. If the `_source` parameter is `false`, this parameter is
ignored.
:param stored_fields: If `true`, retrieves the document fields stored in the
index rather than the document `_source`.
"""
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_mget"
else:
__path = "/_mget"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if docs is not None:
__body["docs"] = docs
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if ids is not None:
__body["ids"] = ids
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if realtime is not None:
__query["realtime"] = realtime
if refresh is not None:
__query["refresh"] = refresh
if routing is not None:
__query["routing"] = routing
if source is not None:
__query["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if stored_fields is not None:
__query["stored_fields"] = stored_fields
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_name="searches",
)
def msearch(
self,
*,
searches: t.Union[
t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]
],
index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
allow_no_indices: t.Optional[bool] = None,
ccs_minimize_roundtrips: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
ignore_throttled: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
max_concurrent_searches: t.Optional[int] = None,
max_concurrent_shard_requests: t.Optional[int] = None,
pre_filter_shard_size: t.Optional[int] = None,
pretty: t.Optional[bool] = None,
rest_total_hits_as_int: t.Optional[bool] = None,
search_type: t.Optional[
t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]
] = None,
typed_keys: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to execute several search operations in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-multi-search.html>`_
:param searches:
:param index: Comma-separated list of data streams, indices, and index aliases
to search.
:param allow_no_indices: If false, the request returns an error if any wildcard
expression, index alias, or _all value targets only missing or closed indices.
This behavior applies even if the request targets other open indices. For
example, a request targeting foo*,bar* returns an error if an index starts
with foo but no index starts with bar.
:param ccs_minimize_roundtrips: If true, network roundtrips between the coordinating
node and remote clusters are minimized for cross-cluster search requests.
:param expand_wildcards: Type of index that wildcard expressions can match. If
the request can target data streams, this argument determines whether wildcard
expressions match hidden data streams.
:param ignore_throttled: If true, concrete, expanded or aliased indices are ignored
when frozen.
:param ignore_unavailable: If true, missing or closed indices are not included
in the response.
:param max_concurrent_searches: Maximum number of concurrent searches the multi
search API can execute.
:param max_concurrent_shard_requests: Maximum number of concurrent shard requests
that each sub-search request executes per node.
:param pre_filter_shard_size: Defines a threshold that enforces a pre-filter
roundtrip to prefilter search shards based on query rewriting if the number
of shards the search request expands to exceeds the threshold. This filter
roundtrip can limit the number of shards significantly if for instance a
shard can not match any documents based on its rewrite method i.e., if date
filters are mandatory to match but the shard bounds and the query are disjoint.
:param rest_total_hits_as_int: If true, hits.total are returned as an integer
in the response. Defaults to false, which returns an object.
:param search_type: Indicates whether global term and document frequencies should
be used when scoring returned documents.
:param typed_keys: Specifies whether aggregation and suggester names should be
prefixed by their respective types in the response.
"""
if searches is None:
raise ValueError("Empty value passed for parameter 'searches'")
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_msearch"
else:
__path = "/_msearch"
__query: t.Dict[str, t.Any] = {}
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if ccs_minimize_roundtrips is not None:
__query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if ignore_throttled is not None:
__query["ignore_throttled"] = ignore_throttled
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if max_concurrent_searches is not None:
__query["max_concurrent_searches"] = max_concurrent_searches
if max_concurrent_shard_requests is not None:
__query["max_concurrent_shard_requests"] = max_concurrent_shard_requests
if pre_filter_shard_size is not None:
__query["pre_filter_shard_size"] = pre_filter_shard_size
if pretty is not None:
__query["pretty"] = pretty
if rest_total_hits_as_int is not None:
__query["rest_total_hits_as_int"] = rest_total_hits_as_int
if search_type is not None:
__query["search_type"] = search_type
if typed_keys is not None:
__query["typed_keys"] = typed_keys
__body = searches
__headers = {
"accept": "application/json",
"content-type": "application/x-ndjson",
}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_name="search_templates",
)
def msearch_template(
self,
*,
search_templates: t.Union[
t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]
],
index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
ccs_minimize_roundtrips: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
max_concurrent_searches: t.Optional[int] = None,
pretty: t.Optional[bool] = None,
rest_total_hits_as_int: t.Optional[bool] = None,
search_type: t.Optional[
t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]
] = None,
typed_keys: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to execute several search template operations in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-multi-search.html>`_
:param search_templates:
:param index: A comma-separated list of index names to use as default
:param ccs_minimize_roundtrips: Indicates whether network round-trips should
be minimized as part of cross-cluster search requests execution
:param max_concurrent_searches: Controls the maximum number of concurrent searches
the multi search api will execute
:param rest_total_hits_as_int: Indicates whether hits.total should be rendered
as an integer or an object in the rest search response
:param search_type: Search operation type
:param typed_keys: Specify whether aggregation and suggester names should be
prefixed by their respective types in the response
"""
if search_templates is None:
raise ValueError("Empty value passed for parameter 'search_templates'")
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_msearch/template"
else:
__path = "/_msearch/template"
__query: t.Dict[str, t.Any] = {}
if ccs_minimize_roundtrips is not None:
__query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if max_concurrent_searches is not None:
__query["max_concurrent_searches"] = max_concurrent_searches
if pretty is not None:
__query["pretty"] = pretty
if rest_total_hits_as_int is not None:
__query["rest_total_hits_as_int"] = rest_total_hits_as_int
if search_type is not None:
__query["search_type"] = search_type
if typed_keys is not None:
__query["typed_keys"] = typed_keys
__body = search_templates
__headers = {
"accept": "application/json",
"content-type": "application/x-ndjson",
}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def mtermvectors(
self,
*,
index: t.Optional[str] = None,
docs: t.Optional[
t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]]
] = None,
error_trace: t.Optional[bool] = None,
field_statistics: t.Optional[bool] = None,
fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
ids: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
offsets: t.Optional[bool] = None,
payloads: t.Optional[bool] = None,
positions: t.Optional[bool] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
realtime: t.Optional[bool] = None,
routing: t.Optional[str] = None,
term_statistics: t.Optional[bool] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns multiple termvectors in one request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-multi-termvectors.html>`_
:param index: The index in which the document resides.
:param docs:
:param field_statistics: Specifies if document count, sum of document frequencies
and sum of total term frequencies should be returned. Applies to all returned
documents unless otherwise specified in body "params" or "docs".
:param fields: A comma-separated list of fields to return. Applies to all returned
documents unless otherwise specified in body "params" or "docs".
:param ids:
:param offsets: Specifies if term offsets should be returned. Applies to all
returned documents unless otherwise specified in body "params" or "docs".
:param payloads: Specifies if term payloads should be returned. Applies to all
returned documents unless otherwise specified in body "params" or "docs".
:param positions: Specifies if term positions should be returned. Applies to
all returned documents unless otherwise specified in body "params" or "docs".
:param preference: Specify the node or shard the operation should be performed
on (default: random) .Applies to all returned documents unless otherwise
specified in body "params" or "docs".
:param realtime: Specifies if requests are real-time as opposed to near-real-time
(default: true).
:param routing: Specific routing value. Applies to all returned documents unless
otherwise specified in body "params" or "docs".
:param term_statistics: Specifies if total term frequency and document frequency
should be returned. Applies to all returned documents unless otherwise specified
in body "params" or "docs".
:param version: Explicit version number for concurrency control
:param version_type: Specific version type
"""
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_mtermvectors"
else:
__path = "/_mtermvectors"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if docs is not None:
__body["docs"] = docs
if error_trace is not None:
__query["error_trace"] = error_trace
if field_statistics is not None:
__query["field_statistics"] = field_statistics
if fields is not None:
__query["fields"] = fields
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if ids is not None:
__body["ids"] = ids
if offsets is not None:
__query["offsets"] = offsets
if payloads is not None:
__query["payloads"] = payloads
if positions is not None:
__query["positions"] = positions
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if realtime is not None:
__query["realtime"] = realtime
if routing is not None:
__query["routing"] = routing
if term_statistics is not None:
__query["term_statistics"] = term_statistics
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters()
def open_point_in_time(
self,
*,
index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
keep_alive: t.Union[int, str],
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Open a point in time that can be used in subsequent searches
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/point-in-time-api.html>`_
:param index: A comma-separated list of index names to open point in time; use
`_all` or empty string to perform the operation on all indices
:param keep_alive: Specific the time to live for the point in time
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if keep_alive is None:
raise ValueError("Empty value passed for parameter 'keep_alive'")
__path = f"/{_quote(index)}/_pit"
__query: t.Dict[str, t.Any] = {}
if keep_alive is not None:
__query["keep_alive"] = keep_alive
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
body_fields=True,
)
def put_script(
self,
*,
id: str,
script: t.Mapping[str, t.Any],
context: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
master_timeout: t.Optional[t.Union[int, str]] = None,
pretty: t.Optional[bool] = None,
timeout: t.Optional[t.Union[int, str]] = None,
) -> ObjectApiResponse[t.Any]:
"""
Creates or updates a script.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html>`_
:param id: Script ID
:param script:
:param context: Script context
:param master_timeout: Specify timeout for connection to master
:param timeout: Explicit operation timeout
"""
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
if script is None:
raise ValueError("Empty value passed for parameter 'script'")
if id not in SKIP_IN_PATH and context not in SKIP_IN_PATH:
__path = f"/_scripts/{_quote(id)}/{_quote(context)}"
elif id not in SKIP_IN_PATH:
__path = f"/_scripts/{_quote(id)}"
else:
raise ValueError("Couldn't find a path for the given parameters")
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if script is not None:
__body["script"] = script
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if master_timeout is not None:
__query["master_timeout"] = master_timeout
if pretty is not None:
__query["pretty"] = pretty
if timeout is not None:
__query["timeout"] = timeout
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"PUT", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def rank_eval(
self,
*,
index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
requests: t.Union[
t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]
],
allow_no_indices: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
metric: t.Optional[t.Mapping[str, t.Any]] = None,
pretty: t.Optional[bool] = None,
search_type: t.Optional[str] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to evaluate the quality of ranked search results over a set of typical
search queries
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html>`_
:param index: Comma-separated list of data streams, indices, and index aliases
used to limit the request. Wildcard (`*`) expressions are supported. To target
all data streams and indices in a cluster, omit this parameter or use `_all`
or `*`.
:param requests: A set of typical search requests, together with their provided
ratings.
:param allow_no_indices: If `false`, the request returns an error if any wildcard
expression, index alias, or `_all` value targets only missing or closed indices.
This behavior applies even if the request targets other open indices. For
example, a request targeting `foo*,bar*` returns an error if an index starts
with `foo` but no index starts with `bar`.
:param expand_wildcards: Whether to expand wildcard expression to concrete indices
that are open, closed or both.
:param ignore_unavailable: If `true`, missing or closed indices are not included
in the response.
:param metric: Definition of the evaluation metric to calculate.
:param search_type: Search operation type
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if requests is None:
raise ValueError("Empty value passed for parameter 'requests'")
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_rank_eval"
else:
__path = "/_rank_eval"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if requests is not None:
__body["requests"] = requests
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if metric is not None:
__body["metric"] = metric
if pretty is not None:
__query["pretty"] = pretty
if search_type is not None:
__query["search_type"] = search_type
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def reindex(
self,
*,
dest: t.Mapping[str, t.Any],
source: t.Mapping[str, t.Any],
conflicts: t.Optional[t.Union["t.Literal['abort', 'proceed']", str]] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
max_docs: t.Optional[int] = None,
pretty: t.Optional[bool] = None,
refresh: t.Optional[bool] = None,
requests_per_second: t.Optional[int] = None,
require_alias: t.Optional[bool] = None,
script: t.Optional[t.Mapping[str, t.Any]] = None,
scroll: t.Optional[t.Union[int, str]] = None,
size: t.Optional[int] = None,
slices: t.Optional[int] = None,
timeout: t.Optional[t.Union[int, str]] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
wait_for_completion: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to copy documents from one index to another, optionally filtering the
source documents by a query, changing the destination index settings, or fetching
the documents from a remote cluster.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html>`_
:param dest:
:param source:
:param conflicts:
:param max_docs:
:param refresh: Should the affected indexes be refreshed?
:param requests_per_second: The throttle to set on this request in sub-requests
per second. -1 means no throttle.
:param require_alias:
:param script:
:param scroll: Control how long to keep the search context alive
:param size:
:param slices: The number of slices this task should be divided into. Defaults
to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`.
:param timeout: Time each individual bulk request should wait for shards that
are unavailable.
:param wait_for_active_shards: Sets the number of shard copies that must be active
before proceeding with the reindex operation. Defaults to 1, meaning the
primary shard only. Set to `all` for all shard copies, otherwise set to any
non-negative value less than or equal to the total number of copies for the
shard (number of replicas + 1)
:param wait_for_completion: Should the request should block until the reindex
is complete.
"""
if dest is None:
raise ValueError("Empty value passed for parameter 'dest'")
if source is None:
raise ValueError("Empty value passed for parameter 'source'")
__path = "/_reindex"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if dest is not None:
__body["dest"] = dest
if source is not None:
__body["source"] = source
if conflicts is not None:
__body["conflicts"] = conflicts
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if max_docs is not None:
__body["max_docs"] = max_docs
if pretty is not None:
__query["pretty"] = pretty
if refresh is not None:
__query["refresh"] = refresh
if requests_per_second is not None:
__query["requests_per_second"] = requests_per_second
if require_alias is not None:
__query["require_alias"] = require_alias
if script is not None:
__body["script"] = script
if scroll is not None:
__query["scroll"] = scroll
if size is not None:
__body["size"] = size
if slices is not None:
__query["slices"] = slices
if timeout is not None:
__query["timeout"] = timeout
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
if wait_for_completion is not None:
__query["wait_for_completion"] = wait_for_completion
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters()
def reindex_rethrottle(
self,
*,
task_id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
requests_per_second: t.Optional[int] = None,
) -> ObjectApiResponse[t.Any]:
"""
Changes the number of requests per second for a particular Reindex operation.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html>`_
:param task_id: The task id to rethrottle
:param requests_per_second: The throttle to set on this request in floating sub-requests
per second. -1 means set no throttle.
"""
if task_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'task_id'")
__path = f"/_reindex/{_quote(task_id)}/_rethrottle"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if requests_per_second is not None:
__query["requests_per_second"] = requests_per_second
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
body_fields=True,
ignore_deprecated_options={"params"},
)
def render_search_template(
self,
*,
id: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
file: t.Optional[str] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
params: t.Optional[t.Mapping[str, t.Any]] = None,
pretty: t.Optional[bool] = None,
source: t.Optional[str] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to use the Mustache language to pre-render a search definition.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/render-search-template-api.html>`_
:param id: The id of the stored search template
:param file:
:param params:
:param source:
"""
if id not in SKIP_IN_PATH:
__path = f"/_render/template/{_quote(id)}"
else:
__path = "/_render/template"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if file is not None:
__body["file"] = file
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if params is not None:
__body["params"] = params
if pretty is not None:
__query["pretty"] = pretty
if source is not None:
__body["source"] = source
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def scripts_painless_execute(
self,
*,
context: t.Optional[str] = None,
context_setup: t.Optional[t.Mapping[str, t.Any]] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
script: t.Optional[t.Mapping[str, t.Any]] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows an arbitrary script to be executed and a result to be returned
`<https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-execute-api.html>`_
:param context:
:param context_setup:
:param script:
"""
__path = "/_scripts/painless/_execute"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if context is not None:
__body["context"] = context
if context_setup is not None:
__body["context_setup"] = context_setup
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if script is not None:
__body["script"] = script
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def scroll(
self,
*,
scroll_id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
rest_total_hits_as_int: t.Optional[bool] = None,
scroll: t.Optional[t.Union[int, str]] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to retrieve a large numbers of results from a single search request.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-request-body.html#request-body-search-scroll>`_
:param scroll_id: Scroll ID of the search.
:param rest_total_hits_as_int: If true, the API response’s hit.total property
is returned as an integer. If false, the API response’s hit.total property
is returned as an object.
:param scroll: Period to retain the search context for scrolling.
"""
if scroll_id is None:
raise ValueError("Empty value passed for parameter 'scroll_id'")
__path = "/_search/scroll"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if scroll_id is not None:
__body["scroll_id"] = scroll_id
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if rest_total_hits_as_int is not None:
__query["rest_total_hits_as_int"] = rest_total_hits_as_int
if scroll is not None:
__body["scroll"] = scroll
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
    @_rewrite_parameters(
        body_fields=True,
        parameter_aliases={
            "_source": "source",
            "_source_excludes": "source_excludes",
            "_source_includes": "source_includes",
            "from": "from_",
        },
    )
    def search(
        self,
        *,
        index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
        aggregations: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
        aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
        allow_no_indices: t.Optional[bool] = None,
        allow_partial_search_results: t.Optional[bool] = None,
        analyze_wildcard: t.Optional[bool] = None,
        analyzer: t.Optional[str] = None,
        batched_reduce_size: t.Optional[int] = None,
        ccs_minimize_roundtrips: t.Optional[bool] = None,
        collapse: t.Optional[t.Mapping[str, t.Any]] = None,
        default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
        df: t.Optional[str] = None,
        docvalue_fields: t.Optional[
            t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]]
        ] = None,
        error_trace: t.Optional[bool] = None,
        expand_wildcards: t.Optional[
            t.Union[
                t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
                t.Union[
                    t.List[
                        t.Union[
                            "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
                        ]
                    ],
                    t.Tuple[
                        t.Union[
                            "t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
                        ],
                        ...,
                    ],
                ],
            ]
        ] = None,
        explain: t.Optional[bool] = None,
        fields: t.Optional[
            t.Union[t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]]
        ] = None,
        filter_path: t.Optional[
            t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
        ] = None,
        from_: t.Optional[int] = None,
        highlight: t.Optional[t.Mapping[str, t.Any]] = None,
        human: t.Optional[bool] = None,
        ignore_throttled: t.Optional[bool] = None,
        ignore_unavailable: t.Optional[bool] = None,
        indices_boost: t.Optional[
            t.Union[t.List[t.Mapping[str, float]], t.Tuple[t.Mapping[str, float], ...]]
        ] = None,
        lenient: t.Optional[bool] = None,
        max_concurrent_shard_requests: t.Optional[int] = None,
        min_compatible_shard_node: t.Optional[str] = None,
        min_score: t.Optional[float] = None,
        pit: t.Optional[t.Mapping[str, t.Any]] = None,
        post_filter: t.Optional[t.Mapping[str, t.Any]] = None,
        pre_filter_shard_size: t.Optional[int] = None,
        preference: t.Optional[str] = None,
        pretty: t.Optional[bool] = None,
        profile: t.Optional[bool] = None,
        q: t.Optional[str] = None,
        query: t.Optional[t.Mapping[str, t.Any]] = None,
        request_cache: t.Optional[bool] = None,
        rescore: t.Optional[
            t.Union[
                t.Mapping[str, t.Any],
                t.Union[
                    t.List[t.Mapping[str, t.Any]], t.Tuple[t.Mapping[str, t.Any], ...]
                ],
            ]
        ] = None,
        rest_total_hits_as_int: t.Optional[bool] = None,
        routing: t.Optional[str] = None,
        runtime_mappings: t.Optional[
            t.Mapping[
                str,
                t.Union[
                    t.Mapping[str, t.Any],
                    t.Union[
                        t.List[t.Mapping[str, t.Any]],
                        t.Tuple[t.Mapping[str, t.Any], ...],
                    ],
                ],
            ]
        ] = None,
        script_fields: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
        scroll: t.Optional[t.Union[int, str]] = None,
        search_after: t.Optional[
            t.Union[
                t.List[t.Union[None, float, int, str]],
                t.Tuple[t.Union[None, float, int, str], ...],
            ]
        ] = None,
        search_type: t.Optional[
            t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]
        ] = None,
        seq_no_primary_term: t.Optional[bool] = None,
        size: t.Optional[int] = None,
        slice: t.Optional[t.Mapping[str, t.Any]] = None,
        sort: t.Optional[
            t.Union[
                t.Union[str, t.Mapping[str, t.Any]],
                t.Union[
                    t.List[t.Union[str, t.Mapping[str, t.Any]]],
                    t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...],
                ],
            ]
        ] = None,
        source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
        source_excludes: t.Optional[
            t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
        ] = None,
        source_includes: t.Optional[
            t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
        ] = None,
        stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
        stored_fields: t.Optional[
            t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
        ] = None,
        suggest: t.Optional[t.Mapping[str, t.Any]] = None,
        suggest_field: t.Optional[str] = None,
        suggest_mode: t.Optional[
            t.Union["t.Literal['always', 'missing', 'popular']", str]
        ] = None,
        suggest_size: t.Optional[int] = None,
        suggest_text: t.Optional[str] = None,
        terminate_after: t.Optional[int] = None,
        timeout: t.Optional[str] = None,
        track_scores: t.Optional[bool] = None,
        track_total_hits: t.Optional[t.Union[bool, int]] = None,
        typed_keys: t.Optional[bool] = None,
        version: t.Optional[bool] = None,
    ) -> ObjectApiResponse[t.Any]:
        """
        Returns results matching a query.

        `<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-search.html>`_

        :param index: A comma-separated list of index names to search; use `_all` or
            empty string to perform the operation on all indices
        :param aggregations:
        :param aggs:
        :param allow_no_indices: Whether to ignore if a wildcard indices expression resolves
            into no concrete indices. (This includes `_all` string or when no indices
            have been specified)
        :param allow_partial_search_results: Indicate if an error should be returned
            if there is a partial search failure or timeout
        :param analyze_wildcard: Specify whether wildcard and prefix queries should be
            analyzed (default: false)
        :param analyzer: The analyzer to use for the query string
        :param batched_reduce_size: The number of shard results that should be reduced
            at once on the coordinating node. This value should be used as a protection
            mechanism to reduce the memory overhead per search request if the potential
            number of shards in the request can be large.
        :param ccs_minimize_roundtrips: Indicates whether network round-trips should
            be minimized as part of cross-cluster search requests execution
        :param collapse:
        :param default_operator: The default operator for query string query (AND or
            OR)
        :param df: The field to use as default where no field prefix is given in the
            query string
        :param docvalue_fields: Array of wildcard (*) patterns. The request returns doc
            values for field names matching these patterns in the hits.fields property
            of the response.
        :param expand_wildcards: Whether to expand wildcard expression to concrete indices
            that are open, closed or both.
        :param explain: If true, returns detailed information about score computation
            as part of a hit.
        :param fields: Array of wildcard (*) patterns. The request returns values for
            field names matching these patterns in the hits.fields property of the response.
        :param from_: Starting document offset. By default, you cannot page through more
            than 10,000 hits using the from and size parameters. To page through more
            hits, use the search_after parameter.
        :param highlight:
        :param ignore_throttled: Whether specified concrete, expanded or aliased indices
            should be ignored when throttled
        :param ignore_unavailable: Whether specified concrete indices should be ignored
            when unavailable (missing or closed)
        :param indices_boost: Boosts the _score of documents from specified indices.
        :param lenient: Specify whether format-based query failures (such as providing
            text to a numeric field) should be ignored
        :param max_concurrent_shard_requests: The number of concurrent shard requests
            per node this search executes concurrently. This value should be used to
            limit the impact of the search on the cluster in order to limit the number
            of concurrent shard requests
        :param min_compatible_shard_node: The minimum compatible version that all shards
            involved in search should have for this request to be successful
        :param min_score: Minimum _score for matching documents. Documents with a lower
            _score are not included in the search results.
        :param pit: Limits the search to a point in time (PIT). If you provide a PIT,
            you cannot specify an <index> in the request path.
        :param post_filter:
        :param pre_filter_shard_size: A threshold that enforces a pre-filter roundtrip
            to prefilter search shards based on query rewriting if the number of shards
            the search request expands to exceeds the threshold. This filter roundtrip
            can limit the number of shards significantly if for instance a shard can
            not match any documents based on its rewrite method ie. if date filters are
            mandatory to match but the shard bounds and the query are disjoint.
        :param preference: Specify the node or shard the operation should be performed
            on (default: random)
        :param profile:
        :param q: Query in the Lucene query string syntax
        :param query: Defines the search definition using the Query DSL.
        :param request_cache: Specify if request cache should be used for this request
            or not, defaults to index level setting
        :param rescore:
        :param rest_total_hits_as_int: Indicates whether hits.total should be rendered
            as an integer or an object in the rest search response
        :param routing: A comma-separated list of specific routing values
        :param runtime_mappings: Defines one or more runtime fields in the search request.
            These fields take precedence over mapped fields with the same name.
        :param script_fields: Retrieve a script evaluation (based on different fields)
            for each hit.
        :param scroll: Specify how long a consistent view of the index should be maintained
            for scrolled search
        :param search_after:
        :param search_type: Search operation type
        :param seq_no_primary_term: If true, returns sequence number and primary term
            of the last modification of each hit. See Optimistic concurrency control.
        :param size: The number of hits to return. By default, you cannot page through
            more than 10,000 hits using the from and size parameters. To page through
            more hits, use the search_after parameter.
        :param slice:
        :param sort:
        :param source: Indicates which source fields are returned for matching documents.
            These fields are returned in the hits._source property of the search response.
        :param source_excludes: A list of fields to exclude from the returned _source
            field
        :param source_includes: A list of fields to extract and return from the _source
            field
        :param stats: Stats groups to associate with the search. Each group maintains
            a statistics aggregation for its associated searches. You can retrieve these
            stats using the indices stats API.
        :param stored_fields: List of stored fields to return as part of a hit. If no
            fields are specified, no stored fields are included in the response. If this
            field is specified, the _source parameter defaults to false. You can pass
            _source: true to return both source fields and stored fields in the search
            response.
        :param suggest:
        :param suggest_field: Specifies which field to use for suggestions.
        :param suggest_mode: Specify suggest mode
        :param suggest_size: How many suggestions to return in response
        :param suggest_text: The source text for which the suggestions should be returned.
        :param terminate_after: Maximum number of documents to collect for each shard.
            If a query reaches this limit, Elasticsearch terminates the query early.
            Elasticsearch collects documents before sorting. Defaults to 0, which does
            not terminate query execution early.
        :param timeout: Specifies the period of time to wait for a response from each
            shard. If no response is received before the timeout expires, the request
            fails and returns an error. Defaults to no timeout.
        :param track_scores: If true, calculate and return document scores, even if the
            scores are not used for sorting.
        :param track_total_hits: Number of hits matching the query to count accurately.
            If true, the exact number of hits is returned at the cost of some performance.
            If false, the response does not include the total number of hits matching
            the query. Defaults to 10,000 hits.
        :param typed_keys: Specify whether aggregation and suggester names should be
            prefixed by their respective types in the response
        :param version: If true, returns document version as part of a hit.
        """
        # Target the requested indices when given; otherwise search all indices.
        if index not in SKIP_IN_PATH:
            __path = f"/{_quote(index)}/_search"
        else:
            __path = "/_search"
        __body: t.Dict[str, t.Any] = {}
        __query: t.Dict[str, t.Any] = {}
        # The 'sort' parameter with a colon can't be encoded to the body.
        # When every sort entry is a string and any contains ':', the value is
        # moved to the query string and excluded from the body below.
        if sort is not None and (
            (isinstance(sort, str) and ":" in sort)
            or (
                isinstance(sort, (list, tuple))
                and all(isinstance(_x, str) for _x in sort)
                and any(":" in _x for _x in sort)
            )
        ):
            __query["sort"] = sort
            sort = None
        # Copy each supplied parameter into the JSON body or the query string.
        # Note the aliased names: from_ -> "from", source -> "_source",
        # source_excludes/_includes -> "_source_excludes"/"_source_includes".
        if aggregations is not None:
            __body["aggregations"] = aggregations
        if aggs is not None:
            __body["aggs"] = aggs
        if allow_no_indices is not None:
            __query["allow_no_indices"] = allow_no_indices
        if allow_partial_search_results is not None:
            __query["allow_partial_search_results"] = allow_partial_search_results
        if analyze_wildcard is not None:
            __query["analyze_wildcard"] = analyze_wildcard
        if analyzer is not None:
            __query["analyzer"] = analyzer
        if batched_reduce_size is not None:
            __query["batched_reduce_size"] = batched_reduce_size
        if ccs_minimize_roundtrips is not None:
            __query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips
        if collapse is not None:
            __body["collapse"] = collapse
        if default_operator is not None:
            __query["default_operator"] = default_operator
        if df is not None:
            __query["df"] = df
        if docvalue_fields is not None:
            __body["docvalue_fields"] = docvalue_fields
        if error_trace is not None:
            __query["error_trace"] = error_trace
        if expand_wildcards is not None:
            __query["expand_wildcards"] = expand_wildcards
        if explain is not None:
            __body["explain"] = explain
        if fields is not None:
            __body["fields"] = fields
        if filter_path is not None:
            __query["filter_path"] = filter_path
        if from_ is not None:
            __body["from"] = from_
        if highlight is not None:
            __body["highlight"] = highlight
        if human is not None:
            __query["human"] = human
        if ignore_throttled is not None:
            __query["ignore_throttled"] = ignore_throttled
        if ignore_unavailable is not None:
            __query["ignore_unavailable"] = ignore_unavailable
        if indices_boost is not None:
            __body["indices_boost"] = indices_boost
        if lenient is not None:
            __query["lenient"] = lenient
        if max_concurrent_shard_requests is not None:
            __query["max_concurrent_shard_requests"] = max_concurrent_shard_requests
        if min_compatible_shard_node is not None:
            __query["min_compatible_shard_node"] = min_compatible_shard_node
        if min_score is not None:
            __body["min_score"] = min_score
        if pit is not None:
            __body["pit"] = pit
        if post_filter is not None:
            __body["post_filter"] = post_filter
        if pre_filter_shard_size is not None:
            __query["pre_filter_shard_size"] = pre_filter_shard_size
        if preference is not None:
            __query["preference"] = preference
        if pretty is not None:
            __query["pretty"] = pretty
        if profile is not None:
            __body["profile"] = profile
        if q is not None:
            __query["q"] = q
        if query is not None:
            __body["query"] = query
        if request_cache is not None:
            __query["request_cache"] = request_cache
        if rescore is not None:
            __body["rescore"] = rescore
        if rest_total_hits_as_int is not None:
            __query["rest_total_hits_as_int"] = rest_total_hits_as_int
        if routing is not None:
            __query["routing"] = routing
        if runtime_mappings is not None:
            __body["runtime_mappings"] = runtime_mappings
        if script_fields is not None:
            __body["script_fields"] = script_fields
        if scroll is not None:
            __query["scroll"] = scroll
        if search_after is not None:
            __body["search_after"] = search_after
        if search_type is not None:
            __query["search_type"] = search_type
        if seq_no_primary_term is not None:
            __body["seq_no_primary_term"] = seq_no_primary_term
        if size is not None:
            __body["size"] = size
        if slice is not None:
            __body["slice"] = slice
        if sort is not None:
            __body["sort"] = sort
        if source is not None:
            __body["_source"] = source
        if source_excludes is not None:
            __query["_source_excludes"] = source_excludes
        if source_includes is not None:
            __query["_source_includes"] = source_includes
        if stats is not None:
            __body["stats"] = stats
        if stored_fields is not None:
            __body["stored_fields"] = stored_fields
        if suggest is not None:
            __body["suggest"] = suggest
        if suggest_field is not None:
            __query["suggest_field"] = suggest_field
        if suggest_mode is not None:
            __query["suggest_mode"] = suggest_mode
        if suggest_size is not None:
            __query["suggest_size"] = suggest_size
        if suggest_text is not None:
            __query["suggest_text"] = suggest_text
        if terminate_after is not None:
            __body["terminate_after"] = terminate_after
        if timeout is not None:
            __body["timeout"] = timeout
        if track_scores is not None:
            __body["track_scores"] = track_scores
        if track_total_hits is not None:
            __body["track_total_hits"] = track_total_hits
        if typed_keys is not None:
            __query["typed_keys"] = typed_keys
        if version is not None:
            __body["version"] = version
        # Omit the request body entirely when no body-level parameter was given.
        if not __body:
            __body = None  # type: ignore[assignment]
        __headers = {"accept": "application/json"}
        if __body is not None:
            __headers["content-type"] = "application/json"
        return self.perform_request(  # type: ignore[return-value]
            "POST", __path, params=__query, headers=__headers, body=__body
        )
@_rewrite_parameters(
body_fields=True,
)
def search_mvt(
self,
*,
index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
field: str,
zoom: int,
x: int,
y: int,
aggs: t.Optional[t.Mapping[str, t.Mapping[str, t.Any]]] = None,
error_trace: t.Optional[bool] = None,
exact_bounds: t.Optional[bool] = None,
extent: t.Optional[int] = None,
fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
grid_precision: t.Optional[int] = None,
grid_type: t.Optional[
t.Union["t.Literal['centroid', 'grid', 'point']", str]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
query: t.Optional[t.Mapping[str, t.Any]] = None,
runtime_mappings: t.Optional[
t.Mapping[
str,
t.Union[
t.Mapping[str, t.Any],
t.Union[
t.List[t.Mapping[str, t.Any]],
t.Tuple[t.Mapping[str, t.Any], ...],
],
],
]
] = None,
size: t.Optional[int] = None,
sort: t.Optional[
t.Union[
t.Union[str, t.Mapping[str, t.Any]],
t.Union[
t.List[t.Union[str, t.Mapping[str, t.Any]]],
t.Tuple[t.Union[str, t.Mapping[str, t.Any]], ...],
],
]
] = None,
track_total_hits: t.Optional[t.Union[bool, int]] = None,
) -> BinaryApiResponse:
"""
Searches a vector tile for geospatial values. Returns results as a binary Mapbox
vector tile.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-vector-tile-api.html>`_
:param index: Comma-separated list of data streams, indices, or aliases to search
:param field: Field containing geospatial data to return
:param zoom: Zoom level for the vector tile to search
:param x: X coordinate for the vector tile to search
:param y: Y coordinate for the vector tile to search
:param aggs: Sub-aggregations for the geotile_grid. Supports the following aggregation
types: - avg - cardinality - max - min - sum
:param exact_bounds: If false, the meta layer’s feature is the bounding box of
the tile. If true, the meta layer’s feature is a bounding box resulting from
a geo_bounds aggregation. The aggregation runs on <field> values that intersect
the <zoom>/<x>/<y> tile with wrap_longitude set to false. The resulting bounding
box may be larger than the vector tile.
:param extent: Size, in pixels, of a side of the tile. Vector tiles are square
with equal sides.
:param fields: Fields to return in the `hits` layer. Supports wildcards (`*`).
This parameter does not support fields with array values. Fields with array
values may return inconsistent results.
:param grid_precision: Additional zoom levels available through the aggs layer.
For example, if <zoom> is 7 and grid_precision is 8, you can zoom in up to
level 15. Accepts 0-8. If 0, results don’t include the aggs layer.
:param grid_type: Determines the geometry type for features in the aggs layer.
In the aggs layer, each feature represents a geotile_grid cell. If 'grid'
each feature is a Polygon of the cells bounding box. If 'point' each feature
is a Point that is the centroid of the cell.
:param query: Query DSL used to filter documents for the search.
:param runtime_mappings: Defines one or more runtime fields in the search request.
These fields take precedence over mapped fields with the same name.
:param size: Maximum number of features to return in the hits layer. Accepts
0-10000. If 0, results don’t include the hits layer.
:param sort: Sorts features in the hits layer. By default, the API calculates
a bounding box for each feature. It sorts features based on this box’s diagonal
length, from longest to shortest.
:param track_total_hits: Number of hits matching the query to count accurately.
If `true`, the exact number of hits is returned at the cost of some performance.
If `false`, the response does not include the total number of hits matching
the query.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if field in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'field'")
if zoom in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'zoom'")
if x in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'x'")
if y in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'y'")
__path = f"/{_quote(index)}/_mvt/{_quote(field)}/{_quote(zoom)}/{_quote(x)}/{_quote(y)}"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
# The 'sort' parameter with a colon can't be encoded to the body.
if sort is not None and (
(isinstance(sort, str) and ":" in sort)
or (
isinstance(sort, (list, tuple))
and all(isinstance(_x, str) for _x in sort)
and any(":" in _x for _x in sort)
)
):
__query["sort"] = sort
sort = None
if aggs is not None:
__body["aggs"] = aggs
if error_trace is not None:
__query["error_trace"] = error_trace
if exact_bounds is not None:
__body["exact_bounds"] = exact_bounds
if extent is not None:
__body["extent"] = extent
if fields is not None:
__body["fields"] = fields
if filter_path is not None:
__query["filter_path"] = filter_path
if grid_precision is not None:
__body["grid_precision"] = grid_precision
if grid_type is not None:
__body["grid_type"] = grid_type
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if query is not None:
__body["query"] = query
if runtime_mappings is not None:
__body["runtime_mappings"] = runtime_mappings
if size is not None:
__body["size"] = size
if sort is not None:
__body["sort"] = sort
if track_total_hits is not None:
__body["track_total_hits"] = track_total_hits
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/vnd.mapbox-vector-tile"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters()
def search_shards(
self,
*,
index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
allow_no_indices: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
local: t.Optional[bool] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
routing: t.Optional[str] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns information about the indices and shards that a search request would
be executed against.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-shards.html>`_
:param index: A comma-separated list of index names to search; use `_all` or
empty string to perform the operation on all indices
:param allow_no_indices: Whether to ignore if a wildcard indices expression resolves
into no concrete indices. (This includes `_all` string or when no indices
have been specified)
:param expand_wildcards: Whether to expand wildcard expression to concrete indices
that are open, closed or both.
:param ignore_unavailable: Whether specified concrete indices should be ignored
when unavailable (missing or closed)
:param local: Return local information, do not retrieve the state from master
node (default: false)
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param routing: Specific routing value
"""
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_search_shards"
else:
__path = "/_search_shards"
__query: t.Dict[str, t.Any] = {}
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if local is not None:
__query["local"] = local
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if routing is not None:
__query["routing"] = routing
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers
)
@_rewrite_parameters(
body_fields=True,
ignore_deprecated_options={"params"},
)
def search_template(
self,
*,
index: t.Optional[t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]] = None,
allow_no_indices: t.Optional[bool] = None,
ccs_minimize_roundtrips: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
explain: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
id: t.Optional[str] = None,
ignore_throttled: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
params: t.Optional[t.Mapping[str, t.Any]] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
profile: t.Optional[bool] = None,
rest_total_hits_as_int: t.Optional[bool] = None,
routing: t.Optional[str] = None,
scroll: t.Optional[t.Union[int, str]] = None,
search_type: t.Optional[
t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]
] = None,
source: t.Optional[str] = None,
typed_keys: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Allows to use the Mustache language to pre-render a search definition.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-template.html>`_
:param index: Comma-separated list of data streams, indices, and aliases to search.
Supports wildcards (*).
:param allow_no_indices: Whether to ignore if a wildcard indices expression resolves
into no concrete indices. (This includes `_all` string or when no indices
have been specified)
:param ccs_minimize_roundtrips: Indicates whether network round-trips should
be minimized as part of cross-cluster search requests execution
:param expand_wildcards: Whether to expand wildcard expression to concrete indices
that are open, closed or both.
:param explain:
:param id: ID of the search template to use. If no source is specified, this
parameter is required.
:param ignore_throttled: Whether specified concrete, expanded or aliased indices
should be ignored when throttled
:param ignore_unavailable: Whether specified concrete indices should be ignored
when unavailable (missing or closed)
:param params:
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param profile:
:param rest_total_hits_as_int: If true, hits.total are rendered as an integer
in the response.
:param routing: Custom value used to route operations to a specific shard.
:param scroll: Specifies how long a consistent view of the index should be maintained
for scrolled search.
:param search_type: The type of the search operation.
:param source: An inline search template. Supports the same parameters as the
search API's request body. Also supports Mustache variables. If no id is
specified, this parameter is required.
:param typed_keys: Specify whether aggregation and suggester names should be
prefixed by their respective types in the response
"""
if index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_search/template"
else:
__path = "/_search/template"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if ccs_minimize_roundtrips is not None:
__query["ccs_minimize_roundtrips"] = ccs_minimize_roundtrips
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if explain is not None:
__body["explain"] = explain
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if id is not None:
__body["id"] = id
if ignore_throttled is not None:
__query["ignore_throttled"] = ignore_throttled
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if params is not None:
__body["params"] = params
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if profile is not None:
__body["profile"] = profile
if rest_total_hits_as_int is not None:
__query["rest_total_hits_as_int"] = rest_total_hits_as_int
if routing is not None:
__query["routing"] = routing
if scroll is not None:
__query["scroll"] = scroll
if search_type is not None:
__query["search_type"] = search_type
if source is not None:
__body["source"] = source
if typed_keys is not None:
__query["typed_keys"] = typed_keys
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def terms_enum(
self,
*,
index: str,
field: str,
case_insensitive: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
index_filter: t.Optional[t.Mapping[str, t.Any]] = None,
pretty: t.Optional[bool] = None,
search_after: t.Optional[str] = None,
size: t.Optional[int] = None,
string: t.Optional[str] = None,
timeout: t.Optional[t.Union[int, str]] = None,
) -> ObjectApiResponse[t.Any]:
"""
The terms enum API can be used to discover terms in the index that begin with
the provided string. It is designed for low-latency look-ups used in auto-complete
scenarios.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/search-terms-enum.html>`_
:param index: Comma-separated list of data streams, indices, and index aliases
to search. Wildcard (*) expressions are supported.
:param field: The string to match at the start of indexed terms. If not provided,
all terms in the field are considered.
:param case_insensitive: When true the provided search string is matched against
index terms without case sensitivity.
:param index_filter: Allows to filter an index shard if the provided query rewrites
to match_none.
:param search_after:
:param size: How many matching terms to return.
:param string: The string after which terms in the index should be returned.
Allows for a form of pagination if the last result from one request is passed
as the search_after parameter for a subsequent request.
:param timeout: The maximum length of time to spend collecting results. Defaults
to "1s" (one second). If the timeout is exceeded the complete flag set to
false in the response and the results may be partial or empty.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if field is None:
raise ValueError("Empty value passed for parameter 'field'")
__path = f"/{_quote(index)}/_terms_enum"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if field is not None:
__body["field"] = field
if case_insensitive is not None:
__body["case_insensitive"] = case_insensitive
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if index_filter is not None:
__body["index_filter"] = index_filter
if pretty is not None:
__query["pretty"] = pretty
if search_after is not None:
__body["search_after"] = search_after
if size is not None:
__body["size"] = size
if string is not None:
__body["string"] = string
if timeout is not None:
__body["timeout"] = timeout
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
)
def termvectors(
self,
*,
index: str,
id: t.Optional[str] = None,
doc: t.Optional[t.Mapping[str, t.Any]] = None,
error_trace: t.Optional[bool] = None,
field_statistics: t.Optional[bool] = None,
fields: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
filter: t.Optional[t.Mapping[str, t.Any]] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
offsets: t.Optional[bool] = None,
payloads: t.Optional[bool] = None,
per_field_analyzer: t.Optional[t.Mapping[str, str]] = None,
positions: t.Optional[bool] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
realtime: t.Optional[bool] = None,
routing: t.Optional[str] = None,
term_statistics: t.Optional[bool] = None,
version: t.Optional[int] = None,
version_type: t.Optional[
t.Union["t.Literal['external', 'external_gte', 'force', 'internal']", str]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Returns information and statistics about terms in the fields of a particular
document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-termvectors.html>`_
:param index: The index in which the document resides.
:param id: The id of the document, when not specified a doc param should be supplied.
:param doc:
:param field_statistics: Specifies if document count, sum of document frequencies
and sum of total term frequencies should be returned.
:param fields: A comma-separated list of fields to return.
:param filter:
:param offsets: Specifies if term offsets should be returned.
:param payloads: Specifies if term payloads should be returned.
:param per_field_analyzer:
:param positions: Specifies if term positions should be returned.
:param preference: Specify the node or shard the operation should be performed
on (default: random).
:param realtime: Specifies if request is real-time as opposed to near-real-time
(default: true).
:param routing: Specific routing value.
:param term_statistics: Specifies if total term frequency and document frequency
should be returned.
:param version: Explicit version number for concurrency control
:param version_type: Specific version type
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if index not in SKIP_IN_PATH and id not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_termvectors/{_quote(id)}"
elif index not in SKIP_IN_PATH:
__path = f"/{_quote(index)}/_termvectors"
else:
raise ValueError("Couldn't find a path for the given parameters")
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if doc is not None:
__body["doc"] = doc
if error_trace is not None:
__query["error_trace"] = error_trace
if field_statistics is not None:
__query["field_statistics"] = field_statistics
if fields is not None:
__query["fields"] = fields
if filter is not None:
__body["filter"] = filter
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if offsets is not None:
__query["offsets"] = offsets
if payloads is not None:
__query["payloads"] = payloads
if per_field_analyzer is not None:
__body["per_field_analyzer"] = per_field_analyzer
if positions is not None:
__query["positions"] = positions
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if realtime is not None:
__query["realtime"] = realtime
if routing is not None:
__query["routing"] = routing
if term_statistics is not None:
__query["term_statistics"] = term_statistics
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
parameter_aliases={
"_source": "source",
"_source_excludes": "source_excludes",
"_source_includes": "source_includes",
},
)
def update(
self,
*,
index: str,
id: str,
detect_noop: t.Optional[bool] = None,
doc: t.Optional[t.Mapping[str, t.Any]] = None,
doc_as_upsert: t.Optional[bool] = None,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
if_primary_term: t.Optional[int] = None,
if_seq_no: t.Optional[int] = None,
lang: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
refresh: t.Optional[
t.Union["t.Literal['false', 'true', 'wait_for']", bool, str]
] = None,
require_alias: t.Optional[bool] = None,
retry_on_conflict: t.Optional[int] = None,
routing: t.Optional[str] = None,
script: t.Optional[t.Mapping[str, t.Any]] = None,
scripted_upsert: t.Optional[bool] = None,
source: t.Optional[t.Union[bool, t.Mapping[str, t.Any]]] = None,
source_excludes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
source_includes: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
timeout: t.Optional[t.Union[int, str]] = None,
upsert: t.Optional[t.Mapping[str, t.Any]] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
) -> ObjectApiResponse[t.Any]:
"""
Updates a document with a script or partial document.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update.html>`_
:param index: The name of the index
:param id: Document ID
:param detect_noop: Set to false to disable setting 'result' in the response
to 'noop' if no change to the document occurred.
:param doc: A partial update to an existing document.
:param doc_as_upsert: Set to true to use the contents of 'doc' as the value of
'upsert'
:param if_primary_term: Only perform the operation if the document has this primary
term.
:param if_seq_no: Only perform the operation if the document has this sequence
number.
:param lang: The script language.
:param refresh: If 'true', Elasticsearch refreshes the affected shards to make
this operation visible to search, if 'wait_for' then wait for a refresh to
make this operation visible to search, if 'false' do nothing with refreshes.
:param require_alias: If true, the destination must be an index alias.
:param retry_on_conflict: Specify how many times should the operation be retried
when a conflict occurs.
:param routing: Custom value used to route operations to a specific shard.
:param script: Script to execute to update the document.
:param scripted_upsert: Set to true to execute the script whether or not the
document exists.
:param source: Set to false to disable source retrieval. You can also specify
a comma-separated list of the fields you want to retrieve.
:param source_excludes: Specify the source fields you want to exclude.
:param source_includes: Specify the source fields you want to retrieve.
:param timeout: Period to wait for dynamic mapping updates and active shards.
This guarantees Elasticsearch waits for at least the timeout before failing.
The actual wait time could be longer, particularly when multiple waits occur.
:param upsert: If the document does not already exist, the contents of 'upsert'
are inserted as a new document. If the document exists, the 'script' is executed.
:param wait_for_active_shards: The number of shard copies that must be active
before proceeding with the operations. Set to 'all' or any positive integer
up to the total number of shards in the index (number_of_replicas+1). Defaults
to 1 meaning the primary shard.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
if id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'id'")
__path = f"/{_quote(index)}/_update/{_quote(id)}"
__body: t.Dict[str, t.Any] = {}
__query: t.Dict[str, t.Any] = {}
if detect_noop is not None:
__body["detect_noop"] = detect_noop
if doc is not None:
__body["doc"] = doc
if doc_as_upsert is not None:
__body["doc_as_upsert"] = doc_as_upsert
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if if_primary_term is not None:
__query["if_primary_term"] = if_primary_term
if if_seq_no is not None:
__query["if_seq_no"] = if_seq_no
if lang is not None:
__query["lang"] = lang
if pretty is not None:
__query["pretty"] = pretty
if refresh is not None:
__query["refresh"] = refresh
if require_alias is not None:
__query["require_alias"] = require_alias
if retry_on_conflict is not None:
__query["retry_on_conflict"] = retry_on_conflict
if routing is not None:
__query["routing"] = routing
if script is not None:
__body["script"] = script
if scripted_upsert is not None:
__body["scripted_upsert"] = scripted_upsert
if source is not None:
__body["_source"] = source
if source_excludes is not None:
__query["_source_excludes"] = source_excludes
if source_includes is not None:
__query["_source_includes"] = source_includes
if timeout is not None:
__query["timeout"] = timeout
if upsert is not None:
__body["upsert"] = upsert
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
__headers = {"accept": "application/json", "content-type": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters(
body_fields=True,
parameter_aliases={"from": "from_"},
)
def update_by_query(
self,
*,
index: t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]],
allow_no_indices: t.Optional[bool] = None,
analyze_wildcard: t.Optional[bool] = None,
analyzer: t.Optional[str] = None,
conflicts: t.Optional[t.Union["t.Literal['abort', 'proceed']", str]] = None,
default_operator: t.Optional[t.Union["t.Literal['and', 'or']", str]] = None,
df: t.Optional[str] = None,
error_trace: t.Optional[bool] = None,
expand_wildcards: t.Optional[
t.Union[
t.Union["t.Literal['all', 'closed', 'hidden', 'none', 'open']", str],
t.Union[
t.List[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
]
],
t.Tuple[
t.Union[
"t.Literal['all', 'closed', 'hidden', 'none', 'open']", str
],
...,
],
],
]
] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
from_: t.Optional[int] = None,
human: t.Optional[bool] = None,
ignore_unavailable: t.Optional[bool] = None,
lenient: t.Optional[bool] = None,
max_docs: t.Optional[int] = None,
pipeline: t.Optional[str] = None,
preference: t.Optional[str] = None,
pretty: t.Optional[bool] = None,
query: t.Optional[t.Mapping[str, t.Any]] = None,
refresh: t.Optional[bool] = None,
request_cache: t.Optional[bool] = None,
requests_per_second: t.Optional[int] = None,
routing: t.Optional[str] = None,
script: t.Optional[t.Mapping[str, t.Any]] = None,
scroll: t.Optional[t.Union[int, str]] = None,
scroll_size: t.Optional[int] = None,
search_timeout: t.Optional[t.Union[int, str]] = None,
search_type: t.Optional[
t.Union["t.Literal['dfs_query_then_fetch', 'query_then_fetch']", str]
] = None,
slice: t.Optional[t.Mapping[str, t.Any]] = None,
slices: t.Optional[int] = None,
sort: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
stats: t.Optional[t.Union[t.List[str], t.Tuple[str, ...]]] = None,
terminate_after: t.Optional[int] = None,
timeout: t.Optional[t.Union[int, str]] = None,
version: t.Optional[bool] = None,
version_type: t.Optional[bool] = None,
wait_for_active_shards: t.Optional[
t.Union[int, t.Union["t.Literal['all', 'index-setting']", str]]
] = None,
wait_for_completion: t.Optional[bool] = None,
) -> ObjectApiResponse[t.Any]:
"""
Performs an update on every document in the index without changing the source,
for example to pick up a mapping change.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update-by-query.html>`_
:param index: A comma-separated list of index names to search; use `_all` or
empty string to perform the operation on all indices
:param allow_no_indices: Whether to ignore if a wildcard indices expression resolves
into no concrete indices. (This includes `_all` string or when no indices
have been specified)
:param analyze_wildcard: Specify whether wildcard and prefix queries should be
analyzed (default: false)
:param analyzer: The analyzer to use for the query string
:param conflicts:
:param default_operator: The default operator for query string query (AND or
OR)
:param df: The field to use as default where no field prefix is given in the
query string
:param expand_wildcards: Whether to expand wildcard expression to concrete indices
that are open, closed or both.
:param from_: Starting offset (default: 0)
:param ignore_unavailable: Whether specified concrete indices should be ignored
when unavailable (missing or closed)
:param lenient: Specify whether format-based query failures (such as providing
text to a numeric field) should be ignored
:param max_docs:
:param pipeline: Ingest pipeline to set on index requests made by this action.
(default: none)
:param preference: Specify the node or shard the operation should be performed
on (default: random)
:param query:
:param refresh: Should the affected indexes be refreshed?
:param request_cache: Specify if request cache should be used for this request
or not, defaults to index level setting
:param requests_per_second: The throttle to set on this request in sub-requests
per second. -1 means no throttle.
:param routing: A comma-separated list of specific routing values
:param script:
:param scroll: Specify how long a consistent view of the index should be maintained
for scrolled search
:param scroll_size: Size on the scroll request powering the update by query
:param search_timeout: Explicit timeout for each search request. Defaults to
no timeout.
:param search_type: Search operation type
:param slice:
:param slices: The number of slices this task should be divided into. Defaults
to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`.
:param sort: A comma-separated list of <field>:<direction> pairs
:param stats: Specific 'tag' of the request for logging and statistical purposes
:param terminate_after: The maximum number of documents to collect for each shard,
upon reaching which the query execution will terminate early.
:param timeout: Time each individual bulk request should wait for shards that
are unavailable.
:param version: Specify whether to return document version as part of a hit
:param version_type: Should the document increment the version number (internal)
on hit or not (reindex)
:param wait_for_active_shards: Sets the number of shard copies that must be active
before proceeding with the update by query operation. Defaults to 1, meaning
the primary shard only. Set to `all` for all shard copies, otherwise set
to any non-negative value less than or equal to the total number of copies
for the shard (number of replicas + 1)
:param wait_for_completion: Should the request should block until the update
by query operation is complete.
"""
if index in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'index'")
__path = f"/{_quote(index)}/_update_by_query"
__query: t.Dict[str, t.Any] = {}
__body: t.Dict[str, t.Any] = {}
# The 'sort' parameter with a colon can't be encoded to the body.
if sort is not None and (
(isinstance(sort, str) and ":" in sort)
or (
isinstance(sort, (list, tuple))
and all(isinstance(_x, str) for _x in sort)
and any(":" in _x for _x in sort)
)
):
__query["sort"] = sort
sort = None
if allow_no_indices is not None:
__query["allow_no_indices"] = allow_no_indices
if analyze_wildcard is not None:
__query["analyze_wildcard"] = analyze_wildcard
if analyzer is not None:
__query["analyzer"] = analyzer
if conflicts is not None:
__body["conflicts"] = conflicts
if default_operator is not None:
__query["default_operator"] = default_operator
if df is not None:
__query["df"] = df
if error_trace is not None:
__query["error_trace"] = error_trace
if expand_wildcards is not None:
__query["expand_wildcards"] = expand_wildcards
if filter_path is not None:
__query["filter_path"] = filter_path
if from_ is not None:
__query["from"] = from_
if human is not None:
__query["human"] = human
if ignore_unavailable is not None:
__query["ignore_unavailable"] = ignore_unavailable
if lenient is not None:
__query["lenient"] = lenient
if max_docs is not None:
__body["max_docs"] = max_docs
if pipeline is not None:
__query["pipeline"] = pipeline
if preference is not None:
__query["preference"] = preference
if pretty is not None:
__query["pretty"] = pretty
if query is not None:
__body["query"] = query
if refresh is not None:
__query["refresh"] = refresh
if request_cache is not None:
__query["request_cache"] = request_cache
if requests_per_second is not None:
__query["requests_per_second"] = requests_per_second
if routing is not None:
__query["routing"] = routing
if script is not None:
__body["script"] = script
if scroll is not None:
__query["scroll"] = scroll
if scroll_size is not None:
__query["scroll_size"] = scroll_size
if search_timeout is not None:
__query["search_timeout"] = search_timeout
if search_type is not None:
__query["search_type"] = search_type
if slice is not None:
__body["slice"] = slice
if slices is not None:
__query["slices"] = slices
if sort is not None:
__query["sort"] = sort
if stats is not None:
__query["stats"] = stats
if terminate_after is not None:
__query["terminate_after"] = terminate_after
if timeout is not None:
__query["timeout"] = timeout
if version is not None:
__query["version"] = version
if version_type is not None:
__query["version_type"] = version_type
if wait_for_active_shards is not None:
__query["wait_for_active_shards"] = wait_for_active_shards
if wait_for_completion is not None:
__query["wait_for_completion"] = wait_for_completion
if not __body:
__body = None # type: ignore[assignment]
__headers = {"accept": "application/json"}
if __body is not None:
__headers["content-type"] = "application/json"
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers, body=__body
)
@_rewrite_parameters()
def update_by_query_rethrottle(
self,
*,
task_id: str,
error_trace: t.Optional[bool] = None,
filter_path: t.Optional[
t.Union[str, t.Union[t.List[str], t.Tuple[str, ...]]]
] = None,
human: t.Optional[bool] = None,
pretty: t.Optional[bool] = None,
requests_per_second: t.Optional[int] = None,
) -> ObjectApiResponse[t.Any]:
"""
Changes the number of requests per second for a particular Update By Query operation.
`<https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update-by-query.html>`_
:param task_id: The task id to rethrottle
:param requests_per_second: The throttle to set on this request in floating sub-requests
per second. -1 means set no throttle.
"""
if task_id in SKIP_IN_PATH:
raise ValueError("Empty value passed for parameter 'task_id'")
__path = f"/_update_by_query/{_quote(task_id)}/_rethrottle"
__query: t.Dict[str, t.Any] = {}
if error_trace is not None:
__query["error_trace"] = error_trace
if filter_path is not None:
__query["filter_path"] = filter_path
if human is not None:
__query["human"] = human
if pretty is not None:
__query["pretty"] = pretty
if requests_per_second is not None:
__query["requests_per_second"] = requests_per_second
__headers = {"accept": "application/json"}
return self.perform_request( # type: ignore[return-value]
"POST", __path, params=__query, headers=__headers
)
|
[
"typing.TypeVar",
"warnings.warn",
"logging.getLogger"
] |
[((2630, 2664), 'logging.getLogger', 'logging.getLogger', (['"""elasticsearch"""'], {}), "('elasticsearch')\n", (2647, 2664), False, 'import logging\n'), ((2678, 2722), 'typing.TypeVar', 't.TypeVar', (['"""SelfType"""'], {'bound': '"""Elasticsearch"""'}), "('SelfType', bound='Elasticsearch')\n", (2687, 2722), True, 'import typing as t\n'), ((7493, 7629), 'warnings.warn', 'warnings.warn', (['"""The \'timeout\' parameter is deprecated in favor of \'request_timeout\'"""'], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), '(\n "The \'timeout\' parameter is deprecated in favor of \'request_timeout\'",\n category=DeprecationWarning, stacklevel=2)\n', (7506, 7629), False, 'import warnings\n'), ((8388, 8541), 'warnings.warn', 'warnings.warn', (['"""The \'randomize_hosts\' parameter is deprecated in favor of \'randomize_nodes_in_pool\'"""'], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), '(\n "The \'randomize_hosts\' parameter is deprecated in favor of \'randomize_nodes_in_pool\'"\n , category=DeprecationWarning, stacklevel=2)\n', (8401, 8541), False, 'import warnings\n'), ((8981, 9137), 'warnings.warn', 'warnings.warn', (['"""The \'sniffer_timeout\' parameter is deprecated in favor of \'min_delay_between_sniffing\'"""'], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), '(\n "The \'sniffer_timeout\' parameter is deprecated in favor of \'min_delay_between_sniffing\'"\n , category=DeprecationWarning, stacklevel=2)\n', (8994, 9137), False, 'import warnings\n'), ((9583, 9743), 'warnings.warn', 'warnings.warn', (['"""The \'sniff_on_connection_fail\' parameter is deprecated in favor of \'sniff_on_node_failure\'"""'], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), '(\n "The \'sniff_on_connection_fail\' parameter is deprecated in favor of \'sniff_on_node_failure\'"\n , category=DeprecationWarning, stacklevel=2)\n', (9596, 9743), False, 'import warnings\n'), ((10156, 10297), 'warnings.warn', 'warnings.warn', (['"""The \'maxsize\' parameter is 
deprecated in favor of \'connections_per_node\'"""'], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), '(\n "The \'maxsize\' parameter is deprecated in favor of \'connections_per_node\'",\n category=DeprecationWarning, stacklevel=2)\n', (10169, 10297), False, 'import warnings\n'), ((11449, 11603), 'warnings.warn', 'warnings.warn', (['"""The \'host_info_callback\' parameter is deprecated in favor of \'sniffed_node_callback\'"""'], {'category': 'DeprecationWarning', 'stacklevel': '(2)'}), '(\n "The \'host_info_callback\' parameter is deprecated in favor of \'sniffed_node_callback\'"\n , category=DeprecationWarning, stacklevel=2)\n', (11462, 11603), False, 'import warnings\n')]
|
"""The module for Genomic Uncertain Deletion Validation."""
from typing import List, Optional, Dict, Tuple
import logging
from ga4gh.vrs import models
from ga4gh.vrsatile.pydantic.vrs_models import RelativeCopyClass
from variation.schemas.app_schemas import Endpoint
from variation.validators.duplication_deletion_base import\
DuplicationDeletionBase
from variation.schemas.classification_response_schema import \
ClassificationType, Classification
from variation.schemas.token_response_schema import Token
from variation.schemas.token_response_schema import GeneMatchToken
from variation.schemas.normalize_response_schema\
import HGVSDupDelMode as HGVSDupDelModeEnum
# Module-level logger for the variation package; DEBUG is enabled here so
# validator tracing is emitted by default for this module's consumers.
logger = logging.getLogger("variation")
logger.setLevel(logging.DEBUG)
class GenomicUncertainDeletion(DuplicationDeletionBase):
    """The Genomic UncertainDeletion Validator class.

    Validates genomic uncertain-deletion classifications (HGVS expressions of
    the shape ``(?_#)_(#_?)``, ``(?_#)_#`` and ``#_(#_?)``) and produces VRS
    variation representations for them.
    """

    async def get_transcripts(self, gene_tokens: List, classification: Classification,
                              errors: List) -> Optional[List[str]]:
        """Get transcript accessions for a given classification.

        :param List gene_tokens: A list of gene tokens
        :param Classification classification: A classification for a list of
            tokens
        :param List errors: List of errors
        :return: List of transcript accessions
        """
        # Uncertain deletions are genomic, so only genomic transcripts apply.
        transcripts = await self.get_genomic_transcripts(classification, errors)
        return transcripts

    async def get_valid_invalid_results(
        self, classification_tokens: List, transcripts: List,
        classification: Classification, results: List, gene_tokens: List,
        mane_data_found: Dict, is_identifier: bool,
        hgvs_dup_del_mode: HGVSDupDelModeEnum,
        endpoint_name: Optional[Endpoint] = None,
        baseline_copies: Optional[int] = None,
        relative_copy_class: Optional[RelativeCopyClass] = None,
        do_liftover: bool = False
    ) -> None:
        """Add validation result objects to a list of results.

        :param List classification_tokens: A list of classification Tokens
        :param List transcripts: A list of transcript accessions
        :param Classification classification: A classification for a list of
            tokens
        :param List results: Stores validation result objects
        :param List gene_tokens: List of GeneMatchTokens for a classification
        :param Dict mane_data_found: MANE Transcript information found
        :param bool is_identifier: `True` if identifier is given for exact
            location. `False` otherwise.
        :param HGVSDupDelModeEnum hgvs_dup_del_mode: Must be: `default`, `absolute_cnv`,
            `relative_cnv`, `repeated_seq_expr`, `literal_seq_expr`. This parameter
            determines how to represent HGVS dup/del expressions as VRS objects.
        :param Optional[Endpoint] endpoint_name: Then name of the endpoint being used
        :param Optional[int] baseline_copies: Baseline copies number
        :param Optional[RelativeCopyClass] relative_copy_class: The relative copy class
        :param bool do_liftover: Whether or not to liftover to GRCh38 assembly
        """
        valid_alleles = list()
        # Cross product: every classification token against every transcript.
        for s in classification_tokens:
            for t in transcripts:
                errors = list()
                t = self.get_accession(t, classification)
                result = await self._get_variation(
                    s, t, errors, gene_tokens, hgvs_dup_del_mode,
                    relative_copy_class=relative_copy_class,
                    baseline_copies=baseline_copies)
                variation = result["variation"]
                # MANE/GRCh38 data is only needed for normalize or liftover flows.
                if not errors and (endpoint_name == Endpoint.NORMALIZE or do_liftover):
                    await self._get_normalize_variation(
                        gene_tokens, s, t, errors, hgvs_dup_del_mode,
                        mane_data_found, relative_copy_class=relative_copy_class,
                        baseline_copies=baseline_copies)
                self.add_validation_result(
                    variation, valid_alleles, results,
                    classification, s, t, gene_tokens, errors
                )
                # With an explicit identifier only the first transcript matters.
                if is_identifier:
                    break
        if endpoint_name == Endpoint.NORMALIZE or do_liftover:
            self.add_mane_to_validation_results(
                mane_data_found, valid_alleles, results,
                classification, gene_tokens
            )

    async def _get_variation(
        self, s: Token, t: str, errors: List, gene_tokens: List,
        hgvs_dup_del_mode: HGVSDupDelModeEnum,
        relative_copy_class: Optional[RelativeCopyClass] = None,
        baseline_copies: Optional[int] = None
    ) -> Optional[Dict]:
        """Get variation data.

        :param Token s: Classification token
        :param str t: Accession
        :param List errors: List of errors
        :param List gene_tokens: List of gene tokens for the classification
        :param HGVSDupDelModeEnum hgvs_dup_del_mode: Mode to use for
            interpreting HGVS duplications and deletions
        :param Optional[RelativeCopyClass] relative_copy_class: The relative copy class
        :param Optional[int] baseline_copies: Baseline copies number
        :return: Dictionary containing start/end position changes and variation
        """
        # NOTE(review): start/end are initialized to None and never reassigned
        # in this method, so `pos` below is always None and the returned
        # "start"/"end" entries are always None — confirm this is intended.
        variation, start, end = None, None, None
        ival, grch38 = await self._get_ival(t, s, errors, gene_tokens)
        if not errors:
            if grch38:
                # Prefer the lifted-over GRCh38 accession when available.
                t = grch38["ac"]
            allele = self.vrs.to_vrs_allele_ranges(
                t, s.coordinate_type, s.alt_type, errors, ival)
            if start is not None and end is not None:
                pos = (start, end)
            else:
                pos = None
            variation = self.hgvs_dup_del_mode.interpret_variation(
                s.alt_type, allele, errors,
                hgvs_dup_del_mode, pos=pos, relative_copy_class=relative_copy_class,
                baseline_copies=baseline_copies)
        return {
            "start": start,
            "end": end,
            "variation": variation
        }

    async def _get_normalize_variation(
        self, gene_tokens: List, s: Token, t: str, errors: List,
        hgvs_dup_del_mode: HGVSDupDelModeEnum,
        mane_data_found: Dict,
        relative_copy_class: Optional[RelativeCopyClass] = None,
        baseline_copies: Optional[int] = None) -> None:
        """Get variation that will be returned in normalize endpoint.

        :param List gene_tokens: List of gene tokens
        :param Token s: Classification token
        :param str t: Accession
        :param List errors: List of errors
        :param HGVSDupDelModeEnum hgvs_dup_del_mode: Mode to use for
            interpreting HGVS duplications and deletions
        :param Dict mane_data_found: MANE Transcript data found for given query
        :param Optional[RelativeCopyClass] relative_copy_class: The relative copy class
        :param Optional[int] baseline_copies: Baseline copies number
        """
        # Only free-text (non-gene) queries are normalized via GRCh38 here.
        if not gene_tokens:
            ival, grch38 = await self._get_ival(
                t, s, errors, gene_tokens, is_norm=True)
            self.add_grch38_to_mane_data(
                t, s, errors, grch38, mane_data_found, hgvs_dup_del_mode,
                ival=ival, relative_copy_class=relative_copy_class,
                baseline_copies=baseline_copies)

    async def _get_ival(
        self, t: str, s: Token, errors: List, gene_tokens: List,
        is_norm: bool = False
    ) -> Optional[Tuple[models.SequenceInterval, Dict]]:
        """Get ival for variations with ranges.

        :param str t: Accession
        :param Token s: Classification token
        :param List errors: List of errors
        :param List gene_tokens: List of gene tokens for the classification
        :param bool is_norm: `True` if normalize endpoint is being used.
            `False` otherwise.
        :return: Sequence Interval and GRCh38 data if normalize endpoint
            is being used
        """
        ival = None
        grch38 = None
        gene = gene_tokens[0].token if gene_tokens else None
        if s.start_pos1_del == "?" and s.end_pos2_del == "?":
            # format: (?_#)_(#_?)
            # Both bounds are uncertain: use indefinite ranges on both ends.
            if is_norm:
                t, start, end, _, _, grch38 = await self.get_grch38_pos_ac(
                    t, s.start_pos2_del, s.end_pos1_del
                )
            else:
                start = s.start_pos2_del
                end = s.end_pos1_del
                await self.validate_gene_or_accession_pos(
                    t, [start, end], errors, gene=gene)
            if not errors and start and end:
                ival = models.SequenceInterval(
                    start=self.vrs.get_start_indef_range(start),
                    end=self.vrs.get_end_indef_range(end),
                    type="SequenceInterval"
                )
        elif s.start_pos1_del == "?" and \
                s.start_pos2_del != "?" and \
                s.end_pos1_del != "?" and \
                s.end_pos2_del is None:
            # format: (?_#)_#
            # Only the start is uncertain: indefinite start, exact end.
            if is_norm:
                t, start, end, _, _, grch38 = await self.get_grch38_pos_ac(
                    t, s.start_pos2_del, s.end_pos1_del
                )
            else:
                start = s.start_pos2_del
                end = s.end_pos1_del
                await self.validate_gene_or_accession_pos(
                    t, [start, end], errors, gene=gene
                )
            if not errors and start and end:
                ival = models.SequenceInterval(
                    start=self.vrs.get_start_indef_range(start),  # noqa: E501
                    end=models.Number(value=end, type="Number"),
                    type="SequenceInterval"
                )
        elif s.start_pos1_del != "?" and \
                s.start_pos2_del is None and \
                s.end_pos1_del != "?" and \
                s.end_pos2_del == "?":
            # format: #_(#_?)
            # Only the end is uncertain: exact start, indefinite end.
            if is_norm:
                t, start, end, _, _, grch38 = await self.get_grch38_pos_ac(
                    t, s.start_pos1_del, s.end_pos1_del
                )
            else:
                start = s.start_pos1_del
                end = s.end_pos1_del
            # NOTE(review): presumably converts the exact start to inter-residue
            # (0-based) coordinates; the other branches do not adjust — confirm.
            start -= 1
            await self.validate_gene_or_accession_pos(
                t, [start, end], errors, gene=gene)
            if not errors and start and end:
                ival = models.SequenceInterval(
                    start=models.Number(value=start, type="Number"),
                    end=self.vrs.get_end_indef_range(end),
                    type="SequenceInterval"
                )
        else:
            errors.append("Not yet supported")
        return ival, grch38

    def get_gene_tokens(
            self, classification: Classification) -> List[GeneMatchToken]:
        """Return gene tokens for a classification.

        :param Classification classification: The classification for tokens
        :return: A list of Gene Match Tokens in the classification
        """
        return self.get_gene_symbol_tokens(classification)

    def variation_name(self) -> str:
        """Return the variation name."""
        return "genomic uncertain deletion"

    def is_token_instance(self, t: Token) -> bool:
        """Check that token is Genomic Uncertain Deletion.

        :param Token t: Classification token
        """
        return t.token_type == "GenomicUncertainDeletion"

    def validates_classification_type(
            self,
            classification_type: ClassificationType) -> bool:
        """Return whether or not the classification type is
        Genomic Uncertain Deletion.

        :param ClassificationType classification_type: Classification type
        :return: `True` if classification type matches, `False` otherwise
        """
        return classification_type == \
            ClassificationType.GENOMIC_UNCERTAIN_DELETION
|
[
"ga4gh.vrs.models.Number",
"logging.getLogger"
] |
[((693, 723), 'logging.getLogger', 'logging.getLogger', (['"""variation"""'], {}), "('variation')\n", (710, 723), False, 'import logging\n'), ((9553, 9592), 'ga4gh.vrs.models.Number', 'models.Number', ([], {'value': 'end', 'type': '"""Number"""'}), "(value=end, type='Number')\n", (9566, 9592), False, 'from ga4gh.vrs import models\n'), ((10381, 10422), 'ga4gh.vrs.models.Number', 'models.Number', ([], {'value': 'start', 'type': '"""Number"""'}), "(value=start, type='Number')\n", (10394, 10422), False, 'from ga4gh.vrs import models\n')]
|
import FWCore.ParameterSet.Config as cms
import copy

# cmsRun configuration that writes an AlignPCLThresholds payload
# (alignment PCL movement/error cut values) into a local sqlite file
# for a single run-number IOV.
process = cms.Process("ProcessOne")

##
## MessageLogger: silence cerr, send INFO-level output of the writer
## modules to stdout
##
process.load('FWCore.MessageService.MessageLogger_cfi')
process.MessageLogger.cerr.enable = False
process.MessageLogger.AlignPCLThresholdsWriter=dict()
process.MessageLogger.AlignPCLThresholds=dict()
process.MessageLogger.cout = cms.untracked.PSet(
    enable = cms.untracked.bool(True),
    enableStatistics = cms.untracked.bool(True),
    threshold = cms.untracked.string("INFO"),
    default = cms.untracked.PSet(limit = cms.untracked.int32(0)),
    FwkReport = cms.untracked.PSet(limit = cms.untracked.int32(-1),
                                   reportEvery = cms.untracked.int32(1000)
                                   ),
    AlignPCLThresholdsWriter = cms.untracked.PSet( limit = cms.untracked.int32(-1)),
    AlignPCLThresholds = cms.untracked.PSet( limit = cms.untracked.int32(-1))
)

##
## Empty source: no event data is needed, a single runnumber IOV suffices
##
process.source = cms.Source("EmptyIOVSource",
                           timetype = cms.string('runnumber'),
                           firstValue = cms.uint64(1),
                           lastValue = cms.uint64(1),
                           interval = cms.uint64(1)
                           )

##
## Database output service
##
process.load("CondCore.CondDB.CondDB_cfi")

##
## Output database (in this case local sqlite file)
##
process.CondDB.connect = 'sqlite_file:mythresholds.db'
process.PoolDBOutputService = cms.Service("PoolDBOutputService",
                                          process.CondDB,
                                          timetype = cms.untracked.string('runnumber'),
                                          toPut = cms.VPSet(cms.PSet(record = cms.string('AlignPCLThresholdsRcd'),
                                                                      tag = cms.string('PCLThresholds_express_v0')
                                                                      )
                                                    )
                                          )

##
## Import the thresholds configuration
##
import CondFormats.PCLConfig.Thresholds_cff as Thresholds

##
## Example on how to add to the default extra degrees of freedom
##
AddSurfaceThresholds = copy.deepcopy(Thresholds.default)
BPixSurface= cms.VPSet(
    cms.PSet(alignableId = cms.string("TPBModule"),
             DOF = cms.string("Surface1"),
             cut = cms.double(0.1),
             sigCut = cms.double(0.1),
             maxMoveCut = cms.double(0.1),
             maxErrorCut = cms.double(10.0)
             )
)
# DefaultPlusSurface is built only as an example payload; the analyzer
# below writes Thresholds.default unless the lines there are swapped.
DefaultPlusSurface = AddSurfaceThresholds+BPixSurface
#print DefaultPlusSurface.dumpPython()

process.WriteInDB = cms.EDAnalyzer("AlignPCLThresholdsWriter",
                               record= cms.string('AlignPCLThresholdsRcd'),
                               ### minimum number of records found in pede output
                               minNRecords = cms.uint32(25000),
                               #thresholds = cms.VPSet() # empty object
                               #thresholds = DefaultPlusSurface # add extra degree of freedom
                               thresholds = Thresholds.default # as a cms.VPset
                               )

process.p = cms.Path(process.WriteInDB)
|
[
"FWCore.ParameterSet.Config.string",
"FWCore.ParameterSet.Config.uint64",
"copy.deepcopy",
"FWCore.ParameterSet.Config.double",
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.untracked.string",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.uint32",
"FWCore.ParameterSet.Config.Path"
] |
[((65, 90), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""ProcessOne"""'], {}), "('ProcessOne')\n", (76, 90), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2341, 2374), 'copy.deepcopy', 'copy.deepcopy', (['Thresholds.default'], {}), '(Thresholds.default)\n', (2354, 2374), False, 'import copy\n'), ((3451, 3478), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['process.WriteInDB'], {}), '(process.WriteInDB)\n', (3459, 3478), True, 'import FWCore.ParameterSet.Config as cms\n'), ((387, 411), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (405, 411), True, 'import FWCore.ParameterSet.Config as cms\n'), ((436, 460), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(True)'], {}), '(True)\n', (454, 460), True, 'import FWCore.ParameterSet.Config as cms\n'), ((478, 506), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""INFO"""'], {}), "('INFO')\n", (498, 506), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1117, 1140), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""runnumber"""'], {}), "('runnumber')\n", (1127, 1140), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1183, 1196), 'FWCore.ParameterSet.Config.uint64', 'cms.uint64', (['(1)'], {}), '(1)\n', (1193, 1196), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1238, 1251), 'FWCore.ParameterSet.Config.uint64', 'cms.uint64', (['(1)'], {}), '(1)\n', (1248, 1251), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1292, 1305), 'FWCore.ParameterSet.Config.uint64', 'cms.uint64', (['(1)'], {}), '(1)\n', (1302, 1305), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1702, 1735), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""runnumber"""'], {}), "('runnumber')\n", (1722, 1735), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2936, 2971), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""AlignPCLThresholdsRcd"""'], {}), 
"('AlignPCLThresholdsRcd')\n", (2946, 2971), True, 'import FWCore.ParameterSet.Config as cms\n'), ((3109, 3126), 'FWCore.ParameterSet.Config.uint32', 'cms.uint32', (['(25000)'], {}), '(25000)\n', (3119, 3126), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2432, 2455), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""TPBModule"""'], {}), "('TPBModule')\n", (2442, 2455), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2490, 2512), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""Surface1"""'], {}), "('Surface1')\n", (2500, 2512), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2547, 2562), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(0.1)'], {}), '(0.1)\n', (2557, 2562), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2597, 2612), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(0.1)'], {}), '(0.1)\n', (2607, 2612), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2647, 2662), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(0.1)'], {}), '(0.1)\n', (2657, 2662), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2697, 2713), 'FWCore.ParameterSet.Config.double', 'cms.double', (['(10.0)'], {}), '(10.0)\n', (2707, 2713), True, 'import FWCore.ParameterSet.Config as cms\n'), ((551, 573), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(0)'], {}), '(0)\n', (570, 573), True, 'import FWCore.ParameterSet.Config as cms\n'), ((642, 665), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(-1)'], {}), '(-1)\n', (661, 665), True, 'import FWCore.ParameterSet.Config as cms\n'), ((716, 741), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1000)'], {}), '(1000)\n', (735, 741), True, 'import FWCore.ParameterSet.Config as cms\n'), ((893, 916), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(-1)'], {}), '(-1)\n', (912, 916), True, 'import FWCore.ParameterSet.Config as cms\n'), ((978, 1001), 
'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(-1)'], {}), '(-1)\n', (997, 1001), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1815, 1850), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""AlignPCLThresholdsRcd"""'], {}), "('AlignPCLThresholdsRcd')\n", (1825, 1850), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1927, 1965), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""PCLThresholds_express_v0"""'], {}), "('PCLThresholds_express_v0')\n", (1937, 1965), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
import click
from ghutil.types import Issue
@click.command()
@click.option('-d', '--delete', is_flag=True, help='Remove the given labels')
@click.option('--set', is_flag=True, help='Replace the current labels')
@Issue.argument('issue')
@click.argument('label', nargs=-1)
@click.pass_context
def cli(ctx, issue, label, delete, set):  # noqa: B002
    """
    Label an issue/PR.
    By default, any labels listed on the command line are added to the issue's
    current labels. To instead replace the current labels, specify the `--set`
    option. To remove the given labels from the issue, specify the `--delete`
    option.
    Note that specifying `--set` without any labels will remove all labels from
    the issue.
    """
    # ctx.fail() raises UsageError, so execution never continues past it.
    if delete and set:
        ctx.fail('--delete and --set are mutually exclusive')
    if set:
        issue.labels.put(json=label)
        return
    if delete:
        for name in label:
            issue.labels[name].delete()
        return
    issue.labels.post(json=label)
|
[
"ghutil.types.Issue.argument",
"click.option",
"click.argument",
"click.command"
] |
[((48, 63), 'click.command', 'click.command', ([], {}), '()\n', (61, 63), False, 'import click\n'), ((65, 141), 'click.option', 'click.option', (['"""-d"""', '"""--delete"""'], {'is_flag': '(True)', 'help': '"""Remove the given labels"""'}), "('-d', '--delete', is_flag=True, help='Remove the given labels')\n", (77, 141), False, 'import click\n'), ((143, 213), 'click.option', 'click.option', (['"""--set"""'], {'is_flag': '(True)', 'help': '"""Replace the current labels"""'}), "('--set', is_flag=True, help='Replace the current labels')\n", (155, 213), False, 'import click\n'), ((215, 238), 'ghutil.types.Issue.argument', 'Issue.argument', (['"""issue"""'], {}), "('issue')\n", (229, 238), False, 'from ghutil.types import Issue\n'), ((240, 273), 'click.argument', 'click.argument', (['"""label"""'], {'nargs': '(-1)'}), "('label', nargs=-1)\n", (254, 273), False, 'import click\n')]
|
import unittest
import numpy as np
import pystan
from pystan._compat import PY2
from pystan.tests.helper import get_model
class TestArgs(unittest.TestCase):
    """Checks for ``control`` argument validation and summary printing."""

    @classmethod
    def setUpClass(cls):
        # Compile one shared model so each test does not pay the build cost.
        model_code = 'parameters {real x;real y;real z;} model {x ~ normal(0,1);y ~ normal(0,1);z ~ normal(0,1);}'
        cls.model = get_model("standard_normals_model", model_code)

    def test_control(self):
        model = self.model
        # Python 2 spells the context manager assertRaisesRegexp.
        raises = getattr(
            self, 'assertRaisesRegexp' if PY2 else 'assertRaisesRegex')
        with raises(ValueError, '`control` must be a dictionary'):
            model.sampling(control=3)
        with raises(ValueError, '`control` contains unknown'):
            model.sampling(control=dict(foo=3))
        with raises(ValueError, '`metric` must be one of'):
            model.sampling(control={'metric': 'lorem-ipsum'})

    def test_print_summary(self):
        fit = self.model.sampling(iter=100)
        summary_full = pystan.misc.stansummary(fit)
        summary_z_str = pystan.misc.stansummary(fit, pars='z')
        summary_z_list = pystan.misc.stansummary(fit, pars=['z'])
        summary_xy = pystan.misc.stansummary(fit, pars=['x', 'y'])
        # Restricting pars must change the output relative to the full summary.
        for restricted in (summary_z_str, summary_z_list, summary_xy):
            self.assertNotEqual(summary_full, restricted)
        self.assertNotEqual(summary_z_str, summary_xy)
        self.assertNotEqual(summary_z_list, summary_xy)
        # A string and a one-element list select the same parameter.
        self.assertEqual(summary_z_str, summary_z_list)
|
[
"pystan.tests.helper.get_model",
"pystan.misc.stansummary"
] |
[((338, 385), 'pystan.tests.helper.get_model', 'get_model', (['"""standard_normals_model"""', 'model_code'], {}), "('standard_normals_model', model_code)\n", (347, 385), False, 'from pystan.tests.helper import get_model\n'), ((1178, 1206), 'pystan.misc.stansummary', 'pystan.misc.stansummary', (['fit'], {}), '(fit)\n', (1201, 1206), False, 'import pystan\n'), ((1234, 1272), 'pystan.misc.stansummary', 'pystan.misc.stansummary', (['fit'], {'pars': '"""z"""'}), "(fit, pars='z')\n", (1257, 1272), False, 'import pystan\n'), ((1300, 1340), 'pystan.misc.stansummary', 'pystan.misc.stansummary', (['fit'], {'pars': "['z']"}), "(fit, pars=['z'])\n", (1323, 1340), False, 'import pystan\n'), ((1364, 1409), 'pystan.misc.stansummary', 'pystan.misc.stansummary', (['fit'], {'pars': "['x', 'y']"}), "(fit, pars=['x', 'y'])\n", (1387, 1409), False, 'import pystan\n')]
|
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import operator
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_post_parameters
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon import tables
from horizon.utils import memoized
from horizon import views
from openstack_dashboard import api
from openstack_dashboard import policy
from garr_horizon.content.garr_users import forms as project_forms
from garr_horizon.content.garr_users import tables as project_tables
from openstack_dashboard.utils import identity
from garr_horizon.content.garr_users.models import User, Project
LOG = logging.getLogger(__name__)
class IndexView(tables.DataTableView):
    """Tabular listing of external (GARR) users.

    ``get_data`` always returns an iterable -- previously ``users = []``
    was assigned but never returned, so the no-permission and exception
    paths fell through and returned ``None``, which breaks the
    DataTableView rendering machinery.
    """

    table_class = project_tables.UsersTable
    template_name = 'identity/garr_users/index.html'
    page_title = _("External Users")

    def get_filters(self):
        """Return the active table filter as a one-entry dict, or None.

        A ``project`` filter is entered by name in the UI but the model
        stores the project id, so the name is translated before use.
        """
        filter_field = self.table.get_filter_field()
        filter_string = self.table.get_filter_string()
        if filter_string:
            if filter_field == 'project':
                filter_string = Project.objects.get(name=filter_string).id
            return {filter_field: filter_string}
        else:
            return None

    def get_data(self):
        """Return the users visible to the requester.

        :return: iterable of ``User`` objects; empty when the requester
            lacks permission or retrieval fails.
        """
        users = []
        # Either the list or the single-get policy is enough to view the table.
        list_permission = (
            policy.check((("identity", "identity:list_users"),),
                         self.request)
            or policy.check((("identity", "identity:get_user"),),
                            self.request))
        if list_permission:
            filters = self.get_filters()
            try:
                if filters is not None:
                    users = User.objects.filter(**filters)
                else:
                    users = User.objects.all()
            except Exception:
                exceptions.handle(self.request,
                                  _('Unable to retrieve user list.'))
        else:
            msg = _("Insufficient privilege level to view user information.")
            messages.info(self.request, msg)
        # Always hand the table an iterable; returning None breaks rendering.
        return users
class UpdateView(forms.ModalFormView):
    """Modal form for editing an existing external user."""

    template_name = 'identity/garr_users/update.html'
    form_id = "update_user_form"
    form_class = project_forms.UpdateUserForm
    submit_label = _("Update User")
    submit_url = "horizon:identity:garr_users:update"
    success_url = reverse_lazy('horizon:identity:garr_users:index')
    page_title = _("Update User")

    def dispatch(self, *args, **kwargs):
        return super(UpdateView, self).dispatch(*args, **kwargs)

    @memoized.memoized_method
    def get_object(self):
        """Fetch the user being edited; redirect to the index on failure."""
        try:
            return User.objects.get(id=self.kwargs['user_id'])
        except Exception:
            redirect = reverse("horizon:identity:garr_users:index")
            exceptions.handle(self.request,
                              _('Unable to retrieve user information.'),
                              redirect=redirect)

    def get_context_data(self, **kwargs):
        context = super(UpdateView, self).get_context_data(**kwargs)
        # The submit URL must target this specific user.
        context['submit_url'] = reverse(self.submit_url,
                                        args=(self.kwargs['user_id'],))
        return context

    def get_initial(self):
        """Seed the form with the current attribute values of the user."""
        user = self.get_object()
        return {
            'id': user.id,
            'name': user.name,
            'project': user.project,
            'email': getattr(user, 'email'),
            'idp': getattr(user, 'idp'),
            'cn': getattr(user, 'cn', ''),
            'source': getattr(user, 'source', ''),
            'duration': getattr(user, 'duration', ''),
        }
class CreateView(forms.ModalFormView):
    """Modal form for registering a new external user."""

    template_name = 'identity/garr_users/create.html'
    form_id = "create_user_form"
    form_class = project_forms.CreateUserForm
    submit_label = _("Create User")
    submit_url = reverse_lazy("horizon:identity:garr_users:create")
    success_url = reverse_lazy('horizon:identity:garr_users:index')
    page_title = _("Create User")

    @method_decorator(sensitive_post_parameters('password',
                                                'confirm_password'))
    def dispatch(self, *args, **kwargs):
        # Keep the submitted passwords out of debug tracebacks.
        return super(CreateView, self).dispatch(*args, **kwargs)

    def get_form_kwargs(self):
        return super(CreateView, self).get_form_kwargs()
class DetailView(views.HorizonTemplateView):
    """Read-only detail page for a single external user."""

    template_name = 'identity/garr_users/detail.html'
    page_title = "{{ user.name }}"

    def get_context_data(self, **kwargs):
        context = super(DetailView, self).get_context_data(**kwargs)
        user = self.get_data()
        # A table instance is needed only to render the row action buttons.
        table = project_tables.UsersTable(self.request)
        context["user"] = user
        context["url"] = self.get_redirect_url()
        context["actions"] = table.render_row_actions(user)
        return context

    @memoized.memoized_method
    def get_data(self):
        """Fetch the displayed user; redirect to the index on failure."""
        try:
            user = User.objects.get(id=self.kwargs['user_id'])
        except Exception:
            redirect = self.get_redirect_url()
            exceptions.handle(self.request,
                              _('Unable to retrieve user details.'),
                              redirect=redirect)
        return user

    def get_redirect_url(self):
        return reverse('horizon:identity:garr_users:index')
class ChangePasswordView(forms.ModalFormView):
    """Modal form for setting a new password for an external user."""

    template_name = 'identity/garr_users/change_password.html'
    form_id = "change_user_password_form"
    form_class = project_forms.ChangePasswordForm
    submit_url = "horizon:identity:garr_users:change_password"
    submit_label = _("Save")
    success_url = reverse_lazy('horizon:identity:garr_users:index')
    page_title = _("Change Password")

    @method_decorator(sensitive_post_parameters('password',
                                                'confirm_password'))
    def dispatch(self, *args, **kwargs):
        # Keep the submitted passwords out of debug tracebacks.
        return super(ChangePasswordView, self).dispatch(*args, **kwargs)

    @memoized.memoized_method
    def get_object(self):
        """Fetch the target user; redirect to the index on failure."""
        try:
            return User.objects.get(id=self.kwargs['user_id'])
        except Exception:
            redirect = reverse("horizon:identity:garr_users:index")
            exceptions.handle(self.request,
                              _('Unable to retrieve user information.'),
                              redirect=redirect)

    def get_context_data(self, **kwargs):
        context = super(ChangePasswordView, self).get_context_data(**kwargs)
        # The submit URL must target this specific user.
        context['submit_url'] = reverse(self.submit_url,
                                        args=(self.kwargs['user_id'],))
        return context

    def get_initial(self):
        """Seed the form with the id and current name of the user."""
        return {'id': self.kwargs['user_id'],
                'name': self.get_object().name}
class ActivateView(forms.ModalFormView):
    """Modal form that promotes an external user to a Keystone user."""

    template_name = 'identity/garr_users/create.html'
    form_id = "activate_user_form"
    form_class = project_forms.ActivateUserForm
    submit_label = _("Activate User")
    submit_url = reverse_lazy("horizon:identity:garr_users:create_keystone")
    success_url = reverse_lazy('horizon:identity:users:index')
    page_title = _("Create Keystone User")

    @method_decorator(sensitive_post_parameters('password',
                                                'confirm_password'))
    def dispatch(self, *args, **kwargs):
        # Keep the submitted passwords out of debug tracebacks.
        return super(ActivateView, self).dispatch(*args, **kwargs)

    @memoized.memoized_method
    def get_object(self, user_id):
        """Fetch the external user being activated.

        Redirects back to the index when the user cannot be loaded.
        This was previously a ``@staticmethod`` that referenced
        ``self.request`` (a NameError on the error path) and passed no
        ``redirect=``; it is now a memoized instance method, consistent
        with the other views in this module.  The ``self.get_object(...)``
        call site is unchanged.
        """
        try:
            return User.objects.get(id=user_id)
        except Exception:
            redirect = reverse('horizon:identity:garr_users:index')
            exceptions.handle(self.request,
                              _('Unable to retrieve user information.'),
                              redirect=redirect)

    def get_form_kwargs(self):
        """Attach the list of assignable Keystone roles to the form kwargs."""
        kwargs = super(ActivateView, self).get_form_kwargs()
        try:
            roles = api.keystone.role_list(self.request)
        except Exception:
            redirect = reverse("horizon:identity:garr_users:index")
            exceptions.handle(self.request,
                              _("Unable to retrieve user roles."),
                              redirect=redirect)
        roles.sort(key=operator.attrgetter("id"))
        kwargs['roles'] = roles
        return kwargs

    def get_initial(self):
        """Pre-fill the domain, default role and -- if given -- user data."""
        domain = api.keystone.get_default_domain(self.request)
        default_role = api.keystone.get_default_role(self.request)
        user_id = self.kwargs.get('user_id', None)
        initial = {
            'domain_id': domain.id,
            'domain_name': domain.name,
            'role_id': getattr(default_role, "id", None),
        }
        if user_id:
            user = self.get_object(user_id)
            initial.update(name=user.name,
                           email=user.email,
                           default_user_id=int(user.id))
        return initial
|
[
"openstack_dashboard.api.keystone.role_list",
"django.core.urlresolvers.reverse",
"garr_horizon.content.garr_users.models.User.objects.all",
"garr_horizon.content.garr_users.models.Project.objects.get",
"garr_horizon.content.garr_users.tables.UsersTable",
"horizon.messages.info",
"garr_horizon.content.garr_users.models.User.objects.get",
"garr_horizon.content.garr_users.models.User.objects.filter",
"django.views.decorators.debug.sensitive_post_parameters",
"operator.attrgetter",
"openstack_dashboard.api.keystone.get_default_role",
"openstack_dashboard.policy.check",
"django.utils.translation.ugettext_lazy",
"openstack_dashboard.api.keystone.get_default_domain",
"django.core.urlresolvers.reverse_lazy",
"logging.getLogger"
] |
[((1608, 1635), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1625, 1635), False, 'import logging\n'), ((1790, 1809), 'django.utils.translation.ugettext_lazy', '_', (['"""External Users"""'], {}), "('External Users')\n", (1791, 1809), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3262, 3278), 'django.utils.translation.ugettext_lazy', '_', (['"""Update User"""'], {}), "('Update User')\n", (3263, 3278), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3351, 3400), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (3363, 3400), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((3418, 3434), 'django.utils.translation.ugettext_lazy', '_', (['"""Update User"""'], {}), "('Update User')\n", (3419, 3434), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4832, 4848), 'django.utils.translation.ugettext_lazy', '_', (['"""Create User"""'], {}), "('Create User')\n", (4833, 4848), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((4866, 4916), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""horizon:identity:garr_users:create"""'], {}), "('horizon:identity:garr_users:create')\n", (4878, 4916), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((4935, 4984), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (4947, 4984), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((5002, 5018), 'django.utils.translation.ugettext_lazy', '_', (['"""Create User"""'], {}), "('Create User')\n", (5003, 5018), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6662, 6671), 'django.utils.translation.ugettext_lazy', '_', (['"""Save"""'], {}), "('Save')\n", (6663, 6671), True, 'from django.utils.translation import 
ugettext_lazy as _\n'), ((6690, 6739), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (6702, 6739), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((6757, 6777), 'django.utils.translation.ugettext_lazy', '_', (['"""Change Password"""'], {}), "('Change Password')\n", (6758, 6777), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8007, 8025), 'django.utils.translation.ugettext_lazy', '_', (['"""Activate User"""'], {}), "('Activate User')\n", (8008, 8025), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8043, 8102), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""horizon:identity:garr_users:create_keystone"""'], {}), "('horizon:identity:garr_users:create_keystone')\n", (8055, 8102), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((8121, 8165), 'django.core.urlresolvers.reverse_lazy', 'reverse_lazy', (['"""horizon:identity:users:index"""'], {}), "('horizon:identity:users:index')\n", (8133, 8165), False, 'from django.core.urlresolvers import reverse_lazy\n'), ((8183, 8208), 'django.utils.translation.ugettext_lazy', '_', (['"""Create Keystone User"""'], {}), "('Create Keystone User')\n", (8184, 8208), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2263, 2329), 'openstack_dashboard.policy.check', 'policy.check', (["(('identity', 'identity:list_users'),)", 'self.request'], {}), "((('identity', 'identity:list_users'),), self.request)\n", (2275, 2329), False, 'from openstack_dashboard import policy\n'), ((4120, 4155), 'django.core.urlresolvers.reverse', 'reverse', (['self.submit_url'], {'args': 'args'}), '(self.submit_url, args=args)\n', (4127, 4155), False, 'from django.core.urlresolvers import reverse\n'), ((5042, 5099), 'django.views.decorators.debug.sensitive_post_parameters', 'sensitive_post_parameters', (['"""password"""', '"""confirm_password"""'], {}), 
"('password', 'confirm_password')\n", (5067, 5099), False, 'from django.views.decorators.debug import sensitive_post_parameters\n'), ((5664, 5703), 'garr_horizon.content.garr_users.tables.UsersTable', 'project_tables.UsersTable', (['self.request'], {}), '(self.request)\n', (5689, 5703), True, 'from garr_horizon.content.garr_users import tables as project_tables\n'), ((6331, 6375), 'django.core.urlresolvers.reverse', 'reverse', (['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (6338, 6375), False, 'from django.core.urlresolvers import reverse\n'), ((6801, 6858), 'django.views.decorators.debug.sensitive_post_parameters', 'sensitive_post_parameters', (['"""password"""', '"""confirm_password"""'], {}), "('password', 'confirm_password')\n", (6826, 6858), False, 'from django.views.decorators.debug import sensitive_post_parameters\n'), ((7608, 7643), 'django.core.urlresolvers.reverse', 'reverse', (['self.submit_url'], {'args': 'args'}), '(self.submit_url, args=args)\n', (7615, 7643), False, 'from django.core.urlresolvers import reverse\n'), ((8232, 8289), 'django.views.decorators.debug.sensitive_post_parameters', 'sensitive_post_parameters', (['"""password"""', '"""confirm_password"""'], {}), "('password', 'confirm_password')\n", (8257, 8289), False, 'from django.views.decorators.debug import sensitive_post_parameters\n'), ((9400, 9445), 'openstack_dashboard.api.keystone.get_default_domain', 'api.keystone.get_default_domain', (['self.request'], {}), '(self.request)\n', (9431, 9445), False, 'from openstack_dashboard import api\n'), ((9469, 9512), 'openstack_dashboard.api.keystone.get_default_role', 'api.keystone.get_default_role', (['self.request'], {}), '(self.request)\n', (9498, 9512), False, 'from openstack_dashboard import api\n'), ((2403, 2467), 'openstack_dashboard.policy.check', 'policy.check', (["(('identity', 'identity:get_user'),)", 'self.request'], {}), "((('identity', 'identity:get_user'),), self.request)\n", (2415, 
2467), False, 'from openstack_dashboard import policy\n'), ((2965, 3024), 'django.utils.translation.ugettext_lazy', '_', (['"""Insufficient privilege level to view user information."""'], {}), "('Insufficient privilege level to view user information.')\n", (2966, 3024), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((3037, 3069), 'horizon.messages.info', 'messages.info', (['self.request', 'msg'], {}), '(self.request, msg)\n', (3050, 3069), False, 'from horizon import messages\n'), ((3631, 3674), 'garr_horizon.content.garr_users.models.User.objects.get', 'User.objects.get', ([], {'id': "self.kwargs['user_id']"}), "(id=self.kwargs['user_id'])\n", (3647, 3674), False, 'from garr_horizon.content.garr_users.models import User, Project\n'), ((5999, 6027), 'garr_horizon.content.garr_users.models.User.objects.get', 'User.objects.get', ([], {'id': 'user_id'}), '(id=user_id)\n', (6015, 6027), False, 'from garr_horizon.content.garr_users.models import User, Project\n'), ((7111, 7154), 'garr_horizon.content.garr_users.models.User.objects.get', 'User.objects.get', ([], {'id': "self.kwargs['user_id']"}), "(id=self.kwargs['user_id'])\n", (7127, 7154), False, 'from garr_horizon.content.garr_users.models import User, Project\n'), ((8557, 8585), 'garr_horizon.content.garr_users.models.User.objects.get', 'User.objects.get', ([], {'id': 'user_id'}), '(id=user_id)\n', (8573, 8585), False, 'from garr_horizon.content.garr_users.models import User, Project\n'), ((8923, 8959), 'openstack_dashboard.api.keystone.role_list', 'api.keystone.role_list', (['self.request'], {}), '(self.request)\n', (8945, 8959), False, 'from openstack_dashboard import api\n'), ((3724, 3768), 'django.core.urlresolvers.reverse', 'reverse', (['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (3731, 3768), False, 'from django.core.urlresolvers import reverse\n'), ((7204, 7248), 'django.core.urlresolvers.reverse', 'reverse', 
(['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (7211, 7248), False, 'from django.core.urlresolvers import reverse\n'), ((8635, 8679), 'django.core.urlresolvers.reverse', 'reverse', (['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (8642, 8679), False, 'from django.core.urlresolvers import reverse\n'), ((9009, 9053), 'django.core.urlresolvers.reverse', 'reverse', (['"""horizon:identity:garr_users:index"""'], {}), "('horizon:identity:garr_users:index')\n", (9016, 9053), False, 'from django.core.urlresolvers import reverse\n'), ((9237, 9262), 'operator.attrgetter', 'operator.attrgetter', (['"""id"""'], {}), "('id')\n", (9256, 9262), False, 'import operator\n'), ((2046, 2085), 'garr_horizon.content.garr_users.models.Project.objects.get', 'Project.objects.get', ([], {'name': 'filter_string'}), '(name=filter_string)\n', (2065, 2085), False, 'from garr_horizon.content.garr_users.models import User, Project\n'), ((2684, 2714), 'garr_horizon.content.garr_users.models.User.objects.filter', 'User.objects.filter', ([], {}), '(**filters)\n', (2703, 2714), False, 'from garr_horizon.content.garr_users.models import User, Project\n'), ((2764, 2782), 'garr_horizon.content.garr_users.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (2780, 2782), False, 'from garr_horizon.content.garr_users.models import User, Project\n'), ((3843, 3884), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve user information."""'], {}), "('Unable to retrieve user information.')\n", (3844, 3884), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((6175, 6212), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve user details."""'], {}), "('Unable to retrieve user details.')\n", (6176, 6212), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((7323, 7364), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve 
user information."""'], {}), "('Unable to retrieve user information.')\n", (7324, 7364), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((8754, 8795), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve user information."""'], {}), "('Unable to retrieve user information.')\n", (8755, 8795), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((9128, 9163), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve user roles."""'], {}), "('Unable to retrieve user roles.')\n", (9129, 9163), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2897, 2931), 'django.utils.translation.ugettext_lazy', '_', (['"""Unable to retrieve user list."""'], {}), "('Unable to retrieve user list.')\n", (2898, 2931), True, 'from django.utils.translation import ugettext_lazy as _\n')]
|
import os
import sys
sys.path.append("../")
from paper_hook import paper_hook
def recurse_print(t, prefix):
    """Recursively print every leaf of the nested dict *t*.

    A key of ``"v"`` marks a leaf: its value is printed after the
    accumulated *prefix*.  Any other key is descended into, with the key
    appended to the prefix.
    """
    for key in t:
        if key == "v":
            print("{0} = {1}".format(prefix, t["v"]))
        else:
            recurse_print(t[key], "{0} {1}".format(prefix, key))
def printout(t):
    """Print the whole table *t*, then a legend explaining that the
    printed key chain maps onto nested subscripts (``-1`` means unused)."""
    recurse_print(t, "t ")
    print("\n** \nt 0 -1 -1 = <value>, means t[0] = value"
          "\n\nt 0 2 -1 = <value>, means t[0][2] = value")
def solver(m=1, gamma=3, kappa=7, L=20):
    """Run the paper hook for the given parameters and print the result.

    :param m: model order forwarded to ``paper_hook``
    :param gamma: gamma parameter forwarded to ``paper_hook``
    :param kappa: kappa parameter (passed to ``paper_hook`` as ``k``)
    :param L: series length forwarded to ``paper_hook``

    Previously both branches of an ``if m == 1 and gamma == 3`` / ``else``
    called ``paper_hook(m=1, gamma=3, k=7, L=L)``, silently discarding the
    ``m``, ``gamma`` and ``kappa`` arguments parsed in ``main()``.  The
    defaults keep the original behavior for parameterless calls.
    """
    t = paper_hook(m=m, gamma=gamma, k=kappa, L=L)
    printout(t)
def main():
    """Parse optional CLI arguments (m, gamma, kappa, L) and run the solver.

    With no arguments the defaults (1, 3, 7, 20) are used.
    """
    argv = sys.argv[1:]
    if len(sys.argv) > 1:
        # Positional order: m gamma kappa L (IndexError if fewer than four given).
        m, gamma, kappa, L = [int(argv[i]) for i in range(4)]
    else:
        m, gamma, kappa, L = 1, 3, 7, 20
    solver(m, gamma, kappa, L)

if __name__ == '__main__':
    main()
|
[
"sys.path.append",
"paper_hook.paper_hook"
] |
[((21, 43), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (36, 43), False, 'import sys\n'), ((492, 526), 'paper_hook.paper_hook', 'paper_hook', ([], {'m': '(1)', 'gamma': '(3)', 'k': '(7)', 'L': 'L'}), '(m=1, gamma=3, k=7, L=L)\n', (502, 526), False, 'from paper_hook import paper_hook\n'), ((542, 576), 'paper_hook.paper_hook', 'paper_hook', ([], {'m': '(1)', 'gamma': '(3)', 'k': '(7)', 'L': 'L'}), '(m=1, gamma=3, k=7, L=L)\n', (552, 576), False, 'from paper_hook import paper_hook\n')]
|
import data as d
from multiprocessing import Process
def foo():
    """Child-process task: record one number via the shared ``data`` module."""
    d.add_num(132)

def main():
    """Spawn a child process that mutates the ``data`` module, printing state before and after the join."""
    d.init()
    p = Process(target=foo)
    print("starting process")
    p.start()
    d.out()
    print('joining')
    p.join()
    d.out()
    print('done')

if __name__ == '__main__':
    # Bug fix: the original ran everything at module level. Under the
    # multiprocessing "spawn" start method (default on Windows/macOS) the
    # child re-imports this module, so unguarded top-level code would spawn
    # recursively. The guard is required by the multiprocessing docs.
    main()
|
[
"data.out",
"multiprocessing.Process",
"data.init",
"data.add_num"
] |
[((54, 62), 'data.init', 'd.init', ([], {}), '()\n', (60, 62), True, 'import data as d\n'), ((97, 116), 'multiprocessing.Process', 'Process', ([], {'target': 'foo'}), '(target=foo)\n', (104, 116), False, 'from multiprocessing import Process\n'), ((154, 161), 'data.out', 'd.out', ([], {}), '()\n', (159, 161), True, 'import data as d\n'), ((188, 195), 'data.out', 'd.out', ([], {}), '()\n', (193, 195), True, 'import data as d\n'), ((78, 92), 'data.add_num', 'd.add_num', (['(132)'], {}), '(132)\n', (87, 92), True, 'import data as d\n')]
|
import numpy as np
import torch.nn as nn
from .layer_ops import ModuleOperation
from .simple_model import SimpleModel, SimpleModelOperation
from src.utils import param_sizes, weight_vector
class ElbowModel(ModuleOperation):
    """Two endpoint weight vectors joined to a trainable "elbow" point.

    Only ``w_3`` (the elbow) is trainable; ``w_1`` and ``w_2`` are fixed
    endpoints.  Each training-mode forward pass samples a random point on one
    of the two links [w_1, w_3] or [w_2, w_3]; at eval time the elbow itself
    is used.
    """

    def __init__(self, w_1=None, w_2=None, w_3=None):
        # Fall back to freshly initialised SimpleModel weights for any
        # vector the caller did not supply.  (Idiom fix: `x is not None`
        # instead of `not x is None`.)
        self.w_1 = w_1 if w_1 is not None else weight_vector(SimpleModel().parameters())
        self.w_2 = w_2 if w_2 is not None else weight_vector(SimpleModel().parameters())
        self.w_3 = nn.Parameter(w_3 if w_3 is not None else weight_vector(SimpleModel().parameters()))

    def sample(self):
        """Draw a random weight vector on one of the two links to the elbow."""
        alpha = np.random.random()
        beta = np.random.random()
        # Randomly choosing a link
        min_to_use = self.w_1 if beta > 0.5 else self.w_2
        # Randomly choosing a point on a link
        w = min_to_use * (1 - alpha) + self.w_3 * alpha
        return w

    def run_from_weights(self, w, x):
        """Evaluate input ``x`` under the flat weight vector ``w``."""
        model = SimpleModelOperation(w).train(self.training)
        return model(x)

    def __call__(self, x):
        # Sample a point on the curve while training; use the elbow at eval time.
        if self.training:
            w = self.sample()
        else:
            w = self.w_3
        return self.run_from_weights(w, x)

    def to(self, *args, **kwargs):
        """Move all three weight vectors to the given device/dtype; returns self."""
        self.w_1 = self.w_1.to(*args, **kwargs)
        self.w_2 = self.w_2.to(*args, **kwargs)
        self.w_3 = nn.Parameter(self.w_3.to(*args, **kwargs))
        return self

    def parameters(self):
        # Only the elbow point is optimised.
        return [self.w_3]
|
[
"numpy.random.random"
] |
[((601, 619), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (617, 619), True, 'import numpy as np\n'), ((635, 653), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (651, 653), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-03-27 03:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: Portuguese (pt-BR) verbose names for Cluster
    and Report, plus a new optional ``address`` field on Cluster."""

    dependencies = [
        ('core', '0004_auto_20160326_1958'),
    ]

    operations = [
        # Meta-only change: order clusters by label and give pt-BR display names.
        migrations.AlterModelOptions(
            name='cluster',
            options={'ordering': ('label',), 'verbose_name': 'centro de interesse', 'verbose_name_plural': 'centros de interesse'},
        ),
        # Meta-only change: newest-first ordering and pt-BR display names for reports.
        migrations.AlterModelOptions(
            name='report',
            options={'ordering': ('-id',), 'verbose_name': 'ocorrência', 'verbose_name_plural': 'ocorrências'},
        ),
        # Schema change: optional free-text address on Cluster.
        migrations.AddField(
            model_name='cluster',
            name='address',
            field=models.CharField(blank=True, max_length=512, verbose_name='endereço'),
        ),
    ]
|
[
"django.db.models.CharField",
"django.db.migrations.AlterModelOptions"
] |
[((296, 469), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""cluster"""', 'options': "{'ordering': ('label',), 'verbose_name': 'centro de interesse',\n 'verbose_name_plural': 'centros de interesse'}"}), "(name='cluster', options={'ordering': ('label',\n ), 'verbose_name': 'centro de interesse', 'verbose_name_plural':\n 'centros de interesse'})\n", (324, 469), False, 'from django.db import migrations, models\n'), ((505, 652), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""report"""', 'options': "{'ordering': ('-id',), 'verbose_name': 'ocorrência', 'verbose_name_plural':\n 'ocorrências'}"}), "(name='report', options={'ordering': ('-id',),\n 'verbose_name': 'ocorrência', 'verbose_name_plural': 'ocorrências'})\n", (533, 652), False, 'from django.db import migrations, models\n'), ((794, 863), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(512)', 'verbose_name': '"""endereço"""'}), "(blank=True, max_length=512, verbose_name='endereço')\n", (810, 863), False, 'from django.db import migrations, models\n')]
|
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
from typing import Any, Tuple, Optional
""" CONSTANT VARIABLES """
CONTEXT_PREFIX = "TAXII2"
COMPLEX_OBSERVATION_MODE_SKIP = "Skip indicators with more than a single observation"
COMPLEX_OBSERVATION_MODE_CREATE_ALL = "Create indicator for each observation"
""" HELPER FUNCTIONS """
def try_parse_integer(
    int_to_parse: Any, err_msg: str = "Please provide a valid limit (positive integer)"
) -> int:
    """
    Tries to parse an integer, and if fails will throw DemistoException with given err_msg
    """
    # Falsy inputs (None, 0, "") are returned unchanged, not parsed.
    if not int_to_parse:
        return int_to_parse
    try:
        return int(int_to_parse)
    except (TypeError, ValueError):
        raise DemistoException(err_msg)
""" COMMAND FUNCTIONS """
def module_test_command(client, limit, fetch_full_feed):
    """Test-button handler: verify server connectivity and config sanity."""
    if not client.collections:
        # initialise() found no collections -> connection/auth problem.
        return_error("Could not connect to server")
    else:
        # A positive limit contradicts full-feed mode (which needs limit == -1).
        if fetch_full_feed and limit and limit != -1:
            return_error(
                "Configuration Error - Max Indicators Per Fetch is disabled when Full Feed Fetch is enabled"
            )
        demisto.results("ok")
def fetch_indicators_command(
    client,
    initial_interval,
    limit,
    last_run_ctx,
    fetch_full_feed: bool = False,
    filter_args: Optional[dict] = None,
) -> Tuple[list, dict]:
    """
    Fetch indicators from TAXII 2 server
    :param client: Taxii2FeedClient
    :param initial_interval: initial interval in parse_date_range format
    :param limit: upper limit of indicators to fetch (-1 means unlimited)
    :param last_run_ctx: last run dict with {collection_id: last_run_time string}
    :param fetch_full_feed: when set to true, will ignore last run, and try to fetch the entire feed
    :param filter_args: filter args requested by the user
    :return: indicators in cortex TIM format, and the updated last_run_ctx
    """
    if initial_interval:
        # Convert the human-readable range ("1 day" etc.) to a TAXII timestamp.
        initial_interval, _ = parse_date_range(
            initial_interval, date_format=TAXII_TIME_FORMAT
        )
    if filter_args is None:
        filter_args = {}
    # Cursor for the single-collection case; None when fetching all collections.
    last_fetch_time = (
        last_run_ctx.get(client.collection_to_fetch.id)
        if client.collection_to_fetch
        else None
    )
    filter_args["added_after"] = get_added_after(
        fetch_full_feed, initial_interval, last_fetch_time, filter_args
    )
    if client.collection_to_fetch is None:
        # fetch all collections
        if client.collections is None:
            raise DemistoException(ERR_NO_COLL)
        indicators: list = []
        for collection in client.collections:
            client.collection_to_fetch = collection
            # Each collection keeps its own cursor in last_run_ctx.
            filter_args["added_after"] = get_added_after(
                fetch_full_feed, initial_interval, last_run_ctx.get(collection.id)
            )
            fetched_iocs = client.build_iterator(limit, **filter_args)
            indicators.extend(fetched_iocs)
            if limit >= 0:
                limit -= len(fetched_iocs)
                if limit <= 0:
                    # NOTE: breaking before the cursor update below means the
                    # collection that exhausted the limit is not marked, so it
                    # is re-polled from the same point on the next run.
                    break
            last_run_ctx[collection.id] = client.last_fetched_indicator__modified
    else:
        # fetch from a single collection
        indicators = client.build_iterator(limit, **filter_args)
        # Advance the cursor; if nothing was fetched, keep the added_after
        # value so the next run resumes from the same point.
        last_run_ctx[client.collection_to_fetch.id] = (
            client.last_fetched_indicator__modified
            if client.last_fetched_indicator__modified
            else filter_args.get("added_after")
        )
    return indicators, last_run_ctx
def get_added_after(
    fetch_full_feed, initial_interval, last_fetch_time=None, filter_args=None
):
    """
    Resolve the ``added_after`` value used when polling the TAXII server.

    Precedence: a full-feed fetch always starts from ``initial_interval``;
    otherwise an explicit ``added_after`` in ``filter_args`` wins, then the
    last fetch time, then ``initial_interval``.
    """
    if fetch_full_feed:
        return initial_interval
    if filter_args and "added_after" in filter_args:
        return filter_args["added_after"]
    return last_fetch_time or initial_interval
def get_indicators_command(
    client, raw="false", limit=10, added_after=None, filter_args=None
):
    """
    Fetch indicators from TAXII 2 server
    :param client: Taxii2FeedClient
    :param raw: When set to "true" will return only rawJSON
    :param limit: upper limit of indicators to fetch
    :param (Optional) added_after: added after time string in parse_date_range format
    :param (Optional) filter_args: filter to be used for taxii poll
    :return: indicators in cortex TIM format
    """
    limit = try_parse_integer(limit)
    filter_args = handle_filter_arg(filter_args)
    # An explicit added_after inside filter_args takes precedence over the arg.
    if added_after and "added_after" not in filter_args:
        added_after, _ = parse_date_range(added_after, date_format=TAXII_TIME_FORMAT)
        filter_args["added_after"] = added_after
    # The command argument arrives as a string flag.
    raw = raw == "true"
    if client.collection_to_fetch is None:
        # fetch all collections
        if client.collections is None:
            raise DemistoException(ERR_NO_COLL)
        indicators: list = []
        for collection in client.collections:
            client.collection_to_fetch = collection
            fetched_iocs = client.build_iterator(limit, **filter_args)
            indicators.extend(fetched_iocs)
            if limit >= 0:
                # Spread the overall limit across collections.
                limit -= len(fetched_iocs)
                if limit <= 0:
                    break
    else:
        indicators = client.build_iterator(limit=limit, **filter_args)
    if raw:
        # Raw mode: emit only the rawJSON payloads and return nothing.
        demisto.results({"indicators": [x.get("rawJSON") for x in indicators]})
        return
    md = f"Found {len(indicators)} results:\n" + tableToMarkdown(
        "", indicators, ["value", "type"]
    )
    if indicators:
        return CommandResults(
            outputs_prefix=CONTEXT_PREFIX + ".Indicators",
            outputs_key_field="value",
            outputs=indicators,
            readable_output=md,
        )
    # No indicators: return just the human-readable summary.
    return md
def get_collections_command(client):
    """
    Get the available collections in the TAXII server
    :param client: FeedClient
    :return: available collections
    """
    collections = [
        {"Name": collection.title, "ID": collection.id}
        for collection in client.collections
    ]
    md = tableToMarkdown("TAXII2 Server Collections:", collections, ["Name", "ID"])
    return CommandResults(
        outputs_prefix=CONTEXT_PREFIX + ".Collections",
        outputs_key_field="ID",
        outputs=collections,
        readable_output=md,
    )
def reset_fetch_command(client):
    """
    Reset the last fetch from the integration context
    """
    # Wiping the context drops every per-collection cursor.
    demisto.setIntegrationContext({})
    message = (
        "Fetch was reset successfully. Your next indicator fetch will collect indicators from "
        'the configured "First Fetch Time"'
    )
    return message
def handle_filter_arg(filter_args=None, delimiter="="):
    """
    Transforms filter arguments (str) to a dict to be used by build_iterator
    :param filter_args: filter_args as typed by the user in the filter_args param
    :param delimiter: delimiter to use between filter_key and filter_val
    :return: filter_args dict with type:indicator {filter_key: filter_value}
    """
    # The indicator-type filter is always applied.
    filter_dict = {"type": "indicator"}
    if not filter_args:
        return filter_dict
    try:
        for item in argToList(filter_args):
            # Exactly one delimiter per item; anything else raises ValueError.
            key, val = item.split(delimiter)
            filter_dict[key] = val
    except ValueError:
        raise DemistoException(
            "Encountered an error while trying to parse filter_args, please make sure you entered them in the correct format"
        )
    return filter_dict
def main():
    """Entry point: read integration params, build the TAXII2 client and dispatch the invoked command."""
    params = demisto.params()
    args = demisto.args()
    url = params.get("url")
    collection_to_fetch = params.get("collection_to_fetch")
    credentials = params.get("credentials") or {}
    username = credentials.get("identifier")
    password = credentials.get("password")
    proxies = handle_proxy()
    verify_certificate = not params.get("insecure", False)
    skip_complex_mode = COMPLEX_OBSERVATION_MODE_SKIP == params.get(
        "observation_operator_mode"
    )
    feed_tags = argToList(params.get("feedTags"))
    initial_interval = params.get("initial_interval")
    fetch_full_feed = params.get("fetch_full_feed") or False
    # -1 means "no limit".
    limit = try_parse_integer(params.get("limit") or -1)
    limit_per_request = try_parse_integer(params.get("limit_per_request"))
    filter_args = handle_filter_arg(params.get("filter_args"))
    command = demisto.command()
    demisto.info(f"Command being called in {CONTEXT_PREFIX} is {command}")
    try:
        client = Taxii2FeedClient(
            url=url,
            collection_to_fetch=collection_to_fetch,
            proxies=proxies,
            verify=verify_certificate,
            skip_complex_mode=skip_complex_mode,
            username=username,
            password=password,
            tags=feed_tags,
            limit_per_request=limit_per_request,
        )
        client.initialise()
        # Simple commands dispatched via the table; test/fetch handled below.
        commands = {
            "taxii2-reset-fetch-indicators": reset_fetch_command,
            "taxii2-get-indicators": get_indicators_command,
            "taxii2-get-collections": get_collections_command,
        }
        if demisto.command() == "test-module":
            # This is the call made when pressing the integration Test button.
            module_test_command(client, limit, fetch_full_feed)
        elif demisto.command() == "fetch-indicators":
            if fetch_full_feed:
                limit = -1
            integration_ctx = demisto.getIntegrationContext() or {}
            (indicators, integration_ctx) = fetch_indicators_command(
                client,
                initial_interval,
                limit,
                integration_ctx,
                fetch_full_feed,
                filter_args,
            )
            # Push indicators to the server in chunks.
            for iter_ in batch(indicators, batch_size=2000):
                demisto.createIndicators(iter_)
            # Persist the per-collection cursors for the next fetch.
            demisto.setIntegrationContext(integration_ctx)
        else:
            return_results(commands[command](client, **args))  # type: ignore[operator]
    except Exception as e:
        err_msg = f"Failed to execute {command} command. Error: {str(e)}\n\ntraceback: {traceback.format_exc()}"
        if isinstance(e, requests.exceptions.SSLError):
            LOG(err_msg)
            # Replace the raw SSL traceback with actionable guidance.
            err_msg = (
                "Encountered an HTTPS certificate error. This error can be ignored by enabling "
                '"Trust any certificate (not secure)" in the instance configuration.'
            )
        return_error(err_msg)
# NOTE(review): the bottom-of-file wildcard import looks like the XSOAR
# ApiModule inlining pattern (the module text is appended at build time) —
# confirm before moving it to the top of the file.
from TAXII2ApiModule import *  # noqa: E402
if __name__ in ("__main__", "__builtin__", "builtins"):
    main()
|
[
"demistomock.info",
"demistomock.args",
"demistomock.setIntegrationContext",
"demistomock.getIntegrationContext",
"demistomock.command",
"demistomock.params",
"demistomock.createIndicators",
"demistomock.results"
] |
[((6808, 6841), 'demistomock.setIntegrationContext', 'demisto.setIntegrationContext', (['{}'], {}), '({})\n', (6837, 6841), True, 'import demistomock as demisto\n'), ((7931, 7947), 'demistomock.params', 'demisto.params', ([], {}), '()\n', (7945, 7947), True, 'import demistomock as demisto\n'), ((7959, 7973), 'demistomock.args', 'demisto.args', ([], {}), '()\n', (7971, 7973), True, 'import demistomock as demisto\n'), ((8775, 8792), 'demistomock.command', 'demisto.command', ([], {}), '()\n', (8790, 8792), True, 'import demistomock as demisto\n'), ((8797, 8867), 'demistomock.info', 'demisto.info', (['f"""Command being called in {CONTEXT_PREFIX} is {command}"""'], {}), "(f'Command being called in {CONTEXT_PREFIX} is {command}')\n", (8809, 8867), True, 'import demistomock as demisto\n'), ((1151, 1172), 'demistomock.results', 'demisto.results', (['"""ok"""'], {}), "('ok')\n", (1166, 1172), True, 'import demistomock as demisto\n'), ((9513, 9530), 'demistomock.command', 'demisto.command', ([], {}), '()\n', (9528, 9530), True, 'import demistomock as demisto\n'), ((9706, 9723), 'demistomock.command', 'demisto.command', ([], {}), '()\n', (9721, 9723), True, 'import demistomock as demisto\n'), ((10256, 10302), 'demistomock.setIntegrationContext', 'demisto.setIntegrationContext', (['integration_ctx'], {}), '(integration_ctx)\n', (10285, 10302), True, 'import demistomock as demisto\n'), ((9836, 9867), 'demistomock.getIntegrationContext', 'demisto.getIntegrationContext', ([], {}), '()\n', (9865, 9867), True, 'import demistomock as demisto\n'), ((10211, 10242), 'demistomock.createIndicators', 'demisto.createIndicators', (['iter_'], {}), '(iter_)\n', (10235, 10242), True, 'import demistomock as demisto\n')]
|
"""
Title: Making new layers and models via subclassing
Author: [fchollet](https://twitter.com/fchollet)
Date created: 2019/03/01
Last modified: 2020/04/13
Description: Complete guide to writing `Layer` and `Model` objects from scratch.
"""
"""
## Setup
"""
import tensorflow as tf
from tensorflow import keras
"""
## The `Layer` class: the combination of state (weights) and some computation
One of the central abstraction in Keras is the `Layer` class. A layer
encapsulates both a state (the layer's "weights") and a transformation from
inputs to outputs (a "call", the layer's forward pass).
Here's a densely-connected layer. It has a state: the variables `w` and `b`.
"""
class Linear(keras.layers.Layer):
    """Densely-connected layer holding its own weight matrix `w` and bias `b`."""

    def __init__(self, units=32, input_dim=32):
        super().__init__()
        # Weight matrix: random-normal init, shape (input_dim, units).
        self.w = tf.Variable(
            initial_value=tf.random_normal_initializer()(
                shape=(input_dim, units), dtype="float32"
            ),
            trainable=True,
        )
        # Bias vector: zeros, one entry per output unit.
        self.b = tf.Variable(
            initial_value=tf.zeros_initializer()(shape=(units,), dtype="float32"),
            trainable=True,
        )

    def call(self, inputs):
        # Affine transform: inputs @ w + b.
        return tf.matmul(inputs, self.w) + self.b
"""
You would use a layer by calling it on some tensor input(s), much like a Python
function.
"""
x = tf.ones((2, 2))
linear_layer = Linear(4, 2)
y = linear_layer(x)
print(y)
"""
Note that the weights `w` and `b` are automatically tracked by the layer upon
being set as layer attributes:
"""
assert linear_layer.weights == [linear_layer.w, linear_layer.b]
"""
Note you also have access to a quicker shortcut for adding weight to a layer:
the `add_weight()` method:
"""
class Linear(keras.layers.Layer):
    """Dense layer built with the `add_weight()` shortcut instead of raw tf.Variable."""

    def __init__(self, units=32, input_dim=32):
        super().__init__()
        self.w = self.add_weight(
            shape=(input_dim, units),
            initializer="random_normal",
            trainable=True,
        )
        self.b = self.add_weight(
            shape=(units,),
            initializer="zeros",
            trainable=True,
        )

    def call(self, inputs):
        return tf.matmul(inputs, self.w) + self.b
x = tf.ones((2, 2))
linear_layer = Linear(4, 2)
y = linear_layer(x)
print(y)
"""
## Layers can have non-trainable weights
Besides trainable weights, you can add non-trainable weights to a layer as
well. Such weights are meant not to be taken into account during
backpropagation, when you are training the layer.
Here's how to add and use a non-trainable weight:
"""
class ComputeSum(keras.layers.Layer):
    """Keeps a running column-wise sum of all inputs in a non-trainable variable."""

    def __init__(self, input_dim):
        super().__init__()
        # Non-trainable: ignored by backprop, but still listed in layer.weights.
        self.total = tf.Variable(tf.zeros((input_dim,)), trainable=False)

    def call(self, inputs):
        batch_sum = tf.reduce_sum(inputs, axis=0)
        self.total.assign_add(batch_sum)
        return self.total
x = tf.ones((2, 2))
my_sum = ComputeSum(2)
y = my_sum(x)
print(y.numpy())
y = my_sum(x)
print(y.numpy())
"""
It's part of `layer.weights`, but it gets categorized as a non-trainable weight:
"""
print("weights:", len(my_sum.weights))
print("non-trainable weights:", len(my_sum.non_trainable_weights))
# It's not included in the trainable weights:
print("trainable_weights:", my_sum.trainable_weights)
"""
## Best practice: deferring weight creation until the shape of the inputs is known
Our `Linear` layer above took an `input_dim `argument that was used to compute
the shape of the weights `w` and `b` in `__init__()`:
"""
class Linear(keras.layers.Layer):
    """Dense layer whose weight shapes are fixed at construction via `input_dim`."""

    def __init__(self, units=32, input_dim=32):
        super().__init__()
        self.w = self.add_weight(
            shape=(input_dim, units), initializer="random_normal", trainable=True
        )
        self.b = self.add_weight(
            shape=(units,), initializer="zeros", trainable=True
        )

    def call(self, inputs):
        return self.b + tf.matmul(inputs, self.w)
"""
In many cases, you may not know in advance the size of your inputs, and you
would like to lazily create weights when that value becomes known, some time
after instantiating the layer.
In the Keras API, we recommend creating layer weights in the `build(self,
inputs_shape)` method of your layer. Like this:
"""
class Linear(keras.layers.Layer):
    """Dense layer that defers weight creation to `build()`, so the input size need not be known up front."""

    def __init__(self, units=32):
        super().__init__()
        self.units = units

    def build(self, input_shape):
        # Run automatically on the first __call__ with the actual input shape.
        in_dim = input_shape[-1]
        self.w = self.add_weight(
            shape=(in_dim, self.units),
            initializer="random_normal",
            trainable=True,
        )
        self.b = self.add_weight(
            shape=(self.units,),
            initializer="random_normal",
            trainable=True,
        )

    def call(self, inputs):
        return tf.matmul(inputs, self.w) + self.b
"""
The `__call__()` method of your layer will automatically run build the first time
it is called. You now have a layer that's lazy and thus easier to use:
"""
# At instantiation, we don't know on what inputs this is going to get called
linear_layer = Linear(32)
# The layer's weights are created dynamically the first time the layer is called
y = linear_layer(x)
"""
## Layers are recursively composable
If you assign a Layer instance as an attribute of another Layer, the outer layer
will start tracking the weights of the inner layer.
We recommend creating such sublayers in the `__init__()` method (since the
sublayers will typically have a build method, they will be built when the
outer layer gets built).
"""
# Let's assume we are reusing the Linear class
# with a `build` method that we defined above.
class MLPBlock(keras.layers.Layer):
    """Three stacked Linear sublayers (32 -> 32 -> 1) with ReLU between them."""

    def __init__(self):
        super().__init__()
        # Sublayers created in __init__ are tracked (and built) by the outer layer.
        self.linear_1 = Linear(32)
        self.linear_2 = Linear(32)
        self.linear_3 = Linear(1)

    def call(self, inputs):
        hidden = tf.nn.relu(self.linear_1(inputs))
        hidden = tf.nn.relu(self.linear_2(hidden))
        return self.linear_3(hidden)
mlp = MLPBlock()
y = mlp(tf.ones(shape=(3, 64))) # The first call to the `mlp` will create the weights
print("weights:", len(mlp.weights))
print("trainable weights:", len(mlp.trainable_weights))
"""
## The `add_loss()` method
When writing the `call()` method of a layer, you can create loss tensors that
you will want to use later, when writing your training loop. This is doable by
calling `self.add_loss(value)`:
"""
# A layer that creates an activity regularization loss
class ActivityRegularizationLayer(keras.layers.Layer):
    """Identity layer that records an activity-regularization loss via `add_loss()`."""

    def __init__(self, rate=1e-2):
        super().__init__()
        self.rate = rate

    def call(self, inputs):
        # Penalty proportional to the summed activations.
        penalty = self.rate * tf.reduce_sum(inputs)
        self.add_loss(penalty)
        return inputs
"""
These losses (including those created by any inner layer) can be retrieved via
`layer.losses`. This property is reset at the start of every `__call__()` to
the top-level layer, so that `layer.losses` always contains the loss values
created during the last forward pass.
"""
class OuterLayer(keras.layers.Layer):
    """Wraps an ActivityRegularizationLayer; the inner layer's losses surface in `self.losses`."""

    def __init__(self):
        super().__init__()
        self.activity_reg = ActivityRegularizationLayer(1e-2)

    def call(self, inputs):
        return self.activity_reg(inputs)
layer = OuterLayer()
assert len(layer.losses) == 0 # No losses yet since the layer has never been called
_ = layer(tf.zeros(1, 1))
assert len(layer.losses) == 1 # We created one loss value
# `layer.losses` gets reset at the start of each __call__
_ = layer(tf.zeros(1, 1))
assert len(layer.losses) == 1 # This is the loss created during the call above
"""
In addition, the `loss` property also contains regularization losses created
for the weights of any inner layer:
"""
class OuterLayerWithKernelRegularizer(keras.layers.Layer):
    """Wraps a Dense layer whose L2 kernel regularizer contributes to `self.losses`."""

    def __init__(self):
        super().__init__()
        regularizer = tf.keras.regularizers.l2(1e-3)
        self.dense = keras.layers.Dense(32, kernel_regularizer=regularizer)

    def call(self, inputs):
        return self.dense(inputs)
layer = OuterLayerWithKernelRegularizer()
_ = layer(tf.zeros((1, 1)))
# This is `1e-3 * sum(layer.dense.kernel ** 2)`,
# created by the `kernel_regularizer` above.
print(layer.losses)
"""
These losses are meant to be taken into account when writing training loops,
like this:
```python
# Instantiate an optimizer.
optimizer = tf.keras.optimizers.SGD(learning_rate=1e-3)
loss_fn = keras.losses.SparseCategoricalCrossentropy(from_logits=True)
# Iterate over the batches of a dataset.
for x_batch_train, y_batch_train in train_dataset:
with tf.GradientTape() as tape:
logits = layer(x_batch_train) # Logits for this minibatch
# Loss value for this minibatch
loss_value = loss_fn(y_batch_train, logits)
# Add extra losses created during this forward pass:
loss_value += sum(model.losses)
grads = tape.gradient(loss_value, model.trainable_weights)
optimizer.apply_gradients(zip(grads, model.trainable_weights))
```
"""
"""
For a detailed guide about writing training loops, see the
[guide to writing a training loop from scratch](/guides/writing_a_training_loop_from_scratch/).
These losses also work seamlessly with `fit()` (they get automatically summed
and added to the main loss, if any):
"""
import numpy as np
inputs = keras.Input(shape=(3,))
outputs = ActivityRegularizationLayer()(inputs)
model = keras.Model(inputs, outputs)
# If there is a loss passed in `compile`, the regularization
# losses get added to it
model.compile(optimizer="adam", loss="mse")
model.fit(np.random.random((2, 3)), np.random.random((2, 3)))
# It's also possible not to pass any loss in `compile`,
# since the model already has a loss to minimize, via the `add_loss`
# call during the forward pass!
model.compile(optimizer="adam")
model.fit(np.random.random((2, 3)), np.random.random((2, 3)))
"""
## The `add_metric()` method
Similarly to `add_loss()`, layers also have an `add_metric()` method
for tracking the moving average of a quantity during training.
Consider the following layer: a "logistic endpoint" layer.
It takes as inputs predictions & targets, it computes a loss which it tracks
via `add_loss()`, and it computes an accuracy scalar, which it tracks via
`add_metric()`.
"""
class LogisticEndpoint(keras.layers.Layer):
    """Endpoint layer: tracks cross-entropy loss and accuracy, returns softmax predictions."""

    def __init__(self, name=None):
        super().__init__(name=name)
        self.loss_fn = keras.losses.BinaryCrossentropy(from_logits=True)
        self.accuracy_fn = keras.metrics.BinaryAccuracy()

    def call(self, targets, logits, sample_weights=None):
        # Training-time loss, attached to the layer via add_loss().
        self.add_loss(self.loss_fn(targets, logits, sample_weights))
        # Accuracy tracked as a metric via add_metric().
        acc = self.accuracy_fn(targets, logits, sample_weights)
        self.add_metric(acc, name="accuracy")
        # Inference-time prediction tensor (used by .predict()).
        return tf.nn.softmax(logits)
"""
Metrics tracked in this way are accessible via `layer.metrics`:
"""
layer = LogisticEndpoint()
targets = tf.ones((2, 2))
logits = tf.ones((2, 2))
y = layer(targets, logits)
print("layer.metrics:", layer.metrics)
print("current accuracy value:", float(layer.metrics[0].result()))
"""
Just like for `add_loss()`, these metrics are tracked by `fit()`:
"""
inputs = keras.Input(shape=(3,), name="inputs")
targets = keras.Input(shape=(10,), name="targets")
logits = keras.layers.Dense(10)(inputs)
predictions = LogisticEndpoint(name="predictions")(logits, targets)
model = keras.Model(inputs=[inputs, targets], outputs=predictions)
model.compile(optimizer="adam")
data = {
"inputs": np.random.random((3, 3)),
"targets": np.random.random((3, 10)),
}
model.fit(data)
"""
## You can optionally enable serialization on your layers
If you need your custom layers to be serializable as part of a
[Functional model](/guides/functional_api/), you can optionally implement a `get_config()`
method:
"""
class Linear(keras.layers.Layer):
    """Lazily-built dense layer that is serializable via `get_config()`."""

    def __init__(self, units=32):
        super().__init__()
        self.units = units

    def build(self, input_shape):
        self.w = self.add_weight(
            shape=(input_shape[-1], self.units),
            initializer="random_normal",
            trainable=True,
        )
        self.b = self.add_weight(
            shape=(self.units,),
            initializer="random_normal",
            trainable=True,
        )

    def call(self, inputs):
        return tf.matmul(inputs, self.w) + self.b

    def get_config(self):
        # Everything needed to recreate this layer via from_config().
        return {"units": self.units}
# Now you can recreate the layer from its config:
layer = Linear(64)
config = layer.get_config()
print(config)
new_layer = Linear.from_config(config)
"""
Note that the `__init__()` method of the base `Layer` class takes some keyword
arguments, in particular a `name` and a `dtype`. It's good practice to pass
these arguments to the parent class in `__init__()` and to include them in the
layer config:
"""
class Linear(keras.layers.Layer):
    """Serializable dense layer that forwards base-Layer kwargs (name, dtype, ...) and includes them in its config."""

    def __init__(self, units=32, **kwargs):
        super().__init__(**kwargs)
        self.units = units

    def build(self, input_shape):
        self.w = self.add_weight(
            shape=(input_shape[-1], self.units),
            initializer="random_normal",
            trainable=True,
        )
        self.b = self.add_weight(
            shape=(self.units,),
            initializer="random_normal",
            trainable=True,
        )

    def call(self, inputs):
        return tf.matmul(inputs, self.w) + self.b

    def get_config(self):
        # Extend the base config (name, dtype, ...) with our own arguments.
        base = super().get_config()
        base.update({"units": self.units})
        return base
layer = Linear(64)
config = layer.get_config()
print(config)
new_layer = Linear.from_config(config)
"""
If you need more flexibility when deserializing the layer from its config, you
can also override the `from_config()` class method. This is the base
implementation of `from_config()`:
```python
def from_config(cls, config):
return cls(**config)
```
To learn more about serialization and saving, see the complete
[guide to saving and serializing models](/guides/serialization_and_saving/).
"""
"""
## Privileged `training` argument in the `call()` method
Some layers, in particular the `BatchNormalization` layer and the `Dropout`
layer, have different behaviors during training and inference. For such
layers, it is standard practice to expose a `training` (boolean) argument in
the `call()` method.
By exposing this argument in `call()`, you enable the built-in training and
evaluation loops (e.g. `fit()`) to correctly use the layer in training and
inference.
"""
class CustomDropout(keras.layers.Layer):
    """Dropout layer exposing the privileged `training` argument in `call()`."""

    def __init__(self, rate, **kwargs):
        super().__init__(**kwargs)
        self.rate = rate

    def call(self, inputs, training=None):
        # Only drop units during training; inference is the identity.
        if not training:
            return inputs
        return tf.nn.dropout(inputs, rate=self.rate)
"""
## Privileged `mask` argument in the `call()` method
The other privileged argument supported by `call()` is the `mask` argument.
You will find it in all Keras RNN layers. A mask is a boolean tensor (one
boolean value per timestep in the input) used to skip certain input timesteps
when processing timeseries data.
Keras will automatically pass the correct `mask` argument to `__call__()` for
layers that support it, when a mask is generated by a prior layer.
Mask-generating layers are the `Embedding`
layer configured with `mask_zero=True`, and the `Masking` layer.
To learn more about masking and how to write masking-enabled layers, please
check out the guide
["understanding padding and masking"](/guides/understanding_masking_and_padding/).
"""
"""
## The `Model` class
In general, you will use the `Layer` class to define inner computation blocks,
and will use the `Model` class to define the outer model -- the object you
will train.
For instance, in a ResNet50 model, you would have several ResNet blocks
subclassing `Layer`, and a single `Model` encompassing the entire ResNet50
network.
The `Model` class has the same API as `Layer`, with the following differences:
- It exposes built-in training, evaluation, and prediction loops
(`model.fit()`, `model.evaluate()`, `model.predict()`).
- It exposes the list of its inner layers, via the `model.layers` property.
- It exposes saving and serialization APIs (`save()`, `save_weights()`...)
Effectively, the `Layer` class corresponds to what we refer to in the
literature as a "layer" (as in "convolution layer" or "recurrent layer") or as
a "block" (as in "ResNet block" or "Inception block").
Meanwhile, the `Model` class corresponds to what is referred to in the
literature as a "model" (as in "deep learning model") or as a "network" (as in
"deep neural network").
So if you're wondering, "should I use the `Layer` class or the `Model` class?",
ask yourself: will I need to call `fit()` on it? Will I need to call `save()`
on it? If so, go with `Model`. If not (either because your class is just a block
in a bigger system, or because you are writing training & saving code yourself),
use `Layer`.
For instance, we could take our mini-resnet example above, and use it to build
a `Model` that we could train with `fit()`, and that we could save with
`save_weights()`:
"""
"""
```python
class ResNet(tf.keras.Model):
def __init__(self, num_classes=1000):
super(ResNet, self).__init__()
self.block_1 = ResNetBlock()
self.block_2 = ResNetBlock()
self.global_pool = layers.GlobalAveragePooling2D()
self.classifier = Dense(num_classes)
def call(self, inputs):
x = self.block_1(inputs)
x = self.block_2(x)
x = self.global_pool(x)
return self.classifier(x)
resnet = ResNet()
dataset = ...
resnet.fit(dataset, epochs=10)
resnet.save(filepath)
```
"""
"""
## Putting it all together: an end-to-end example
Here's what you've learned so far:
- A `Layer` encapsulates a state (created in `__init__()` or `build()`) and some
computation (defined in `call()`).
- Layers can be recursively nested to create new, bigger computation blocks.
- Layers can create and track losses (typically regularization losses) as well
as metrics, via `add_loss()` and `add_metric()`
- The outer container, the thing you want to train, is a `Model`. A `Model` is
just like a `Layer`, but with added training and serialization utilities.
Let's put all of these things together into an end-to-end example: we're going
to implement a Variational AutoEncoder (VAE). We'll train it on MNIST digits.
Our VAE will be a subclass of `Model`, built as a nested composition of layers
that subclass `Layer`. It will feature a regularization loss (KL divergence).
"""
from tensorflow.keras import layers
class Sampling(layers.Layer):
    """Draws z ~ N(z_mean, exp(z_log_var)) via the reparameterization trick."""

    def call(self, inputs):
        z_mean, z_log_var = inputs
        # Standard-normal noise with the same (batch, dim) shape as z_mean.
        noise = tf.keras.backend.random_normal(
            shape=(tf.shape(z_mean)[0], tf.shape(z_mean)[1]))
        # Shift by the mean and scale by the std (= exp(0.5 * log_var)).
        return z_mean + tf.exp(0.5 * z_log_var) * noise
class Encoder(layers.Layer):
    """Maps MNIST digits to a triplet (z_mean, z_log_var, z)."""

    def __init__(self, latent_dim=32, intermediate_dim=64, name="encoder", **kwargs):
        super(Encoder, self).__init__(name=name, **kwargs)
        # Shared hidden projection, then two heads (mean / log-variance)
        # feeding the stochastic sampling layer.
        self.dense_proj = layers.Dense(intermediate_dim, activation="relu")
        self.dense_mean = layers.Dense(latent_dim)
        self.dense_log_var = layers.Dense(latent_dim)
        self.sampling = Sampling()

    def call(self, inputs):
        hidden = self.dense_proj(inputs)
        z_mean = self.dense_mean(hidden)
        z_log_var = self.dense_log_var(hidden)
        return z_mean, z_log_var, self.sampling((z_mean, z_log_var))
class Decoder(layers.Layer):
    """Converts z, the encoded digit vector, back into a readable digit."""

    def __init__(self, original_dim, intermediate_dim=64, name="decoder", **kwargs):
        super(Decoder, self).__init__(name=name, **kwargs)
        # Hidden projection followed by a sigmoid reconstruction layer that
        # maps back to the flattened input dimensionality.
        self.dense_proj = layers.Dense(intermediate_dim, activation="relu")
        self.dense_output = layers.Dense(original_dim, activation="sigmoid")

    def call(self, inputs):
        return self.dense_output(self.dense_proj(inputs))
class VariationalAutoEncoder(keras.Model):
    """Combines the encoder and decoder into an end-to-end model for training."""

    def __init__(
        self,
        original_dim,
        intermediate_dim=64,
        latent_dim=32,
        name="autoencoder",
        **kwargs
    ):
        super(VariationalAutoEncoder, self).__init__(name=name, **kwargs)
        self.original_dim = original_dim
        self.encoder = Encoder(latent_dim=latent_dim, intermediate_dim=intermediate_dim)
        self.decoder = Decoder(original_dim, intermediate_dim=intermediate_dim)

    def call(self, inputs):
        z_mean, z_log_var, z = self.encoder(inputs)
        reconstruction = self.decoder(z)
        # KL(q(z|x) || N(0, I)) regularizer, tracked via add_loss() so it is
        # picked up by both the custom loop and model.fit().
        kl_loss = -0.5 * tf.reduce_mean(
            z_log_var - tf.square(z_mean) - tf.exp(z_log_var) + 1
        )
        self.add_loss(kl_loss)
        return reconstruction
"""
Let's write a simple training loop on MNIST:
"""
original_dim = 784
vae = VariationalAutoEncoder(original_dim, 64, 32)
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3)
mse_loss_fn = tf.keras.losses.MeanSquaredError()
loss_metric = tf.keras.metrics.Mean()
(x_train, _), _ = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(60000, 784).astype("float32") / 255
train_dataset = tf.data.Dataset.from_tensor_slices(x_train)
train_dataset = train_dataset.shuffle(buffer_size=1024).batch(64)
epochs = 2
# Iterate over epochs.
for epoch in range(epochs):
print("Start of epoch %d" % (epoch,))
# Iterate over the batches of the dataset.
for step, x_batch_train in enumerate(train_dataset):
with tf.GradientTape() as tape:
reconstructed = vae(x_batch_train)
# Compute reconstruction loss
loss = mse_loss_fn(x_batch_train, reconstructed)
loss += sum(vae.losses) # Add KLD regularization loss
grads = tape.gradient(loss, vae.trainable_weights)
optimizer.apply_gradients(zip(grads, vae.trainable_weights))
loss_metric(loss)
if step % 100 == 0:
print("step %d: mean loss = %.4f" % (step, loss_metric.result()))
"""
Note that since the VAE is subclassing `Model`, it features built-in training
loops. So you could also have trained it like this:
"""
vae = VariationalAutoEncoder(784, 64, 32)
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3)
vae.compile(optimizer, loss=tf.keras.losses.MeanSquaredError())
vae.fit(x_train, x_train, epochs=2, batch_size=64)
"""
## Beyond object-oriented development: the Functional API
Was this example too much object-oriented development for you? You can also
build models using the [Functional API](/guides/functional_api/). Importantly,
choosing one style or another does not prevent you from leveraging components
written in the other style: you can always mix-and-match.
For instance, the Functional API example below reuses the same `Sampling` layer
we defined in the example above:
"""
original_dim = 784
intermediate_dim = 64
latent_dim = 32
# Define encoder model.
original_inputs = tf.keras.Input(shape=(original_dim,), name="encoder_input")
x = layers.Dense(intermediate_dim, activation="relu")(original_inputs)
z_mean = layers.Dense(latent_dim, name="z_mean")(x)
z_log_var = layers.Dense(latent_dim, name="z_log_var")(x)
z = Sampling()((z_mean, z_log_var))
encoder = tf.keras.Model(inputs=original_inputs, outputs=z, name="encoder")
# Define decoder model.
latent_inputs = tf.keras.Input(shape=(latent_dim,), name="z_sampling")
x = layers.Dense(intermediate_dim, activation="relu")(latent_inputs)
outputs = layers.Dense(original_dim, activation="sigmoid")(x)
decoder = tf.keras.Model(inputs=latent_inputs, outputs=outputs, name="decoder")
# Define VAE model.
outputs = decoder(z)
vae = tf.keras.Model(inputs=original_inputs, outputs=outputs, name="vae")
# Add KL divergence regularization loss.
kl_loss = -0.5 * tf.reduce_mean(z_log_var - tf.square(z_mean) - tf.exp(z_log_var) + 1)
vae.add_loss(kl_loss)
# Train.
optimizer = tf.keras.optimizers.Adam(learning_rate=1e-3)
vae.compile(optimizer, loss=tf.keras.losses.MeanSquaredError())
vae.fit(x_train, x_train, epochs=3, batch_size=64)
"""
For more information, make sure to read the [Functional API guide](/guides/functional_api/).
"""
|
[
"tensorflow.reduce_sum",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.metrics.Mean",
"tensorflow.keras.backend.random_normal",
"tensorflow.matmul",
"tensorflow.keras.metrics.BinaryAccuracy",
"tensorflow.keras.regularizers.l2",
"tensorflow.nn.softmax",
"tensorflow.nn.relu",
"tensorflow.keras.losses.MeanSquaredError",
"tensorflow.keras.Input",
"tensorflow.keras.optimizers.Adam",
"tensorflow.exp",
"tensorflow.ones",
"tensorflow.keras.Model",
"tensorflow.random_normal_initializer",
"tensorflow.zeros_initializer",
"tensorflow.GradientTape",
"tensorflow.data.Dataset.from_tensor_slices",
"tensorflow.keras.datasets.mnist.load_data",
"tensorflow.keras.losses.BinaryCrossentropy",
"tensorflow.zeros",
"numpy.random.random",
"tensorflow.shape",
"tensorflow.square",
"tensorflow.nn.dropout"
] |
[((1342, 1357), 'tensorflow.ones', 'tf.ones', (['(2, 2)'], {}), '((2, 2))\n', (1349, 1357), True, 'import tensorflow as tf\n'), ((2132, 2147), 'tensorflow.ones', 'tf.ones', (['(2, 2)'], {}), '((2, 2))\n', (2139, 2147), True, 'import tensorflow as tf\n'), ((2825, 2840), 'tensorflow.ones', 'tf.ones', (['(2, 2)'], {}), '((2, 2))\n', (2832, 2840), True, 'import tensorflow as tf\n'), ((9266, 9289), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '(3,)'}), '(shape=(3,))\n', (9277, 9289), False, 'from tensorflow import keras\n'), ((9346, 9374), 'tensorflow.keras.Model', 'keras.Model', (['inputs', 'outputs'], {}), '(inputs, outputs)\n', (9357, 9374), False, 'from tensorflow import keras\n'), ((11174, 11189), 'tensorflow.ones', 'tf.ones', (['(2, 2)'], {}), '((2, 2))\n', (11181, 11189), True, 'import tensorflow as tf\n'), ((11199, 11214), 'tensorflow.ones', 'tf.ones', (['(2, 2)'], {}), '((2, 2))\n', (11206, 11214), True, 'import tensorflow as tf\n'), ((11434, 11472), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '(3,)', 'name': '"""inputs"""'}), "(shape=(3,), name='inputs')\n", (11445, 11472), False, 'from tensorflow import keras\n'), ((11483, 11523), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '(10,)', 'name': '"""targets"""'}), "(shape=(10,), name='targets')\n", (11494, 11523), False, 'from tensorflow import keras\n'), ((11641, 11699), 'tensorflow.keras.Model', 'keras.Model', ([], {'inputs': '[inputs, targets]', 'outputs': 'predictions'}), '(inputs=[inputs, targets], outputs=predictions)\n', (11652, 11699), False, 'from tensorflow import keras\n'), ((21502, 21547), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'learning_rate': '(0.001)'}), '(learning_rate=0.001)\n', (21526, 21547), True, 'import tensorflow as tf\n'), ((21561, 21595), 'tensorflow.keras.losses.MeanSquaredError', 'tf.keras.losses.MeanSquaredError', ([], {}), '()\n', (21593, 21595), True, 'import tensorflow as tf\n'), ((21611, 21634), 
'tensorflow.keras.metrics.Mean', 'tf.keras.metrics.Mean', ([], {}), '()\n', (21632, 21634), True, 'import tensorflow as tf\n'), ((21654, 21689), 'tensorflow.keras.datasets.mnist.load_data', 'tf.keras.datasets.mnist.load_data', ([], {}), '()\n', (21687, 21689), True, 'import tensorflow as tf\n'), ((21769, 21812), 'tensorflow.data.Dataset.from_tensor_slices', 'tf.data.Dataset.from_tensor_slices', (['x_train'], {}), '(x_train)\n', (21803, 21812), True, 'import tensorflow as tf\n'), ((22805, 22850), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'learning_rate': '(0.001)'}), '(learning_rate=0.001)\n', (22829, 22850), True, 'import tensorflow as tf\n'), ((23540, 23599), 'tensorflow.keras.Input', 'tf.keras.Input', ([], {'shape': '(original_dim,)', 'name': '"""encoder_input"""'}), "(shape=(original_dim,), name='encoder_input')\n", (23554, 23599), True, 'import tensorflow as tf\n'), ((23827, 23892), 'tensorflow.keras.Model', 'tf.keras.Model', ([], {'inputs': 'original_inputs', 'outputs': 'z', 'name': '"""encoder"""'}), "(inputs=original_inputs, outputs=z, name='encoder')\n", (23841, 23892), True, 'import tensorflow as tf\n'), ((23934, 23988), 'tensorflow.keras.Input', 'tf.keras.Input', ([], {'shape': '(latent_dim,)', 'name': '"""z_sampling"""'}), "(shape=(latent_dim,), name='z_sampling')\n", (23948, 23988), True, 'import tensorflow as tf\n'), ((24130, 24199), 'tensorflow.keras.Model', 'tf.keras.Model', ([], {'inputs': 'latent_inputs', 'outputs': 'outputs', 'name': '"""decoder"""'}), "(inputs=latent_inputs, outputs=outputs, name='decoder')\n", (24144, 24199), True, 'import tensorflow as tf\n'), ((24248, 24315), 'tensorflow.keras.Model', 'tf.keras.Model', ([], {'inputs': 'original_inputs', 'outputs': 'outputs', 'name': '"""vae"""'}), "(inputs=original_inputs, outputs=outputs, name='vae')\n", (24262, 24315), True, 'import tensorflow as tf\n'), ((24489, 24534), 'tensorflow.keras.optimizers.Adam', 'tf.keras.optimizers.Adam', ([], {'learning_rate': 
'(0.001)'}), '(learning_rate=0.001)\n', (24513, 24534), True, 'import tensorflow as tf\n'), ((5943, 5965), 'tensorflow.ones', 'tf.ones', ([], {'shape': '(3, 64)'}), '(shape=(3, 64))\n', (5950, 5965), True, 'import tensorflow as tf\n'), ((7317, 7331), 'tensorflow.zeros', 'tf.zeros', (['(1)', '(1)'], {}), '(1, 1)\n', (7325, 7331), True, 'import tensorflow as tf\n'), ((7461, 7475), 'tensorflow.zeros', 'tf.zeros', (['(1)', '(1)'], {}), '(1, 1)\n', (7469, 7475), True, 'import tensorflow as tf\n'), ((8062, 8078), 'tensorflow.zeros', 'tf.zeros', (['(1, 1)'], {}), '((1, 1))\n', (8070, 8078), True, 'import tensorflow as tf\n'), ((9516, 9540), 'numpy.random.random', 'np.random.random', (['(2, 3)'], {}), '((2, 3))\n', (9532, 9540), True, 'import numpy as np\n'), ((9542, 9566), 'numpy.random.random', 'np.random.random', (['(2, 3)'], {}), '((2, 3))\n', (9558, 9566), True, 'import numpy as np\n'), ((9768, 9792), 'numpy.random.random', 'np.random.random', (['(2, 3)'], {}), '((2, 3))\n', (9784, 9792), True, 'import numpy as np\n'), ((9794, 9818), 'numpy.random.random', 'np.random.random', (['(2, 3)'], {}), '((2, 3))\n', (9810, 9818), True, 'import numpy as np\n'), ((11533, 11555), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(10)'], {}), '(10)\n', (11551, 11555), False, 'from tensorflow import keras\n'), ((11756, 11780), 'numpy.random.random', 'np.random.random', (['(3, 3)'], {}), '((3, 3))\n', (11772, 11780), True, 'import numpy as np\n'), ((11797, 11822), 'numpy.random.random', 'np.random.random', (['(3, 10)'], {}), '((3, 10))\n', (11813, 11822), True, 'import numpy as np\n'), ((23604, 23653), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['intermediate_dim'], {'activation': '"""relu"""'}), "(intermediate_dim, activation='relu')\n", (23616, 23653), False, 'from tensorflow.keras import layers\n'), ((23680, 23719), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['latent_dim'], {'name': '"""z_mean"""'}), "(latent_dim, name='z_mean')\n", (23692, 23719), False, 
'from tensorflow.keras import layers\n'), ((23735, 23777), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['latent_dim'], {'name': '"""z_log_var"""'}), "(latent_dim, name='z_log_var')\n", (23747, 23777), False, 'from tensorflow.keras import layers\n'), ((23993, 24042), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['intermediate_dim'], {'activation': '"""relu"""'}), "(intermediate_dim, activation='relu')\n", (24005, 24042), False, 'from tensorflow.keras import layers\n'), ((24068, 24116), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['original_dim'], {'activation': '"""sigmoid"""'}), "(original_dim, activation='sigmoid')\n", (24080, 24116), False, 'from tensorflow.keras import layers\n'), ((820, 850), 'tensorflow.random_normal_initializer', 'tf.random_normal_initializer', ([], {}), '()\n', (848, 850), True, 'import tensorflow as tf\n'), ((1013, 1035), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ([], {}), '()\n', (1033, 1035), True, 'import tensorflow as tf\n'), ((5815, 5828), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (5825, 5828), True, 'import tensorflow as tf\n'), ((5870, 5883), 'tensorflow.nn.relu', 'tf.nn.relu', (['x'], {}), '(x)\n', (5880, 5883), True, 'import tensorflow as tf\n'), ((10380, 10429), 'tensorflow.keras.losses.BinaryCrossentropy', 'keras.losses.BinaryCrossentropy', ([], {'from_logits': '(True)'}), '(from_logits=True)\n', (10411, 10429), False, 'from tensorflow import keras\n'), ((10457, 10487), 'tensorflow.keras.metrics.BinaryAccuracy', 'keras.metrics.BinaryAccuracy', ([], {}), '()\n', (10485, 10487), False, 'from tensorflow import keras\n'), ((11039, 11060), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['logits'], {}), '(logits)\n', (11052, 11060), True, 'import tensorflow as tf\n'), ((19147, 19197), 'tensorflow.keras.backend.random_normal', 'tf.keras.backend.random_normal', ([], {'shape': '(batch, dim)'}), '(shape=(batch, dim))\n', (19177, 19197), True, 'import tensorflow as tf\n'), ((19524, 19573), 
'tensorflow.keras.layers.Dense', 'layers.Dense', (['intermediate_dim'], {'activation': '"""relu"""'}), "(intermediate_dim, activation='relu')\n", (19536, 19573), False, 'from tensorflow.keras import layers\n'), ((19600, 19624), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['latent_dim'], {}), '(latent_dim)\n', (19612, 19624), False, 'from tensorflow.keras import layers\n'), ((19654, 19678), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['latent_dim'], {}), '(latent_dim)\n', (19666, 19678), False, 'from tensorflow.keras import layers\n'), ((20218, 20267), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['intermediate_dim'], {'activation': '"""relu"""'}), "(intermediate_dim, activation='relu')\n", (20230, 20267), False, 'from tensorflow.keras import layers\n'), ((20296, 20344), 'tensorflow.keras.layers.Dense', 'layers.Dense', (['original_dim'], {'activation': '"""sigmoid"""'}), "(original_dim, activation='sigmoid')\n", (20308, 20344), False, 'from tensorflow.keras import layers\n'), ((22879, 22913), 'tensorflow.keras.losses.MeanSquaredError', 'tf.keras.losses.MeanSquaredError', ([], {}), '()\n', (22911, 22913), True, 'import tensorflow as tf\n'), ((24562, 24596), 'tensorflow.keras.losses.MeanSquaredError', 'tf.keras.losses.MeanSquaredError', ([], {}), '()\n', (24594, 24596), True, 'import tensorflow as tf\n'), ((1202, 1227), 'tensorflow.matmul', 'tf.matmul', (['inputs', 'self.w'], {}), '(inputs, self.w)\n', (1211, 1227), True, 'import tensorflow as tf\n'), ((2091, 2116), 'tensorflow.matmul', 'tf.matmul', (['inputs', 'self.w'], {}), '(inputs, self.w)\n', (2100, 2116), True, 'import tensorflow as tf\n'), ((2762, 2791), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['inputs'], {'axis': '(0)'}), '(inputs, axis=0)\n', (2775, 2791), True, 'import tensorflow as tf\n'), ((3829, 3854), 'tensorflow.matmul', 'tf.matmul', (['inputs', 'self.w'], {}), '(inputs, self.w)\n', (3838, 3854), True, 'import tensorflow as tf\n'), ((4679, 4704), 'tensorflow.matmul', 'tf.matmul', 
(['inputs', 'self.w'], {}), '(inputs, self.w)\n', (4688, 4704), True, 'import tensorflow as tf\n'), ((12570, 12595), 'tensorflow.matmul', 'tf.matmul', (['inputs', 'self.w'], {}), '(inputs, self.w)\n', (12579, 12595), True, 'import tensorflow as tf\n'), ((13594, 13619), 'tensorflow.matmul', 'tf.matmul', (['inputs', 'self.w'], {}), '(inputs, self.w)\n', (13603, 13619), True, 'import tensorflow as tf\n'), ((14998, 15035), 'tensorflow.nn.dropout', 'tf.nn.dropout', (['inputs'], {'rate': 'self.rate'}), '(inputs, rate=self.rate)\n', (15011, 15035), True, 'import tensorflow as tf\n'), ((19075, 19091), 'tensorflow.shape', 'tf.shape', (['z_mean'], {}), '(z_mean)\n', (19083, 19091), True, 'import tensorflow as tf\n'), ((19109, 19125), 'tensorflow.shape', 'tf.shape', (['z_mean'], {}), '(z_mean)\n', (19117, 19125), True, 'import tensorflow as tf\n'), ((22103, 22120), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (22118, 22120), True, 'import tensorflow as tf\n'), ((2662, 2684), 'tensorflow.zeros', 'tf.zeros', (['(input_dim,)'], {}), '((input_dim,))\n', (2670, 2684), True, 'import tensorflow as tf\n'), ((6634, 6655), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['inputs'], {}), '(inputs)\n', (6647, 6655), True, 'import tensorflow as tf\n'), ((7904, 7935), 'tensorflow.keras.regularizers.l2', 'tf.keras.regularizers.l2', (['(0.001)'], {}), '(0.001)\n', (7928, 7935), True, 'import tensorflow as tf\n'), ((19222, 19245), 'tensorflow.exp', 'tf.exp', (['(0.5 * z_log_var)'], {}), '(0.5 * z_log_var)\n', (19228, 19245), True, 'import tensorflow as tf\n'), ((24422, 24439), 'tensorflow.exp', 'tf.exp', (['z_log_var'], {}), '(z_log_var)\n', (24428, 24439), True, 'import tensorflow as tf\n'), ((24402, 24419), 'tensorflow.square', 'tf.square', (['z_mean'], {}), '(z_mean)\n', (24411, 24419), True, 'import tensorflow as tf\n'), ((21271, 21288), 'tensorflow.exp', 'tf.exp', (['z_log_var'], {}), '(z_log_var)\n', (21277, 21288), True, 'import tensorflow as tf\n'), ((21251, 21268), 
'tensorflow.square', 'tf.square', (['z_mean'], {}), '(z_mean)\n', (21260, 21268), True, 'import tensorflow as tf\n')]
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Multipurpose TensorFlow Docker Helper.
- Assembles Dockerfiles
- Builds images (and optionally runs image tests)
- Pushes images to Docker Hub (provided with credentials)
Logs are written to stderr; the list of successfully built images is
written to stdout.
Read README.md (in this directory) for instructions!
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
from distutils.dir_util import copy_tree
import errno
import glob
import itertools
import json
import multiprocessing
import os
import platform
import re
import shutil
import sys
import urllib
from absl import app
from absl import flags
import cerberus
import docker
import yaml
import tarfile
import tempfile
FLAGS = flags.FLAGS
# --- Docker Hub credentials / upload control ---
flags.DEFINE_string('hub_username', None,
                    'Dockerhub username, only used with --upload_to_hub')
flags.DEFINE_string(
    'hub_password', None,
    ('Dockerhub password, only used with --upload_to_hub. Use from an env param'
     ' so your password isn\'t in your history.'))
flags.DEFINE_integer('hub_timeout', 3600,
                     'Abort Hub upload if it takes longer than this.')
flags.DEFINE_string(
    'repository', 'tensorflow',
    'Tag local images as {repository}:tag (in addition to the '
    'hub_repository, if uploading to hub)')
flags.DEFINE_string(
    'hub_repository', None,
    'Push tags to this Docker Hub repository, e.g. tensorflow/tensorflow')
flags.DEFINE_boolean(
    'upload_to_hub',
    False,
    ('Push built images to Docker Hub (you must also provide --hub_username, '
     '--hub_password, and --hub_repository)'),
    short_name='u',
)
# --- Pipeline stage toggles ---
# NOTE(review): several boolean flags below default to False while their help
# text reads "Do not build ..."; presumably the help describes the default
# (disabled) behavior rather than the effect of passing the flag — confirm
# against this tool's README before relying on the help text.
flags.DEFINE_boolean(
    'print_models', False, 'print out the list of models', short_name='t')
flags.DEFINE_boolean(
    'build_packages', False, 'Do not build packages', short_name='z')
flags.DEFINE_boolean(
    'construct_dockerfiles', False, 'Do not build Dockerfiles', short_name='d')
flags.DEFINE_boolean(
    'generate_documentation', False, 'Do not create README.md', short_name='e')
flags.DEFINE_boolean(
    'keep_temp_dockerfiles',
    False,
    'Retain .temp.Dockerfiles created while building images.',
    short_name='k')
flags.DEFINE_boolean(
    'build_images', False, 'Do not build images', short_name='b')
flags.DEFINE_boolean(
    'list_images', False, 'Do not list images that would be built', short_name='c')
flags.DEFINE_boolean(
    'list_packages', False, 'Do not list packages that would be built')
# --- Testing and failure behavior ---
flags.DEFINE_string(
    'run_tests_path', None,
    ('Execute test scripts on generated Dockerfiles before pushing them. '
     'Flag value must be a full path to the "tests" directory, which is usually'
     ' $(realpath ./tests). A failed tests counts the same as a failed build.'))
flags.DEFINE_boolean(
    'stop_on_failure', False,
    ('Stop processing tags if any one build fails. If False or not specified, '
     'failures are reported but do not affect the other images.'))
flags.DEFINE_boolean(
    'dry_run',
    False,
    'Do not build or deploy anything at all.',
    short_name='n',
)
# --- Tag selection filters (regexes must match the entire tag string) ---
flags.DEFINE_string(
    'exclude_tags_matching',
    None,
    ('Regular expression that skips processing on any tag it matches. Must '
     'match entire string, e.g. ".*gpu.*" ignores all GPU tags.'),
    short_name='x')
flags.DEFINE_multi_string(
    'only_tags_matching',
    [],
    ('Regular expression that skips processing on any tag it does not match. '
     'Must match entire string, e.g. ".*gpu.*" includes only GPU tags.'),
    short_name='i')
# --- Input/output directory layout ---
flags.DEFINE_string(
    'model_dir',
    '.', 'Path to the model repo.')
flags.DEFINE_string(
    'output_dir',
    'output', 'Path to an output directory for model packages.'
    ' Will be created if it doesn\'t exist.')
flags.DEFINE_string(
    'dockerfile_dir',
    'models/dockerfiles', 'Path to an output directory for Dockerfiles.'
    ' Will be created if it doesn\'t exist.'
    ' Existing files in this directory will be deleted when new Dockerfiles'
    ' are made.',
    short_name='o')
flags.DEFINE_string(
    'partial_dir',
    './partials',
    'Path to a directory containing foo.partial.Dockerfile partial files.'
    ' can have subdirectories, e.g. "bar/baz.partial.Dockerfile".',
    short_name='p')
# --- Build parameterization ---
flags.DEFINE_multi_string(
    'release', [],
    'Set of releases to build and tag. Defaults to every release type.',
    short_name='r')
flags.DEFINE_multi_string(
    'arg', [],
    ('Extra build arguments. These are used for expanding tag names if needed '
     '(e.g. --arg _TAG_PREFIX=foo) and for using as build arguments (unused '
     'args will print a warning).'),
    short_name='a')
flags.DEFINE_boolean(
    'nocache', False,
    'Disable the Docker build cache; identical to "docker build --no-cache"')
flags.DEFINE_string(
    'spec_dir',
    './specs',
    'Path to the YAML specification directory',
    short_name='s')
# --- Spec generation (model zoo) options ---
flags.DEFINE_string(
    'framework',
    'tensorflow',
    'Name of the deep learning framework. This is being used with the '
    '--generate_new_spec arg to map the model zoo directory structure, which '
    'includes a directory for the framework.')
flags.DEFINE_string(
    'generate_new_spec',
    None,
    'Used to auto generate a spec with model package files. Specify the name '
    'for the new spec, which should be formatted like modelname-precision-mode')
flags.DEFINE_string(
    'model_download',
    None,
    'Use only with --generate_new_spec to specify the URL to download a '
    'pretrained model.')
flags.DEFINE_boolean(
    'quiet', False,
    'quiet mode',
    short_name='q')
# Schema to verify the contents of merged spec yaml with Cerberus.
# Must be converted to a dict from yaml to work.
# Note: can add python references with e.g.
# !!python/name:builtins.str
# !!python/name:__main__.funcname
# (but this may not be considered safe?)
SCHEMA_TEXT = """
header:
type: string
slice_sets:
type: dict
keyschema:
type: string
valueschema:
type: list
schema:
type: dict
schema:
add_to_name:
type: string
dockerfile_exclusive_name:
type: string
dockerfile_subdirectory:
type: string
partials:
type: list
schema:
type: string
ispartial: true
documentation:
type: dict
schema:
name:
type: string
uri:
type: string
text_replace:
type: dict
docs:
type: list
schema:
type: dict
schema:
name:
type: string
uri:
type: string
test_runtime:
type: string
required: false
tests:
type: list
default: []
schema:
type: string
args:
type: list
default: []
schema:
type: string
isfullarg: true
files:
type: list
schema:
type: dict
schema:
source:
type: string
destination:
type: string
downloads:
type: list
schema:
type: dict
schema:
source:
type: string
destination:
type: string
releases:
type: dict
keyschema:
type: string
valueschema:
type: dict
schema:
is_dockerfiles:
type: boolean
required: false
default: false
upload_images:
type: boolean
required: false
default: true
tag_specs:
type: list
required: true
schema:
type: string
"""
# Template used when generating new model spec files with a slice set.
# Two release channels are pre-declared, both starting with no tag specs.
model_spec_template = {
    'releases': {
        'versioned': {'tag_specs': []},
        'dockerfiles': {'tag_specs': []},
    },
    'slice_sets': {},
}
# Slice set template used when generating new model spec files. The single
# entry's empty fields (add_to_name, args, files, downloads) are filled in
# per generated spec.
slice_set_template = [{
    'add_to_name': '',
    'dockerfile_subdirectory': 'model_containers',
    'partials': ['model_package', 'entrypoint'],
    'documentation': {'text_replace': {}},
    'args': [],
    'files': [],
    'downloads': [],
}]
class TfDockerTagValidator(cerberus.Validator):
  """Custom Cerberus validator for TF tag spec.
  Note: Each _validate_foo function's docstring must end with a segment
  describing its own validation schema, e.g. "The rule's arguments are...". If
  you add a new validator, you can copy/paste that section.
  """
  def __init__(self, *args, **kwargs):
    # See http://docs.python-cerberus.org/en/stable/customize.html
    # 'partials' is a custom constructor argument (the dict of loaded partial
    # files); it is stored on the instance for the 'ispartial' rule below.
    if 'partials' in kwargs:
      self.partials = kwargs['partials']
    # NOTE(review): super(cerberus.Validator, self) resolves to the class
    # *after* cerberus.Validator in the MRO, i.e. it skips Validator.__init__
    # itself. Presumably intentional per the customization docs linked above,
    # but worth confirming against the cerberus version in use.
    super(cerberus.Validator, self).__init__(*args, **kwargs)
  def _validate_ispartial(self, ispartial, field, value):
    """Validate that a partial references an existing partial spec.
    Args:
      ispartial: Value of the rule, a bool
      field: The field being validated
      value: The field's value
    The rule's arguments are validated against this schema:
    {'type': 'boolean'}
    """
    # self.partials is only populated when the validator was constructed with
    # the 'partials' kwarg; schemas using 'ispartial' rely on that.
    if ispartial and value not in self.partials:
      self._error(field,
                  '{} is not present in the partials directory.'.format(value))
  def _validate_isfullarg(self, isfullarg, field, value):
    """Validate that a string is either a FULL=arg or NOT.
    Args:
      isfullarg: Value of the rule, a bool
      field: The field being validated
      value: The field's value
    The rule's arguments are validated against this schema:
    {'type': 'boolean'}
    """
    # isfullarg=True demands KEY=VALUE form; isfullarg=False demands bare KEY.
    if isfullarg and '=' not in value:
      self._error(field, '{} should be of the form ARG=VALUE.'.format(value))
    if not isfullarg and '=' in value:
      self._error(field, '{} should be of the form ARG (no =).'.format(value))
def eprint(*args, **kwargs):
  """Print to stderr, flushing immediately.

  Accepts every keyword that ``print()`` accepts, plus an optional ``quiet``
  keyword: when ``quiet`` is truthy the message is suppressed entirely.
  ``quiet`` is stripped from ``kwargs`` before forwarding to ``print()``.
  """
  # pop() both reads and removes the flag in one step, replacing the original
  # manual membership-check/read/del sequence and the non-idiomatic
  # `quiet != True` comparison.
  quiet = kwargs.pop('quiet', False)
  if not quiet:
    print(*args, file=sys.stderr, flush=True, **kwargs)
def aggregate_all_slice_combinations(spec, slice_set_names):
  """Return every cross-product combination of the named slice sets.

  Each slice dict in the result is tagged with its originating 'set_name'.
  The spec itself is never mutated (slice sets are deep-copied first).
  """
  named_sets = copy.deepcopy(spec['slice_sets'])
  groups = []
  for set_name in slice_set_names:
    group = named_sets[set_name]
    for entry in group:
      entry['set_name'] = set_name
    groups.append(group)
  # One tuple per combination, picking one slice from each group in order.
  return list(itertools.product(*groups))
def build_name_from_slices(format_string, slices, args, is_dockerfile=False):
  """Render format_string into a concrete tag name.

  Fields come from `args` first, then each slice's 'add_to_name'; when
  is_dockerfile is set, a slice's 'dockerfile_exclusive_name' (if present)
  overrides its 'add_to_name'.
  """
  fields = copy.deepcopy(args)
  for piece in slices:
    fields[piece['set_name']] = piece['add_to_name']
  if is_dockerfile:
    # Dockerfile-only names take precedence over the generic tag names.
    for piece in slices:
      if 'dockerfile_exclusive_name' in piece:
        fields[piece['set_name']] = piece['dockerfile_exclusive_name']
  return format_string.format(**fields)
def update_args_dict(args_dict, updater):
  """Merge more arg values into args_dict from a list or a dict.

  A list updater holds 'KEY=VALUE' strings (entries without '=' are
  silently ignored); a dict updater is merged key-by-key. args_dict is
  modified in place and also returned.
  """
  if isinstance(updater, list):
    for item in updater:
      key, sep, value = item.partition('=')
      # partition() returns sep='' when '=' is absent; skip those entries.
      if sep:
        args_dict[key] = value
  if isinstance(updater, dict):
    args_dict.update(updater)
  return args_dict
def get_slice_sets_and_required_args(slice_sets, tag_spec):
  """Split the {bracketed} names in a tag spec into slice sets vs. CLI args.

  For example, {FOO}{bar}{bat} finds FOO, bar, and bat; any name that is a
  key of slice_sets is a used slice set, and every other name must be
  supplied on the command line.

  Args:
    slice_sets: Dict of named slice sets
    tag_spec: The tag spec string, e.g. {_FOO}{blep}

  Returns:
    (used_slice_sets, required_args), a tuple of lists in appearance order.
  """
  bracketed_names = re.findall(r'\{([^}]+)\}', tag_spec)
  used_slice_sets = [name for name in bracketed_names if name in slice_sets]
  required_args = [name for name in bracketed_names if name not in slice_sets]
  return (used_slice_sets, required_args)
def gather_tag_args(slices, cli_input_args, required_args):
  """Build a dictionary of all the CLI and slice-specified args for a tag.

  Slice-level args are applied first, then CLI args (so the command line
  wins on conflicts). If any name in required_args is still missing, an
  error is printed and the process exits with status 1.
  """
  args = {}
  for s in slices:
    args = update_args_dict(args, s['args'])
  args = update_args_dict(args, cli_input_args)
  for arg in required_args:
    if arg not in args:
      # BUG FIX: previously `.format(arg)` was attached to only the *last*
      # adjacent string literal, so the '{}' placeholder in the first literal
      # was printed verbatim. Parenthesize the whole concatenation so the
      # placeholder is actually substituted.
      eprint(('> Error: {} is not a valid slice_set, and also isn\'t an arg '
              'provided on the command line. If it is an arg, please specify '
              'it with --arg. If not, check the slice_sets list.').format(arg))
      # sys.exit is the explicit form; the `exit` builtin is injected by the
      # `site` module and is not guaranteed under all interpreter modes.
      sys.exit(1)
  return args
def gather_slice_list_items(slices, key):
  """Flatten the lists stored under `key` across all slices (missing keys skipped)."""
  gathered = []
  for entry in slices:
    if key in entry:
      gathered.extend(entry[key])
  return gathered
def find_first_slice_value(slices, key):
    """For a list of slices, get the first non-None value for a certain key.

    Returns None when no slice carries the key (or every value is None).
    """
    return next(
        (entry[key] for entry in slices if key in entry and entry[key] is not None),
        None)
def assemble_tags(spec, cli_args, enabled_releases, all_partials):
    """Gather all the tags based on our spec.

    Args:
        spec: Nested dict containing full Tag spec
        cli_args: List of ARG=foo arguments to pass along to Docker build
        enabled_releases: List of releases to parse. Empty list = all
        all_partials: Dict of every partial, for reference

    Returns:
        Dict of tags and how to build them
    """
    tag_data = collections.defaultdict(list)
    for name, release in spec['releases'].items():
        for tag_spec in release['tag_specs']:
            if enabled_releases and name not in enabled_releases:
                eprint(('> Skipping release {}'.format(name)), quiet=FLAGS.quiet)
                continue
            used_slice_sets, required_cli_args = get_slice_sets_and_required_args(
                spec['slice_sets'], tag_spec)
            # One tag per combination of slices (one slice drawn from each
            # used slice set).
            slice_combos = aggregate_all_slice_combinations(spec, used_slice_sets)
            for slices in slice_combos:
                tag_args = gather_tag_args(slices, cli_args, required_cli_args)
                tag_name = build_name_from_slices(tag_spec, slices, tag_args,
                                                  release['is_dockerfiles'])
                used_partials = gather_slice_list_items(slices, 'partials')
                used_tests = gather_slice_list_items(slices, 'tests')
                documentation = {}
                # Documentation, when present, is read from the *last* slice
                # of the combination; its doc partials are merged into a
                # single 'contents' string.
                if 'documentation' in slices[len(slices)-1]:
                    documentation = slices[len(slices)-1]['documentation']
                    docs_list = gather_slice_list_items([documentation], 'docs')
                    documentation_contents = merge_docs(docs_list)
                    documentation.update({ 'contents': documentation_contents })
                else:
                    docs_list = []
                files_list = gather_slice_list_items(slices, 'files')
                downloads_list = gather_slice_list_items(slices, 'downloads')
                test_runtime = find_first_slice_value(slices, 'test_runtime')
                dockerfile_subdirectory = find_first_slice_value(
                    slices, 'dockerfile_subdirectory')
                dockerfile_contents = merge_partials(spec['header'], used_partials,
                                                     all_partials)
                # Everything a later phase needs to write packages,
                # Dockerfiles, docs, or build/upload images for this tag.
                tag_data[tag_name].append({
                    'release': name,
                    'tag_spec': tag_spec,
                    'is_dockerfiles': release['is_dockerfiles'],
                    'upload_images': release['upload_images'],
                    'cli_args': tag_args,
                    'dockerfile_subdirectory': dockerfile_subdirectory or '',
                    'partials': used_partials,
                    'tests': used_tests,
                    'test_runtime': test_runtime,
                    'dockerfile_contents': dockerfile_contents,
                    'files': files_list,
                    'downloads': downloads_list,
                    'documentation': documentation,
                })
    return tag_data
def merge_partials(header, used_partials, all_partials):
    """Merge all partial contents with their header.

    Returns the header followed by each named partial's contents, joined
    with newlines, in the order given by *used_partials*.
    """
    pieces = [header]
    for partial_name in used_partials:
        pieces.append(all_partials[partial_name])
    return '\n'.join(pieces)
def doc_contents(path):
    """
    Read document and return contents

    Args:
        path (string): read partials from this directory.

    Returns:
        contents of path.
    """
    try:
        with open(path, 'r', encoding="utf-8") as doc_file:
            return doc_file.read()
    except Exception as e:
        # Report which file failed, then propagate to the caller.
        eprint("error reading {} exception: {}".format(path, e))
        raise e
def merge_docs(docs_list):
    """Build the README.md document by concatenating each doc's file contents."""
    sections = []
    for doc in docs_list:
        # 'name' is read (and required) but not used for the merged output.
        name = doc['name']
        sections.append(doc_contents(doc['uri']) + '\n')
    return ''.join(sections)
def upload_in_background(hub_repository, dock, image, tag):
    """Upload a docker image (to be used by multiprocessing).

    Args:
        hub_repository: Docker Hub repository string to tag and push to.
        dock: docker client (the object whose .images.push is called).
        image: built docker image object to tag.
        tag: tag string to apply and push.
    """
    # Tag the image locally under the hub repository, then push; push output
    # is echoed through eprint (suppressed when --quiet).
    image.tag(hub_repository, tag=tag)
    eprint(dock.images.push(hub_repository, tag=tag), quiet=FLAGS.quiet)
def mkdir_p(path):
    """Create a directory and its parents, even if it already exists."""
    try:
        os.makedirs(path)
    except OSError as err:
        # "Already exists" is the one acceptable failure; anything else
        # (permissions, bad path component, ...) is re-raised.
        if err.errno == errno.EEXIST:
            return
        raise
def delete_dockerfiles(dir_path):
    """Recursively, list a directory content and delete Dockerfiles if exist."""
    for entry in os.listdir(dir_path):
        entry_path = os.path.join(dir_path, entry)
        try:
            if os.path.isdir(entry_path):
                # Recurse into subdirectories.
                delete_dockerfiles(entry_path)
            elif os.path.isfile(entry_path) and entry_path.endswith(".Dockerfile"):
                os.unlink(entry_path)
        except Exception as e:
            # Best-effort cleanup: report and keep going.
            print(e)
def gather_existing_partials(partial_path):
    """Find and read all available partials.

    Args:
        partial_path (string): read partials from this directory.

    Returns:
        Dict[string, string] of partial short names (like "ubuntu/python" or
        "bazel") to the full contents of that partial.
    """
    partials = {}
    # partial_dir/foo/bar.partial.Dockerfile -> foo/bar
    prefix_len = len(partial_path) + 1
    suffix_len = len('.partial.Dockerfile')
    for dirpath, _, filenames in os.walk(partial_path):
        for filename in filenames:
            fullpath = os.path.join(dirpath, filename)
            if '.partial.Dockerfile' not in fullpath:
                eprint(('> Probably not a problem: skipping {}, which is not a '
                        'partial.').format(fullpath), quiet=FLAGS.quiet)
                continue
            simple_name = fullpath[prefix_len:-suffix_len]
            with open(fullpath, 'r') as partial_file:
                try:
                    partials[simple_name] = partial_file.read()
                except Exception as e:
                    eprint("error reading {} exception: {}".format(simple_name, e))
                    sys.exit(1)
    return partials
def get_package_name(package_def):
    """Return the PACKAGE_NAME build arg from a tag definition, or None.

    Looks up package_def['cli_args']['PACKAGE_NAME']; missing keys at either
    level yield None.
    """
    return package_def.get("cli_args", {}).get("PACKAGE_NAME")
def write_package(package_def):
    """Create the model package tar.gz for a tag definition, if it names one.

    Stages everything listed under package_def['files'] (copied from the model
    zoo tree) and package_def['downloads'] (fetched by URL) into a temp dir,
    then tars the temp dir into FLAGS.output_dir/<PACKAGE_NAME>.tar.gz.
    Does nothing when the tag definition has no PACKAGE_NAME cli arg.
    """
    output_dir = os.path.join(os.getcwd(), FLAGS.output_dir)
    if not os.path.isdir(output_dir):
        eprint(">> Creating directory: {}".format(output_dir), quiet=FLAGS.quiet)
        os.mkdir(output_dir)
    package = get_package_name(package_def)
    if package != None:
        tar_file = os.path.join(FLAGS.output_dir, "{}.tar.gz".format(package))
        eprint("> Creating package: {}".format(tar_file), quiet=FLAGS.quiet)
        try:
            # NOTE(review): if mkdtemp itself raises, the finally block below
            # references temp_dir before assignment — confirm acceptable.
            temp_dir = tempfile.mkdtemp()
            # Grab things from the files list
            model_dir = os.path.join(os.getcwd(), FLAGS.model_dir)
            if "files" in package_def.keys():
                for item in package_def["files"]:
                    source = os.path.join(model_dir, item["source"])
                    destination = os.path.join(temp_dir, item["destination"])
                    if os.path.isdir(source):
                        if not os.path.isdir(os.path.dirname(destination)):
                            os.makedirs(os.path.dirname(destination))
                        copy_tree(source, destination)
                        eprint("> Copied {} to {}".format(source, destination), quiet=FLAGS.quiet)
                        # Doc partials (.docs) are build-time only; keep them
                        # out of the shipped package.
                        doc_partials_dir = os.path.join(destination, ".docs")
                        if os.path.isdir(doc_partials_dir):
                            shutil.rmtree(doc_partials_dir, ignore_errors=True)
                    elif os.path.isfile(source):
                        # Ensure that the directories exist first, otherwise the file copy will fail
                        if not os.path.isdir(os.path.dirname(destination)):
                            os.makedirs(os.path.dirname(destination))
                        shutil.copy(source, destination)
                        eprint("> Copied {} to {}".format(source, destination), quiet=FLAGS.quiet)
                    else:
                        eprint("ERROR: Unable to find file or directory: {}".format(source))
                        sys.exit(1)
            # Grab things from the downloads list
            if "downloads" in package_def.keys():
                for item in package_def["downloads"]:
                    source = item["source"]
                    destination = os.path.join(temp_dir, item["destination"])
                    # Ensure that the directories exist first, otherwise the file copy will fail
                    if not os.path.isdir(os.path.dirname(destination)):
                        os.makedirs(os.path.dirname(destination))
                    urllib.request.urlretrieve(source, destination)
                    eprint("Copied {} to {}".format(source, destination), quiet=FLAGS.quiet)
            # Write tar file
            eprint("Writing {} to {}".format(temp_dir, tar_file), quiet=FLAGS.quiet)
            with tarfile.open(tar_file, "w:gz") as tar:
                tar.add(temp_dir, arcname=package)
        finally:
            eprint("Deleting temp directory: {}".format(temp_dir), quiet=FLAGS.quiet)
            shutil.rmtree(temp_dir)
def update_spec(a, b):
    """Merge two dictionary specs into one, recursing through any embedded dicts.

    Mutates and returns *a*. Nested dicts merge recursively; lists of scalars
    merge as a set union; a list of dicts for an existing key (a duplicate
    slice set) or a conflicting scalar value aborts the program.
    """
    for key, new_value in b.items():
        if isinstance(new_value, dict):
            a[key] = update_spec(a.get(key, {}), new_value)
            continue
        if isinstance(new_value, list) and key in a:
            if isinstance(new_value[0], dict):
                # If a list of dicts is detected for an existing key, reject it
                # This is a duplicate slice set
                eprint('Duplicate slice set found for {}'.format(key))
                exit(1)
            a[key] = list(set(a[key]).union(new_value))
        elif key in a and a[key] != new_value:
            # If a string value for an existing key is being overwritten with
            # a different value, the specs are ambiguous
            eprint('Conflicting values found in spec files for key {}'.format(key))
            eprint(' {} vs. {}'.format(a[key], new_value))
            exit(1)
        else:
            a[key] = new_value
    return a
def get_use_case_directory(framework, model_name, precision, mode):
    """
    Searches the model zoo repo to find a matching model/precision/mode to get
    the use case directory.

    Returns the use case string and the model name string, since sometimes the
    model name used in the model zoo will be slightly different than the model
    name in the spec since the spec always uses dashes and the model zoo
    sometimes has underscores.
    """
    zoo_model_name = model_name
    # Expected layout: benchmarks/<use_case>/<framework>/<model>/<mode>/<precision>
    search_path = os.path.join(os.getcwd(), FLAGS.model_dir, 'benchmarks', '*',
                               framework, zoo_model_name, mode, precision)
    matches = glob.glob(search_path)
    if len(matches) > 1:
        sys.exit('Found multiple directory matches in the model repo for '
                 '{}'.format(search_path))
    elif len(matches) == 0:
        # try replacing - with _ in the model name, and search again
        zoo_model_name = zoo_model_name.replace('-', '_')
        original_search_path = search_path
        search_path = os.path.join(os.getcwd(), FLAGS.model_dir, 'benchmarks',
                                   '*',
                                   framework, zoo_model_name, mode, precision)
        matches = glob.glob(search_path)
        if len(matches) == 0:
            # NOTE(review): message contains a duplicated "found for" —
            # candidate wording fix.
            sys.exit('No matching model directory was found for found for {} '
                     'or {}'.format(original_search_path, search_path))
    # use the directory path to find use case (which should be right after framework)
    # NOTE(review): splitting on '/' assumes POSIX path separators — confirm
    # this tool is not expected to run on Windows.
    dir_list = matches[0].split('/')
    use_case = dir_list[dir_list.index(framework) - 1]
    return use_case, zoo_model_name
def auto_generate_package_file_list(framework, use_case, model_name, precision, mode):
    """
    Auto-generates the list of model package files for the specified model.
    Files that are included are:
    - benchmarks/common
    - benchmarks/launch_benchmark.py
    - model/common
    - quickstart/common
    - the benchmarks directory for the model/mode/precision
    - README file for the model from the benchmarks directory
    - __init__.py files from the benchmarks directory
    - the models directory for the model/mode/precision, if found
    - extra 'common' folders for the model in the benchmarks and models directories, if found
    - quickstart folder for the model

    Returns the list of {'source': ..., 'destination': ...} dicts sorted by
    'source'. Creates the model's quickstart folder (with a template script)
    if it does not exist yet.
    """
    model_dir = os.path.join(os.getcwd(), FLAGS.model_dir)
    # common directories/files
    model_package_files = [
        {'source': 'benchmarks/common', 'destination': 'benchmarks/common'},
        {'source': 'benchmarks/launch_benchmark.py', 'destination': 'benchmarks/launch_benchmark.py'},
        {'source': 'models/common', 'destination': 'models/common'},
        {'source': 'quickstart/common', 'destination': 'quickstart/common'}
    ]
    # benchmarks folder and the README.md
    benchmarks_folder = os.path.join('benchmarks', use_case, framework,
                                     model_name, mode, precision)
    model_package_files.append({'source': benchmarks_folder,
                                'destination': benchmarks_folder})
    model_readme = os.path.join('benchmarks', use_case, framework,
                                model_name, 'README.md')
    model_package_files.append({'source': model_readme,
                                'destination': model_readme})
    # __init__.py files in the benchmarks directory
    path = 'benchmarks'
    for folder in [use_case, framework, model_name, mode]:
        init_file_path = os.path.join(model_dir, path, folder, '__init__.py')
        if os.path.exists(init_file_path):
            model_package_files.append({'source': os.path.join(path, folder, '__init__.py'),
                                        'destination': os.path.join(path, folder, '__init__.py')})
        path = os.path.join(path, folder)
    # models directory folders (these don't exist for every model - check before appending)
    model_folder = os.path.join('models', use_case, framework, model_name,
                                mode, precision)
    if os.path.exists(os.path.join(model_dir, model_folder)):
        model_package_files.append({'source': model_folder,
                                    'destination': model_folder})
    else:
        # try without the precision folder
        model_folder = os.path.join('models', use_case, framework,
                                    model_name, mode)
        if os.path.exists(os.path.join(model_dir, model_folder)):
            model_package_files.append({'source': model_folder,
                                        'destination': model_folder})
        # try without the mode folder
        model_folder = os.path.join('models', use_case, framework,
                                    model_name, precision)
        if os.path.exists(os.path.join(model_dir, model_folder)):
            model_package_files.append({'source': model_folder,
                                        'destination': model_folder})
    # add the model's quickstart folder
    quickstart_folder = os.path.join('quickstart', use_case, framework,
                                     model_name, mode, precision)
    quickstart_folder_full_path = os.path.join(model_dir, quickstart_folder)
    if not os.path.exists(quickstart_folder_full_path):
        # Seed a brand-new quickstart folder with the template script so the
        # model owner has a starting point.
        os.makedirs(quickstart_folder_full_path)
        template_script = "quickstart_template.sh"
        shutil.copyfile("./{}".format(template_script), os.path.join(
            quickstart_folder_full_path, template_script))
        eprint("Added a template for a quickstart script at: {}\n".format(
            os.path.join(quickstart_folder, template_script)), quiet=FLAGS.quiet)
    model_package_files.append({'source': quickstart_folder,
                                'destination': 'quickstart'})
    # look for extra 'common' folders
    for folder in ['benchmarks', 'models', 'quickstart']:
        # check for a common folder for the model/mode
        # BUG FIX: the original joined the literal string 'use_case' instead of
        # the use_case argument, so these existence checks could never match a
        # real zoo path (every other join in this function uses the variable).
        common_model_folder = os.path.join(folder, use_case, framework,
                                           model_name, mode, 'common')
        if os.path.exists(os.path.join(model_dir, common_model_folder)):
            model_package_files.append({'source': common_model_folder,
                                        'destination': common_model_folder})
        # check for a common folder for the model (without mode)
        common_model_folder = os.path.join(folder, use_case, framework,
                                           model_name, 'common')
        if os.path.exists(os.path.join(model_dir, common_model_folder)):
            model_package_files.append({'source': common_model_folder,
                                        'destination': common_model_folder})
    # return the list of dictionaries sorted by 'source'
    return sorted(model_package_files, key=lambda f: f['source'])
def auto_generate_documentation_list(framework, use_case, model_name, precision, mode):
    """
    Auto-generates the list of model documentation entries {name: <section>, uri: <uri>} for the specified model.
    Entries that are included are:
    - name: Title
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/title.md
    - name: Description
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/description.md
    - name: Download link
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/download.md
    - name: Datasets
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/datasets.md
    - name: Quick Start Scripts
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/quickstart.md
    - name: Bare Metal
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/baremetal.md
    - name: Docker
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/docker.md
    - name: License link
      uri: models/quickstart/use_case/tensorflow/model_name/mode/docs/license.md
    """
    model_dir = os.path.join(os.getcwd(), FLAGS.model_dir)
    # the model's documentation folder
    docs_folder = os.path.join('quickstart', use_case, framework,
                               model_name, mode, precision, ".docs")
    docs_folder_full_path = os.path.join(model_dir, docs_folder)
    # Seed the model's .docs folder from the local ./docs templates on first run.
    if os.path.exists(docs_folder_full_path) == False:
        shutil.copytree("./docs", docs_folder_full_path)
        eprint("> Copied {} to {}".format("./docs", docs_folder_full_path), quiet=FLAGS.quiet)
    markdowns = os.listdir(docs_folder_full_path)
    readme_folder = os.path.join(os.path.basename(model_dir), 'quickstart', use_case, framework,
                                 model_name, mode, precision)
    documentation = {"name": "README.md", "uri": readme_folder, "docs": []}
    doc_partials = []
    for markdown in markdowns:
        # NOTE(review): partials are read from the local ./docs templates, not
        # from the copied .docs folder — confirm that is intentional.
        markdown_source_path = os.path.join(os.getcwd(), "docs", markdown)
        markdown_destination_path = os.path.join(os.path.basename(model_dir), docs_folder, markdown)
        if markdown.endswith('.md'):
            with open(markdown_source_path, 'r', encoding='utf8') as markdown_file:
                try:
                    # Each template's first line encodes its section order and
                    # title, e.g. "<!--- 10. Datasets -->".
                    first_line = markdown_file.readline().strip()
                    matched = re.search("^.* ([0-9]*)\. (.*) -->$", first_line)
                    if matched:
                        order = matched.group(1).strip()
                        name = matched.group(2).strip()
                        doc_partials.append({"name": name, "order": int(order), "uri": markdown_destination_path})
                except Exception as e:
                    eprint("exception in {}: {}".format(markdown_source_path, e))
                    raise e
    # Emit the docs entries sorted by their declared order number.
    doc_partials_sorted = sorted(doc_partials, key=lambda f: f['order'])
    for doc_partial in doc_partials_sorted:
        if 'name' in doc_partial and 'uri' in doc_partial:
            documentation['docs'].append({"name": doc_partial['name'], "uri": doc_partial['uri']})
    return documentation
def generate_doc_text_replace_options(use_case, model_name, precision, mode):
    """
    Returns a dictionary of the default text replace options that are used for
    generating documentation. This is used as the default key/value pairs when
    a spec does not define its own text_replace section.
    """
    # Define keywords to replace and try to set proper capitalization for
    # common words in model names and precisions. This does not cover the
    # preferred formatting for *ALL* models, but it covers some of the basics.
    # The final model name is defined in the spec yaml.
    name_fixups = (("Ssd ", "SSD-"), ("Resnet", "ResNet"),
                   ("Mobilenet", "MobileNet"), ("Densenet", "DenseNet"),
                   ("Bert", "BERT"), ("Rfcn", "RFCN"), ("Gnmt", "GNMT"))
    formatted_model_name = model_name.title().replace("-", " ")
    for old, new in name_fixups:
        formatted_model_name = formatted_model_name.replace(old, new)

    precision_fixups = (("fp32", "FP32"), ("int8", "Int8"),
                        ("bfloat16", "BFloat16"))
    formatted_precision = precision
    for old, new in precision_fixups:
        formatted_precision = formatted_precision.replace(old, new)

    package_base = "{}-{}-{}".format(model_name, precision, mode)
    return {
        "<model name>": formatted_model_name,
        "<precision>": formatted_precision,
        "<mode>": mode,
        "<package url>": "",
        "<package name>": "{}.tar.gz".format(package_base),
        "<package dir>": package_base,
        "<docker image>": "",
        "<use case>": use_case
    }
def auto_generate_model_spec(spec_name):
    """
    Creates a new spec file for the specified model. The spec_name should be
    formatted like modelname-precision-mode (i.e. resnet50-fp32-inference).
    The spec yaml file will be written to the tools/docker/specs
    directory. If a spec file already exists with the same name, the script
    will exit with an error message.

    This function parses the model name, precision, and mode out of the
    spec_name and then maps that to directories in the model zoo.
    """
    # check if spec file for this model/precision/mode already exists
    spec_file_name = '{}_spec.yml'.format(spec_name)
    spec_file_path = os.path.join(FLAGS.spec_dir, spec_file_name)
    if os.path.isfile(spec_file_path):
        sys.exit('The spec file already exists: {}'.format(spec_file_name))
    # regex to parse out the model name-precision-mode from the spec name
    # NOTE(review): findall(...)[0] raises IndexError (not the friendly
    # sys.exit below) when spec_name has no '-' separated groups — confirm
    # whether that input is possible from the CLI.
    regex_pattern = r'(\S+)-(\S+)-(\S+)'
    matched_groups = re.findall(regex_pattern, spec_name)[0]
    if len(matched_groups) != 3:
        error_message = 'Unexpected slice name format: {}. Regex couldn\'t parse ' \
                        'out model name, precision, and mode. Expected 3 ' \
                        'groups but found {}'.format(spec_name, len(matched_groups))
        sys.exit(error_message)
    model_name = matched_groups[0]
    precision = matched_groups[1]
    mode = matched_groups[2]
    framework = FLAGS.framework
    # Map the parsed name onto actual model zoo directories; the zoo may use
    # underscores where the spec name uses dashes.
    zoo_use_case, zoo_model_name = get_use_case_directory(framework, model_name, precision, mode)
    use_case_dashes = zoo_use_case.replace('_', '-')
    eprint('\nUse case: {}\nFramework: {}\nModel name: {}\nMode: {}\nPrecision: {}\n'.format(
        use_case_dashes, framework, model_name, mode, precision), quiet=FLAGS.quiet)
    # grab at copy of the model spec template and edit it for this model
    model_spec = model_spec_template.copy()
    model_spec['releases']['versioned']['tag_specs'] = \
        ['{_TAG_PREFIX}{intel-tf}{' + use_case_dashes + '}{' + spec_name + '}']
    model_spec['releases']['dockerfiles']['tag_specs'] = \
        ['{intel-tf}{' + use_case_dashes + '}{' + spec_name + '}']
    # grab a copy of the slice set template and edit it for this model
    # NOTE(review): .copy() is shallow — the nested dicts/lists edited below
    # are shared with the template object; confirm the template is not reused
    # within one run.
    model_slice_set = slice_set_template.copy()
    model_slice_set[0]['add_to_name'] = '-{}'.format(spec_name)
    model_slice_set[0]['args'].append("PACKAGE_NAME={}".format(spec_name))
    model_slice_set[0]['files'] = auto_generate_package_file_list(
        framework, zoo_use_case, zoo_model_name, precision, mode)
    model_slice_set[0]['documentation'] = auto_generate_documentation_list(
        framework, zoo_use_case, zoo_model_name, precision, mode)
    # add text replace options for the documentation
    text_replace_dict = generate_doc_text_replace_options(
        zoo_use_case, zoo_model_name, precision, mode)
    model_slice_set[0]['documentation']['text_replace'] = text_replace_dict
    # add a download, if there's one defined
    if FLAGS.model_download:
        model_url = FLAGS.model_download
        model_filename = os.path.basename(model_url)
        model_slice_set[0]['downloads'].append(
            {'source': model_url, 'destination': model_filename})
    # add slice set section
    model_spec['slice_sets'][spec_name] = model_slice_set
    # write the model spec to a file
    with open(spec_file_path, 'w') as f:
        yaml.dump(model_spec, f)
    # print out info for the user to see the spec and file name
    eprint(yaml.dump(model_spec), quiet=FLAGS.quiet)
    eprint("Wrote the spec file to your directory at "
           "tools/docker/specs/{}\nPlease edit the file if additional "
           "files, partials, or downloads are needed.\n".format(spec_file_name))
    # print out the documentation text_replace options
    eprint("The spec file has documentation text replacement setup for the following key/values:")
    for k, v in text_replace_dict.items():
        eprint(" {}: {}".format(k, v))
    eprint("The text replacement will happen when the README.md is generated "
           "from the doc partials.\nThe key/values can be edited in the spec "
           "yaml file.\n")
    # print out a note about the doc partials
    eprint("Documentation partial files were written to your intelai/models "
           "directory at:\n{}\nPlease edit these files to fill in the "
           "information for your model.\n".format(
               os.path.join('quickstart', zoo_use_case, framework, zoo_model_name,
                            mode, precision, ".docs")))
def main(argv):
    """Drive the spec -> packages/Dockerfiles/images pipeline.

    Phases, each gated by a FLAGS option: optionally generate a new model
    spec and exit; load and merge all spec yaml files; validate the combined
    spec against the schema; assemble tags; optionally print models/images/
    packages listings and exit; then, per tag: write packages, write
    Dockerfiles, generate READMEs, build and test images, and upload them to
    Docker Hub.
    """
    if len(argv) > 1:
        raise app.UsageError('Too many command-line arguments.')

    if FLAGS.generate_new_spec:
        auto_generate_model_spec(FLAGS.generate_new_spec.strip())
        # exit since the user needs to fill in things like partials and build args
        # we can't build dockerfiles/images in the same run as creating the spec
        sys.exit(0)

    # Read the spec files into one dict, used for everything
    tag_spec = {}
    for spec_file in os.listdir(FLAGS.spec_dir):
        # NOTE(review): the loop variable is rebound to the open file object
        # here, so the exception message below prints the file object rather
        # than the file name.
        with open(os.path.join(FLAGS.spec_dir, spec_file), 'r') as spec_file:
            try:
                spec_contents = yaml.safe_load(spec_file)
                update_spec(tag_spec, spec_contents)
            except Exception as e:
                eprint("exception in {}: {}".format(spec_file, e))
                raise e

    # Get existing partial contents
    partials = gather_existing_partials(FLAGS.partial_dir)

    # Abort if spec yaml is invalid
    schema = yaml.safe_load(SCHEMA_TEXT)
    v = TfDockerTagValidator(schema, partials=partials)
    if not v.validate(tag_spec):
        eprint('> Error: Combined spec is invalid! The errors are:')
        eprint(yaml.dump(v.errors, indent=2))
        exit(1)
    tag_spec = v.normalized(tag_spec)

    # Assemble tags and images used to build them
    all_tags = assemble_tags(tag_spec, FLAGS.arg, FLAGS.release, partials)

    # --print_models: list the last {bracketed} token of each dockerfiles
    # tag spec (the model slice name) and exit.
    if FLAGS.print_models:
        models = ["all"]
        for tag, tag_defs in all_tags.items():
            for tag_def in tag_defs:
                if 'is_dockerfiles' in tag_def:
                    if tag_def['is_dockerfiles'] == True:
                        lst = re.findall('{([^{}]*)}', tag_def['tag_spec'])
                        if lst is not None and len(lst) > 0:
                            target = lst[len(lst)-1]
                            models.append(target)
        eprint('{}'.format(" ".join(models)))
        sys.exit(0)

    # --list_images: print "<model slice> <repository:tag>" pairs and exit.
    if FLAGS.list_images:
        for tag, tag_defs in all_tags.items():
            for tag_def in tag_defs:
                if 'tag_spec' in tag_def:
                    lst = re.findall('{([^{}]*)}', tag_def['tag_spec'])
                    if lst is not None and len(lst) > 0:
                        target = lst[len(lst)-1]
                        image = '{}:{}'.format(FLAGS.repository, tag)
                        eprint('{} {}'.format(target, image))
        sys.exit(0)

    # --list_packages: print "<model slice> <package tar path>" pairs and exit.
    if FLAGS.list_packages:
        for tag, tag_defs in all_tags.items():
            for tag_def in tag_defs:
                if 'tag_spec' in tag_def:
                    lst = re.findall('{([^{}]*)}', tag_def['tag_spec'])
                    if lst is not None and len(lst) > 0:
                        target = lst[len(lst)-1]
                        package = get_package_name(tag_def)
                        if package != None:
                            tar_file = os.path.join(FLAGS.output_dir, "{}.tar.gz".format(package))
                            eprint('{} {}'.format(target, tar_file))
        sys.exit(0)

    # Empty Dockerfile directory if building new Dockerfiles
    if FLAGS.construct_dockerfiles and not FLAGS.only_tags_matching:
        eprint('> Emptying Dockerfile dir "{}"'.format(FLAGS.dockerfile_dir), quiet=FLAGS.quiet)
        delete_dockerfiles(FLAGS.dockerfile_dir)

    # Set up Docker helper
    dock = docker.from_env()

    # Login to Docker if uploading images
    if FLAGS.upload_to_hub:
        # NOTE(review): the first error goes through print() while the other
        # two use eprint() — likely an inconsistency.
        if not FLAGS.hub_username:
            print('> Error: please set --hub_username when uploading to Dockerhub.')
            exit(1)
        if not FLAGS.hub_repository:
            eprint(
                '> Error: please set --hub_repository when uploading to Dockerhub.')
            exit(1)
        if not FLAGS.hub_password:
            eprint('> Error: please set --hub_password when uploading to Dockerhub.')
            exit(1)
        dock.login(
            username=FLAGS.hub_username,
            password=FLAGS.hub_password,
        )

    # Each tag has a name ('tag') and a definition consisting of the contents
    # of its Dockerfile, its build arg list, etc.
    failed_tags = []
    succeeded_tags = []
    for tag, tag_defs in all_tags.items():
        for tag_def in tag_defs:
            eprint('> Working on {}'.format(tag), quiet=FLAGS.quiet)

            # Tag include/exclude filters from the command line.
            if FLAGS.exclude_tags_matching and re.match(FLAGS.exclude_tags_matching,
                                                        tag):
                eprint('>> Excluded due to match against "{}".'.format(
                    FLAGS.exclude_tags_matching), quiet=FLAGS.quiet)
                continue
            if FLAGS.only_tags_matching and not [x for x in FLAGS.only_tags_matching if re.match(x, tag)]:
                eprint('>> Excluded due to failure to match against "{}".'.format(
                    FLAGS.only_tags_matching), quiet=FLAGS.quiet)
                continue

            # Write packages to the output_dir
            if FLAGS.build_packages:
                write_package(tag_def)

            # Write releases marked "is_dockerfiles" into the Dockerfile directory
            if FLAGS.construct_dockerfiles and tag_def['is_dockerfiles']:
                path = os.path.join(FLAGS.dockerfile_dir,
                                    tag_def['dockerfile_subdirectory'],
                                    tag + '.Dockerfile')
                eprint('>> Writing {}...'.format(path), quiet=FLAGS.quiet)
                if not FLAGS.dry_run:
                    mkdir_p(os.path.dirname(path))
                    with open(path, 'w') as f:
                        f.write(tag_def['dockerfile_contents'])

            # Generate the README.md, applying any text_replace key/values
            # from the spec to the merged doc contents.
            if FLAGS.generate_documentation:
                documentation = tag_def['documentation']
                text_replace = tag_def['documentation']['text_replace'] \
                    if 'text_replace' in tag_def['documentation'] else {}
                if 'contents' in documentation:
                    readme = os.path.join(documentation['uri'], documentation['name'])
                    with open(readme, 'w', encoding="utf-8") as f:
                        for k, v in text_replace.items():
                            documentation['contents'] = documentation['contents'].replace(k, v)
                        f.write(documentation['contents'])

            # Don't build any images for dockerfile-only releases
            if not FLAGS.build_images:
                continue

            # Only build images for host architecture
            proc_arch = platform.processor()
            is_x86 = proc_arch.startswith('x86')
            if (is_x86 and any([arch in tag for arch in ['ppc64le']]) or
                    not is_x86 and proc_arch not in tag):
                continue

            # Generate a temporary Dockerfile to use to build, since docker-py
            # needs a filepath relative to the build context (i.e. the current
            # directory)
            dockerfile = os.path.join(FLAGS.dockerfile_dir, tag + '.temp.Dockerfile')
            if not FLAGS.dry_run:
                with open(dockerfile, 'w') as f:
                    f.write(tag_def['dockerfile_contents'])
            eprint('>> (Temporary) writing {}...'.format(dockerfile), quiet=FLAGS.quiet)

            repo_tag = '{}:{}'.format(FLAGS.repository, tag)
            eprint('>> Building {} using build args:'.format(repo_tag), quiet=FLAGS.quiet)
            for arg, value in tag_def['cli_args'].items():
                eprint('>>> {}={}'.format(arg, value), quiet=FLAGS.quiet)

            # Note that we are NOT using cache_from, which appears to limit
            # available cache layers to those from explicitly specified layers. Many
            # of our layers are similar between local builds, so we want to use the
            # implied local build cache.
            tag_failed = False
            image, logs = None, []
            if not FLAGS.dry_run:
                try:
                    # Use low level APIClient in order to stream log output
                    resp = dock.api.build(
                        timeout=FLAGS.hub_timeout,
                        path='.',
                        nocache=FLAGS.nocache,
                        quiet=FLAGS.quiet,
                        dockerfile=dockerfile,
                        buildargs=tag_def['cli_args'],
                        tag=repo_tag)
                    last_event = None
                    image_id = None
                    # Manually process log output extracting build success and image id
                    # in order to get built image
                    while True:
                        try:
                            output = next(resp).decode('utf-8')
                            json_output = json.loads(output.strip('\r\n'))
                            if 'stream' in json_output:
                                eprint(json_output['stream'], end='', quiet=FLAGS.quiet)
                                match = re.search(r'(^Successfully built |sha256:)([0-9a-f]+)$',
                                                  json_output['stream'])
                                if match:
                                    image_id = match.group(2)
                                last_event = json_output['stream']
                                # collect all log lines into the logs object
                                logs.append(json_output)
                        except StopIteration:
                            eprint(('Docker image build complete.'), quiet=FLAGS.quiet)
                            break
                        except ValueError:
                            eprint('> Error parsing from docker image build: {}'.format(output))
                    # If Image ID is not set, the image failed to built properly. Raise
                    # an error in this case with the last log line and all logs
                    if image_id:
                        image = dock.images.get(image_id)
                    else:
                        raise docker.errors.BuildError(last_event or 'Unknown', logs)

                    # Run tests if requested, and dump output
                    # Could be improved by backgrounding, but would need better
                    # multiprocessing support to track failures properly.
                    if FLAGS.run_tests_path:
                        if not tag_def['tests']:
                            eprint(('>>> No tests to run.'), quiet=FLAGS.quiet)
                        for test in tag_def['tests']:
                            eprint(('>> Testing {}...'.format(test)), quiet=FLAGS.quiet)
                            # Run the test script inside the freshly built
                            # image with the tests dir mounted read-only.
                            container, = dock.containers.run(
                                image,
                                '/tests/' + test,
                                working_dir='/',
                                log_config={'type': 'journald'},
                                detach=True,
                                stderr=True,
                                stdout=True,
                                volumes={
                                    FLAGS.run_tests_path: {
                                        'bind': '/tests',
                                        'mode': 'ro'
                                    }
                                },
                                runtime=tag_def['test_runtime']),
                            ret = container.wait()
                            code = ret['StatusCode']
                            out = container.logs(stdout=True, stderr=False)
                            err = container.logs(stdout=False, stderr=True)
                            container.remove()
                            if out:
                                eprint('>>> Output stdout:', quiet=FLAGS.quiet)
                                eprint(out.decode('utf-8'), quiet=FLAGS.quiet)
                            else:
                                eprint('>>> No test standard out.', quiet=FLAGS.quiet)
                            if err:
                                eprint('>>> Output stderr:')
                                eprint(err.decode('utf-8'))
                            else:
                                eprint('>>> No test standard err.')
                            if code != 0:
                                eprint('>> {} failed tests with status: "{}"'.format(
                                    repo_tag, code))
                                failed_tags.append(repo_tag)
                                tag_failed = True
                                if FLAGS.stop_on_failure:
                                    eprint('>> ABORTING due to --stop_on_failure!')
                                    exit(1)
                            else:
                                eprint('>> Tests look good!', quiet=FLAGS.quiet)

                except docker.errors.BuildError as e:
                    eprint('>> {} failed to build with message: "{}"'.format(
                        repo_tag, e.msg))
                    eprint(json_output)
                    eprint('>> Build logs follow:')
                    log_lines = [l.get('stream', '') for l in e.build_log]
                    eprint(''.join(log_lines))
                    failed_tags.append(repo_tag)
                    tag_failed = True
                    if FLAGS.stop_on_failure:
                        eprint('>> ABORTING due to --stop_on_failure!')
                        exit(1)

                # Clean temporary dockerfiles if they were created earlier
                if not FLAGS.keep_temp_dockerfiles:
                    os.remove(dockerfile)

            # Upload new images to DockerHub as long as they built + passed tests
            if FLAGS.upload_to_hub:
                if not tag_def['upload_images']:
                    continue
                if tag_failed:
                    continue
                eprint('>> Uploading to {}:{}'.format(FLAGS.hub_repository, tag), quiet=FLAGS.quiet)
                if not FLAGS.dry_run:
                    p = multiprocessing.Process(
                        target=upload_in_background,
                        args=(FLAGS.hub_repository, dock, image, tag))
                    p.start()

            if not tag_failed:
                succeeded_tags.append(repo_tag)

    if failed_tags:
        eprint(
            '> Some tags failed to build or failed testing, check scrollback for '
            'errors: {}'.format(','.join(failed_tags)))
        exit(1)

    eprint('> Writing built{} tags to standard out.'.format(
        ' and tested' if FLAGS.run_tests_path else ''), quiet=FLAGS.quiet)
    images_built_var = os.environ.get("IMAGES_BUILT", "")
    for tag in succeeded_tags:
        eprint('{}:{}'.format(FLAGS.repository, tag), quiet=FLAGS.quiet)
        images_built_var = tag if not images_built_var else "{},{}".format(images_built_var, tag)

    # Update the env var of images built
    if FLAGS.build_images and images_built_var:
        print("IMAGES_BUILT={}".format(images_built_var))
if __name__ == '__main__':
    # absl parses FLAGS from argv and then dispatches to main().
    app.run(main)
|
[
"platform.processor",
"os.mkdir",
"os.remove",
"os.unlink",
"os.walk",
"yaml.dump",
"collections.defaultdict",
"os.path.isfile",
"yaml.safe_load",
"absl.flags.DEFINE_boolean",
"glob.glob",
"shutil.rmtree",
"os.path.join",
"shutil.copy",
"os.path.dirname",
"os.path.exists",
"re.findall",
"absl.flags.DEFINE_integer",
"tempfile.mkdtemp",
"itertools.product",
"tarfile.open",
"itertools.chain",
"re.search",
"distutils.dir_util.copy_tree",
"docker.from_env",
"copy.deepcopy",
"os.path.basename",
"docker.errors.BuildError",
"re.match",
"urllib.request.urlretrieve",
"os.listdir",
"sys.exit",
"re.compile",
"os.makedirs",
"os.getcwd",
"os.path.isdir",
"absl.flags.DEFINE_multi_string",
"absl.flags.DEFINE_string",
"os.environ.get",
"absl.app.run",
"shutil.copytree",
"multiprocessing.Process",
"absl.app.UsageError"
] |
[((1484, 1583), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""hub_username"""', 'None', '"""Dockerhub username, only used with --upload_to_hub"""'], {}), "('hub_username', None,\n 'Dockerhub username, only used with --upload_to_hub')\n", (1503, 1583), False, 'from absl import flags\n'), ((1601, 1768), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""hub_password"""', 'None', '"""Dockerhub password, only used with --upload_to_hub. Use from an env param so your password isn\'t in your history."""'], {}), '(\'hub_password\', None,\n "Dockerhub password, only used with --upload_to_hub. Use from an env param so your password isn\'t in your history."\n )\n', (1620, 1768), False, 'from absl import flags\n'), ((1781, 1876), 'absl.flags.DEFINE_integer', 'flags.DEFINE_integer', (['"""hub_timeout"""', '(3600)', '"""Abort Hub upload if it takes longer than this."""'], {}), "('hub_timeout', 3600,\n 'Abort Hub upload if it takes longer than this.')\n", (1801, 1876), False, 'from absl import flags\n'), ((1895, 2048), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""repository"""', '"""tensorflow"""', '"""Tag local images as {repository}:tag (in addition to the hub_repository, if uploading to hub)"""'], {}), "('repository', 'tensorflow',\n 'Tag local images as {repository}:tag (in addition to the hub_repository, if uploading to hub)'\n )\n", (1914, 2048), False, 'from absl import flags\n'), ((2057, 2175), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""hub_repository"""', 'None', '"""Push tags to this Docker Hub repository, e.g. tensorflow/tensorflow"""'], {}), "('hub_repository', None,\n 'Push tags to this Docker Hub repository, e.g. 
tensorflow/tensorflow')\n", (2076, 2175), False, 'from absl import flags\n'), ((2182, 2363), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""upload_to_hub"""', '(False)', '"""Push built images to Docker Hub (you must also provide --hub_username, --hub_password, and --hub_repository)"""'], {'short_name': '"""u"""'}), "('upload_to_hub', False,\n 'Push built images to Docker Hub (you must also provide --hub_username, --hub_password, and --hub_repository)'\n , short_name='u')\n", (2202, 2363), False, 'from absl import flags\n'), ((2385, 2480), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""print_models"""', '(False)', '"""print out the list of models"""'], {'short_name': '"""t"""'}), "('print_models', False, 'print out the list of models',\n short_name='t')\n", (2405, 2480), False, 'from absl import flags\n'), ((2483, 2573), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""build_packages"""', '(False)', '"""Do not build packages"""'], {'short_name': '"""z"""'}), "('build_packages', False, 'Do not build packages',\n short_name='z')\n", (2503, 2573), False, 'from absl import flags\n'), ((2576, 2676), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""construct_dockerfiles"""', '(False)', '"""Do not build Dockerfiles"""'], {'short_name': '"""d"""'}), "('construct_dockerfiles', False,\n 'Do not build Dockerfiles', short_name='d')\n", (2596, 2676), False, 'from absl import flags\n'), ((2679, 2779), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""generate_documentation"""', '(False)', '"""Do not create README.md"""'], {'short_name': '"""e"""'}), "('generate_documentation', False,\n 'Do not create README.md', short_name='e')\n", (2699, 2779), False, 'from absl import flags\n'), ((2782, 2913), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""keep_temp_dockerfiles"""', '(False)', '"""Retain .temp.Dockerfiles created while building images."""'], {'short_name': '"""k"""'}), "('keep_temp_dockerfiles', False,\n 'Retain 
.temp.Dockerfiles created while building images.', short_name='k')\n", (2802, 2913), False, 'from absl import flags\n'), ((2928, 3014), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""build_images"""', '(False)', '"""Do not build images"""'], {'short_name': '"""b"""'}), "('build_images', False, 'Do not build images',\n short_name='b')\n", (2948, 3014), False, 'from absl import flags\n'), ((3017, 3121), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""list_images"""', '(False)', '"""Do not list images that would be built"""'], {'short_name': '"""c"""'}), "('list_images', False,\n 'Do not list images that would be built', short_name='c')\n", (3037, 3121), False, 'from absl import flags\n'), ((3124, 3216), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""list_packages"""', '(False)', '"""Do not list packages that would be built"""'], {}), "('list_packages', False,\n 'Do not list packages that would be built')\n", (3144, 3216), False, 'from absl import flags\n'), ((3219, 3486), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""run_tests_path"""', 'None', '"""Execute test scripts on generated Dockerfiles before pushing them. Flag value must be a full path to the "tests" directory, which is usually $(realpath ./tests). A failed tests counts the same as a failed build."""'], {}), '(\'run_tests_path\', None,\n \'Execute test scripts on generated Dockerfiles before pushing them. Flag value must be a full path to the "tests" directory, which is usually $(realpath ./tests). A failed tests counts the same as a failed build.\'\n )\n', (3238, 3486), False, 'from absl import flags\n'), ((3506, 3694), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""stop_on_failure"""', '(False)', '"""Stop processing tags if any one build fails. If False or not specified, failures are reported but do not affect the other images."""'], {}), "('stop_on_failure', False,\n 'Stop processing tags if any one build fails. 
If False or not specified, failures are reported but do not affect the other images.'\n )\n", (3526, 3694), False, 'from absl import flags\n'), ((3706, 3807), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""dry_run"""', '(False)', '"""Do not build or deploy anything at all."""'], {'short_name': '"""n"""'}), "('dry_run', False,\n 'Do not build or deploy anything at all.', short_name='n')\n", (3726, 3807), False, 'from absl import flags\n'), ((3824, 4029), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""exclude_tags_matching"""', 'None', '"""Regular expression that skips processing on any tag it matches. Must match entire string, e.g. ".*gpu.*" ignores all GPU tags."""'], {'short_name': '"""x"""'}), '(\'exclude_tags_matching\', None,\n \'Regular expression that skips processing on any tag it matches. Must match entire string, e.g. ".*gpu.*" ignores all GPU tags.\'\n , short_name=\'x\')\n', (3843, 4029), False, 'from absl import flags\n'), ((4049, 4264), 'absl.flags.DEFINE_multi_string', 'flags.DEFINE_multi_string', (['"""only_tags_matching"""', '[]', '"""Regular expression that skips processing on any tag it does not match. Must match entire string, e.g. ".*gpu.*" includes only GPU tags."""'], {'short_name': '"""i"""'}), '(\'only_tags_matching\', [],\n \'Regular expression that skips processing on any tag it does not match. Must match entire string, e.g. ".*gpu.*" includes only GPU tags.\'\n , short_name=\'i\')\n', (4074, 4264), False, 'from absl import flags\n'), ((4284, 4348), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""model_dir"""', '"""."""', '"""Path to the model repo."""'], {}), "('model_dir', '.', 'Path to the model repo.')\n", (4303, 4348), False, 'from absl import flags\n'), ((4359, 4499), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""output_dir"""', '"""output"""', '"""Path to an output directory for model packages. 
Will be created if it doesn\'t exist."""'], {}), '(\'output_dir\', \'output\',\n "Path to an output directory for model packages. Will be created if it doesn\'t exist."\n )\n', (4378, 4499), False, 'from absl import flags\n'), ((4509, 4758), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""dockerfile_dir"""', '"""models/dockerfiles"""', '"""Path to an output directory for Dockerfiles. Will be created if it doesn\'t exist. Existing files in this directory will be deleted when new Dockerfiles are made."""'], {'short_name': '"""o"""'}), '(\'dockerfile_dir\', \'models/dockerfiles\',\n "Path to an output directory for Dockerfiles. Will be created if it doesn\'t exist. Existing files in this directory will be deleted when new Dockerfiles are made."\n , short_name=\'o\')\n', (4528, 4758), False, 'from absl import flags\n'), ((4786, 4991), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""partial_dir"""', '"""./partials"""', '"""Path to a directory containing foo.partial.Dockerfile partial files. can have subdirectories, e.g. "bar/baz.partial.Dockerfile"."""'], {'short_name': '"""p"""'}), '(\'partial_dir\', \'./partials\',\n \'Path to a directory containing foo.partial.Dockerfile partial files. can have subdirectories, e.g. "bar/baz.partial.Dockerfile".\'\n , short_name=\'p\')\n', (4805, 4991), False, 'from absl import flags\n'), ((5008, 5141), 'absl.flags.DEFINE_multi_string', 'flags.DEFINE_multi_string', (['"""release"""', '[]', '"""Set of releases to build and tag. Defaults to every release type."""'], {'short_name': '"""r"""'}), "('release', [],\n 'Set of releases to build and tag. Defaults to every release type.',\n short_name='r')\n", (5033, 5141), False, 'from absl import flags\n'), ((5148, 5382), 'absl.flags.DEFINE_multi_string', 'flags.DEFINE_multi_string', (['"""arg"""', '[]', '"""Extra build arguments. These are used for expanding tag names if needed (e.g. 
--arg _TAG_PREFIX=foo) and for using as build arguments (unused args will print a warning)."""'], {'short_name': '"""a"""'}), "('arg', [],\n 'Extra build arguments. These are used for expanding tag names if needed (e.g. --arg _TAG_PREFIX=foo) and for using as build arguments (unused args will print a warning).'\n , short_name='a')\n", (5173, 5382), False, 'from absl import flags\n'), ((5406, 5522), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""nocache"""', '(False)', '"""Disable the Docker build cache; identical to "docker build --no-cache\\""""'], {}), '(\'nocache\', False,\n \'Disable the Docker build cache; identical to "docker build --no-cache"\')\n', (5426, 5522), False, 'from absl import flags\n'), ((5529, 5635), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""spec_dir"""', '"""./specs"""', '"""Path to the YAML specification directory"""'], {'short_name': '"""s"""'}), "('spec_dir', './specs',\n 'Path to the YAML specification directory', short_name='s')\n", (5548, 5635), False, 'from absl import flags\n'), ((5650, 5885), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""framework"""', '"""tensorflow"""', '"""Name of the deep learning framework. This is being used with the --generate_new_spec arg to map the model zoo directory structure, which includes a directory for the framework."""'], {}), "('framework', 'tensorflow',\n 'Name of the deep learning framework. This is being used with the --generate_new_spec arg to map the model zoo directory structure, which includes a directory for the framework.'\n )\n", (5669, 5885), False, 'from absl import flags\n'), ((5905, 6109), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""generate_new_spec"""', 'None', '"""Used to auto generate a spec with model package files. Specify the name for the new spec, which should be formatted like modelname-precision-mode"""'], {}), "('generate_new_spec', None,\n 'Used to auto generate a spec with model package files. 
Specify the name for the new spec, which should be formatted like modelname-precision-mode'\n )\n", (5924, 6109), False, 'from absl import flags\n'), ((6122, 6262), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""model_download"""', 'None', '"""Use only with --generate_new_spec to specify the URL to download a pretrained model."""'], {}), "('model_download', None,\n 'Use only with --generate_new_spec to specify the URL to download a pretrained model.'\n )\n", (6141, 6262), False, 'from absl import flags\n'), ((6275, 6341), 'absl.flags.DEFINE_boolean', 'flags.DEFINE_boolean', (['"""quiet"""', '(False)', '"""quiet mode"""'], {'short_name': '"""q"""'}), "('quiet', False, 'quiet mode', short_name='q')\n", (6295, 6341), False, 'from absl import flags\n'), ((11200, 11233), 'copy.deepcopy', 'copy.deepcopy', (["spec['slice_sets']"], {}), "(spec['slice_sets'])\n", (11213, 11233), False, 'import copy\n'), ((11687, 11706), 'copy.deepcopy', 'copy.deepcopy', (['args'], {}), '(args)\n', (11700, 11706), False, 'import copy\n'), ((12937, 12964), 're.compile', 're.compile', (['"""\\\\{([^}]+)\\\\}"""'], {}), "('\\\\{([^}]+)\\\\}')\n", (12947, 12964), False, 'import re\n'), ((14650, 14679), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (14673, 14679), False, 'import collections\n'), ((18351, 18371), 'os.listdir', 'os.listdir', (['dir_path'], {}), '(dir_path)\n', (18361, 18371), False, 'import os\n'), ((18991, 19012), 'os.walk', 'os.walk', (['partial_path'], {}), '(partial_path)\n', (18998, 19012), False, 'import os\n'), ((24154, 24176), 'glob.glob', 'glob.glob', (['search_path'], {}), '(search_path)\n', (24163, 24176), False, 'import glob\n'), ((26365, 26441), 'os.path.join', 'os.path.join', (['"""benchmarks"""', 'use_case', 'framework', 'model_name', 'mode', 'precision'], {}), "('benchmarks', use_case, framework, model_name, mode, precision)\n", (26377, 26441), False, 'import os\n'), ((26625, 26697), 'os.path.join', 'os.path.join', 
(['"""benchmarks"""', 'use_case', 'framework', 'model_name', '"""README.md"""'], {}), "('benchmarks', use_case, framework, model_name, 'README.md')\n", (26637, 26697), False, 'import os\n'), ((27451, 27523), 'os.path.join', 'os.path.join', (['"""models"""', 'use_case', 'framework', 'model_name', 'mode', 'precision'], {}), "('models', use_case, framework, model_name, mode, precision)\n", (27463, 27523), False, 'import os\n'), ((28548, 28624), 'os.path.join', 'os.path.join', (['"""quickstart"""', 'use_case', 'framework', 'model_name', 'mode', 'precision'], {}), "('quickstart', use_case, framework, model_name, mode, precision)\n", (28560, 28624), False, 'import os\n'), ((28694, 28736), 'os.path.join', 'os.path.join', (['model_dir', 'quickstart_folder'], {}), '(model_dir, quickstart_folder)\n', (28706, 28736), False, 'import os\n'), ((31618, 31707), 'os.path.join', 'os.path.join', (['"""quickstart"""', 'use_case', 'framework', 'model_name', 'mode', 'precision', '""".docs"""'], {}), "('quickstart', use_case, framework, model_name, mode, precision,\n '.docs')\n", (31630, 31707), False, 'import os\n'), ((31767, 31803), 'os.path.join', 'os.path.join', (['model_dir', 'docs_folder'], {}), '(model_dir, docs_folder)\n', (31779, 31803), False, 'import os\n'), ((32023, 32056), 'os.listdir', 'os.listdir', (['docs_folder_full_path'], {}), '(docs_folder_full_path)\n', (32033, 32056), False, 'import os\n'), ((35524, 35568), 'os.path.join', 'os.path.join', (['FLAGS.spec_dir', 'spec_file_name'], {}), '(FLAGS.spec_dir, spec_file_name)\n', (35536, 35568), False, 'import os\n'), ((35576, 35606), 'os.path.isfile', 'os.path.isfile', (['spec_file_path'], {}), '(spec_file_path)\n', (35590, 35606), False, 'import os\n'), ((39875, 39901), 'os.listdir', 'os.listdir', (['FLAGS.spec_dir'], {}), '(FLAGS.spec_dir)\n', (39885, 39901), False, 'import os\n'), ((40345, 40372), 'yaml.safe_load', 'yaml.safe_load', (['SCHEMA_TEXT'], {}), '(SCHEMA_TEXT)\n', (40359, 40372), False, 'import yaml\n'), ((42403, 
42420), 'docker.from_env', 'docker.from_env', ([], {}), '()\n', (42418, 42420), False, 'import docker\n'), ((51770, 51804), 'os.environ.get', 'os.environ.get', (['"""IMAGES_BUILT"""', '""""""'], {}), "('IMAGES_BUILT', '')\n", (51784, 51804), False, 'import os\n'), ((52168, 52181), 'absl.app.run', 'app.run', (['main'], {}), '(main)\n', (52175, 52181), False, 'from absl import app\n'), ((11447, 11495), 'itertools.product', 'itertools.product', (['*slices_grouped_but_not_keyed'], {}), '(*slices_grouped_but_not_keyed)\n', (11464, 11495), False, 'import itertools\n'), ((13959, 14013), 'itertools.chain', 'itertools.chain', (['*[s[key] for s in slices if key in s]'], {}), '(*[s[key] for s in slices if key in s])\n', (13974, 14013), False, 'import itertools\n'), ((18137, 18154), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (18148, 18154), False, 'import os\n'), ((18389, 18418), 'os.path.join', 'os.path.join', (['dir_path', 'afile'], {}), '(dir_path, afile)\n', (18401, 18418), False, 'import os\n'), ((19955, 19966), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (19964, 19966), False, 'import os\n'), ((19995, 20020), 'os.path.isdir', 'os.path.isdir', (['output_dir'], {}), '(output_dir)\n', (20008, 20020), False, 'import os\n'), ((20108, 20128), 'os.mkdir', 'os.mkdir', (['output_dir'], {}), '(output_dir)\n', (20116, 20128), False, 'import os\n'), ((24016, 24027), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (24025, 24027), False, 'import os\n'), ((25877, 25888), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (25886, 25888), False, 'import os\n'), ((27009, 27061), 'os.path.join', 'os.path.join', (['model_dir', 'path', 'folder', '"""__init__.py"""'], {}), "(model_dir, path, folder, '__init__.py')\n", (27021, 27061), False, 'import os\n'), ((27073, 27103), 'os.path.exists', 'os.path.exists', (['init_file_path'], {}), '(init_file_path)\n', (27087, 27103), False, 'import os\n'), ((27312, 27338), 'os.path.join', 'os.path.join', (['path', 'folder'], {}), '(path, folder)\n', 
(27324, 27338), False, 'import os\n'), ((27578, 27615), 'os.path.join', 'os.path.join', (['model_dir', 'model_folder'], {}), '(model_dir, model_folder)\n', (27590, 27615), False, 'import os\n'), ((27820, 27881), 'os.path.join', 'os.path.join', (['"""models"""', 'use_case', 'framework', 'model_name', 'mode'], {}), "('models', use_case, framework, model_name, mode)\n", (27832, 27881), False, 'import os\n'), ((28180, 28246), 'os.path.join', 'os.path.join', (['"""models"""', 'use_case', 'framework', 'model_name', 'precision'], {}), "('models', use_case, framework, model_name, precision)\n", (28192, 28246), False, 'import os\n'), ((28748, 28791), 'os.path.exists', 'os.path.exists', (['quickstart_folder_full_path'], {}), '(quickstart_folder_full_path)\n', (28762, 28791), False, 'import os\n'), ((28801, 28841), 'os.makedirs', 'os.makedirs', (['quickstart_folder_full_path'], {}), '(quickstart_folder_full_path)\n', (28812, 28841), False, 'import os\n'), ((29484, 29555), 'os.path.join', 'os.path.join', (['folder', '"""use_case"""', 'framework', 'model_name', 'mode', '"""common"""'], {}), "(folder, 'use_case', framework, model_name, mode, 'common')\n", (29496, 29555), False, 'import os\n'), ((29916, 29981), 'os.path.join', 'os.path.join', (['folder', '"""use_case"""', 'framework', 'model_name', '"""common"""'], {}), "(folder, 'use_case', framework, model_name, 'common')\n", (29928, 29981), False, 'import os\n'), ((31530, 31541), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (31539, 31541), False, 'import os\n'), ((31811, 31848), 'os.path.exists', 'os.path.exists', (['docs_folder_full_path'], {}), '(docs_folder_full_path)\n', (31825, 31848), False, 'import os\n'), ((31865, 31913), 'shutil.copytree', 'shutil.copytree', (['"""./docs"""', 'docs_folder_full_path'], {}), "('./docs', docs_folder_full_path)\n", (31880, 31913), False, 'import shutil\n'), ((32091, 32118), 'os.path.basename', 'os.path.basename', (['model_dir'], {}), '(model_dir)\n', (32107, 32118), False, 'import os\n'), 
((35821, 35857), 're.findall', 're.findall', (['regex_pattern', 'spec_name'], {}), '(regex_pattern, spec_name)\n', (35831, 35857), False, 'import re\n'), ((36150, 36173), 'sys.exit', 'sys.exit', (['error_message'], {}), '(error_message)\n', (36158, 36173), False, 'import sys\n'), ((37937, 37964), 'os.path.basename', 'os.path.basename', (['model_url'], {}), '(model_url)\n', (37953, 37964), False, 'import os\n'), ((38253, 38277), 'yaml.dump', 'yaml.dump', (['model_spec', 'f'], {}), '(model_spec, f)\n', (38262, 38277), False, 'import yaml\n'), ((38354, 38375), 'yaml.dump', 'yaml.dump', (['model_spec'], {}), '(model_spec)\n', (38363, 38375), False, 'import yaml\n'), ((39463, 39513), 'absl.app.UsageError', 'app.UsageError', (['"""Too many command-line arguments."""'], {}), "('Too many command-line arguments.')\n", (39477, 39513), False, 'from absl import app\n'), ((39768, 39779), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (39776, 39779), False, 'import sys\n'), ((41175, 41186), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (41183, 41186), False, 'import sys\n'), ((41576, 41587), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (41584, 41587), False, 'import sys\n'), ((42091, 42102), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (42099, 42102), False, 'import sys\n'), ((19054, 19078), 'os.path.join', 'os.path.join', (['path', 'name'], {}), '(path, name)\n', (19066, 19078), False, 'import os\n'), ((20369, 20387), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (20385, 20387), False, 'import tempfile\n'), ((22539, 22562), 'shutil.rmtree', 'shutil.rmtree', (['temp_dir'], {}), '(temp_dir)\n', (22552, 22562), False, 'import shutil\n'), ((24735, 24757), 'glob.glob', 'glob.glob', (['search_path'], {}), '(search_path)\n', (24744, 24757), False, 'import glob\n'), ((27944, 27981), 'os.path.join', 'os.path.join', (['model_dir', 'model_folder'], {}), '(model_dir, model_folder)\n', (27956, 27981), False, 'import os\n'), ((28309, 28346), 'os.path.join', 
'os.path.join', (['model_dir', 'model_folder'], {}), '(model_dir, model_folder)\n', (28321, 28346), False, 'import os\n'), ((28949, 29007), 'os.path.join', 'os.path.join', (['quickstart_folder_full_path', 'template_script'], {}), '(quickstart_folder_full_path, template_script)\n', (28961, 29007), False, 'import os\n'), ((29625, 29669), 'os.path.join', 'os.path.join', (['model_dir', 'common_model_folder'], {}), '(model_dir, common_model_folder)\n', (29637, 29669), False, 'import os\n'), ((30051, 30095), 'os.path.join', 'os.path.join', (['model_dir', 'common_model_folder'], {}), '(model_dir, common_model_folder)\n', (30063, 30095), False, 'import os\n'), ((32393, 32404), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (32402, 32404), False, 'import os\n'), ((32473, 32500), 'os.path.basename', 'os.path.basename', (['model_dir'], {}), '(model_dir)\n', (32489, 32500), False, 'import os\n'), ((39292, 39389), 'os.path.join', 'os.path.join', (['"""quickstart"""', 'zoo_use_case', 'framework', 'zoo_model_name', 'mode', 'precision', '""".docs"""'], {}), "('quickstart', zoo_use_case, framework, zoo_model_name, mode,\n precision, '.docs')\n", (39304, 39389), False, 'import os\n'), ((40534, 40563), 'yaml.dump', 'yaml.dump', (['v.errors'], {'indent': '(2)'}), '(v.errors, indent=2)\n', (40543, 40563), False, 'import yaml\n'), ((45206, 45226), 'platform.processor', 'platform.processor', ([], {}), '()\n', (45224, 45226), False, 'import platform\n'), ((45587, 45647), 'os.path.join', 'os.path.join', (['FLAGS.dockerfile_dir', "(tag + '.temp.Dockerfile')"], {}), "(FLAGS.dockerfile_dir, tag + '.temp.Dockerfile')\n", (45599, 45647), False, 'import os\n'), ((18437, 18462), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (18451, 18462), False, 'import os\n'), ((18510, 18530), 'os.unlink', 'os.unlink', (['file_path'], {}), '(file_path)\n', (18519, 18530), False, 'import os\n'), ((18542, 18566), 'os.path.isdir', 'os.path.isdir', (['file_path'], {}), '(file_path)\n', 
(18555, 18566), False, 'import os\n'), ((20458, 20469), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (20467, 20469), False, 'import os\n'), ((22358, 22388), 'tarfile.open', 'tarfile.open', (['tar_file', '"""w:gz"""'], {}), "(tar_file, 'w:gz')\n", (22370, 22388), False, 'import tarfile\n'), ((24554, 24565), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (24563, 24565), False, 'import os\n'), ((29109, 29157), 'os.path.join', 'os.path.join', (['quickstart_folder', 'template_script'], {}), '(quickstart_folder, template_script)\n', (29121, 29157), False, 'import os\n'), ((39917, 39956), 'os.path.join', 'os.path.join', (['FLAGS.spec_dir', 'spec_file'], {}), '(FLAGS.spec_dir, spec_file)\n', (39929, 39956), False, 'import os\n'), ((40018, 40043), 'yaml.safe_load', 'yaml.safe_load', (['spec_file'], {}), '(spec_file)\n', (40032, 40043), False, 'import yaml\n'), ((43314, 43356), 're.match', 're.match', (['FLAGS.exclude_tags_matching', 'tag'], {}), '(FLAGS.exclude_tags_matching, tag)\n', (43322, 43356), False, 'import re\n'), ((44073, 44168), 'os.path.join', 'os.path.join', (['FLAGS.dockerfile_dir', "tag_def['dockerfile_subdirectory']", "(tag + '.Dockerfile')"], {}), "(FLAGS.dockerfile_dir, tag_def['dockerfile_subdirectory'], tag +\n '.Dockerfile')\n", (44085, 44168), False, 'import os\n'), ((20589, 20628), 'os.path.join', 'os.path.join', (['model_dir', "item['source']"], {}), "(model_dir, item['source'])\n", (20601, 20628), False, 'import os\n'), ((20653, 20696), 'os.path.join', 'os.path.join', (['temp_dir', "item['destination']"], {}), "(temp_dir, item['destination'])\n", (20665, 20696), False, 'import os\n'), ((20710, 20731), 'os.path.isdir', 'os.path.isdir', (['source'], {}), '(source)\n', (20723, 20731), False, 'import os\n'), ((21857, 21900), 'os.path.join', 'os.path.join', (['temp_dir', "item['destination']"], {}), "(temp_dir, item['destination'])\n", (21869, 21900), False, 'import os\n'), ((22114, 22161), 'urllib.request.urlretrieve', 'urllib.request.urlretrieve', 
(['source', 'destination'], {}), '(source, destination)\n', (22140, 22161), False, 'import urllib\n'), ((27155, 27196), 'os.path.join', 'os.path.join', (['path', 'folder', '"""__init__.py"""'], {}), "(path, folder, '__init__.py')\n", (27167, 27196), False, 'import os\n'), ((27253, 27294), 'os.path.join', 'os.path.join', (['path', 'folder', '"""__init__.py"""'], {}), "(path, folder, '__init__.py')\n", (27265, 27294), False, 'import os\n'), ((32745, 32795), 're.search', 're.search', (['"""^.* ([0-9]*)\\\\. (.*) -->$"""', 'first_line'], {}), "('^.* ([0-9]*)\\\\. (.*) -->$', first_line)\n", (32754, 32795), False, 'import re\n'), ((41336, 41381), 're.findall', 're.findall', (['"""{([^{}]*)}"""', "tag_def['tag_spec']"], {}), "('{([^{}]*)}', tag_def['tag_spec'])\n", (41346, 41381), False, 'import re\n'), ((41739, 41784), 're.findall', 're.findall', (['"""{([^{}]*)}"""', "tag_def['tag_spec']"], {}), "('{([^{}]*)}', tag_def['tag_spec'])\n", (41749, 41784), False, 'import re\n'), ((44730, 44787), 'os.path.join', 'os.path.join', (["documentation['uri']", "documentation['name']"], {}), "(documentation['uri'], documentation['name'])\n", (44742, 44787), False, 'import os\n'), ((50858, 50879), 'os.remove', 'os.remove', (['dockerfile'], {}), '(dockerfile)\n', (50867, 50879), False, 'import os\n'), ((51227, 51331), 'multiprocessing.Process', 'multiprocessing.Process', ([], {'target': 'upload_in_background', 'args': '(FLAGS.hub_repository, dock, image, tag)'}), '(target=upload_in_background, args=(FLAGS.\n hub_repository, dock, image, tag))\n', (51250, 51331), False, 'import multiprocessing\n'), ((19623, 19634), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (19631, 19634), False, 'import sys\n'), ((20865, 20895), 'distutils.dir_util.copy_tree', 'copy_tree', (['source', 'destination'], {}), '(source, destination)\n', (20874, 20895), False, 'from distutils.dir_util import copy_tree\n'), ((21014, 21048), 'os.path.join', 'os.path.join', (['destination', '""".docs"""'], {}), 
"(destination, '.docs')\n", (21026, 21048), False, 'import os\n'), ((21064, 21095), 'os.path.isdir', 'os.path.isdir', (['doc_partials_dir'], {}), '(doc_partials_dir)\n', (21077, 21095), False, 'import os\n'), ((21178, 21200), 'os.path.isfile', 'os.path.isfile', (['source'], {}), '(source)\n', (21192, 21200), False, 'import os\n'), ((40960, 41005), 're.findall', 're.findall', (['"""{([^{}]*)}"""', "tag_def['tag_spec']"], {}), "('{([^{}]*)}', tag_def['tag_spec'])\n", (40970, 41005), False, 'import re\n'), ((44336, 44357), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (44351, 44357), False, 'import os\n'), ((48103, 48158), 'docker.errors.BuildError', 'docker.errors.BuildError', (["(last_event or 'Unknown')", 'logs'], {}), "(last_event or 'Unknown', logs)\n", (48127, 48158), False, 'import docker\n'), ((21111, 21162), 'shutil.rmtree', 'shutil.rmtree', (['doc_partials_dir'], {'ignore_errors': '(True)'}), '(doc_partials_dir, ignore_errors=True)\n', (21124, 21162), False, 'import shutil\n'), ((21423, 21455), 'shutil.copy', 'shutil.copy', (['source', 'destination'], {}), '(source, destination)\n', (21434, 21455), False, 'import shutil\n'), ((21652, 21663), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (21660, 21663), False, 'import sys\n'), ((22019, 22047), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (22034, 22047), False, 'import os\n'), ((22074, 22102), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (22089, 22102), False, 'import os\n'), ((43633, 43649), 're.match', 're.match', (['x', 'tag'], {}), '(x, tag)\n', (43641, 43649), False, 'import re\n'), ((20766, 20794), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (20781, 20794), False, 'import os\n'), ((20823, 20851), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (20838, 20851), False, 'import os\n'), ((47273, 47351), 're.search', 're.search', (['"""(^Successfully 
built |sha256:)([0-9a-f]+)$"""', "json_output['stream']"], {}), "('(^Successfully built |sha256:)([0-9a-f]+)$', json_output['stream'])\n", (47282, 47351), False, 'import re\n'), ((21324, 21352), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (21339, 21352), False, 'import os\n'), ((21381, 21409), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (21396, 21409), False, 'import os\n')]
|
import torch
import torch.nn.functional as F
from torch import nn
from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1
class SEBlock(nn.Module):
    """Squeeze-and-Excitation channel-attention block.

    Globally average-pools the input, runs the pooled vector through a
    two-layer bottleneck (reduction ratio ``r``) implemented with 1x1
    convolutions, and rescales each input channel by the resulting
    sigmoid gate.
    """

    def __init__(self, n_features, r=16):
        super(SEBlock, self).__init__()
        # Squeeze + excitation scorer; produces one logit per channel.
        self.scorer = nn.Sequential(
            nn.AdaptiveAvgPool2d(1),
            nn.Conv2d(n_features, n_features // r, kernel_size=1, padding=0, stride=1),
            nn.ReLU(True),
            nn.Conv2d(n_features // r, n_features, kernel_size=1, padding=0, stride=1),
        )

    def forward(self, x):
        # Gate in (0, 1), broadcast over the spatial dimensions.
        gate = torch.sigmoid(self.scorer(x))
        return gate * x
class HGResidual(nn.Module):
    """Bottleneck residual block for hourglass networks.

    Main branch: 1x1 reduce -> 3x3 conv -> 1x1 expand. The skip branch is
    the identity, or a 1x1 projection when the channel count changes.
    Optionally applies a squeeze-and-excitation gate to the main branch.
    """

    def __init__(self, n_inp, n_out, se=False, se_ratio=16):
        super().__init__()
        self.bottleneck = conv_block_1x1(n_inp, n_out // 2, 'relu')
        self.conv = conv_block_3x3(n_out // 2, n_out // 2, 'relu')
        self.out = conv_block_1x1(n_out // 2, n_out, None)
        # Project the skip path only when input/output channels differ.
        self.skip = conv_block_1x1(n_inp, n_out, None) if n_inp != n_out else Identity()
        if se:
            self.se_block = SEBlock(n_out, r=se_ratio)

    def forward(self, x):
        residual = self.out(self.conv(self.bottleneck(x)))
        # SE gating affects the residual branch only, not the skip.
        if hasattr(self, 'se_block'):
            residual = self.se_block(residual)
        return residual + self.skip(x)
class MultiScaleHGResidual(nn.Module):
    """Multi-scale residual block.

    https://arxiv.org/pdf/1808.04803.pdf

    Three chained 3x3 convolutions whose outputs are concatenated along
    the channel axis; the channel counts are chosen so they sum exactly
    to ``n_out``. A skip branch (identity or 1x1 projection) is added on
    top, optionally after a squeeze-and-excitation gate.
    """

    def __init__(self, n_inp, n_out, se=False, se_ratio=16):
        super().__init__()
        self.scale1 = conv_block_3x3(n_inp, n_out // 2, 'relu')
        self.scale2 = conv_block_3x3(n_out // 2, n_out // 4, 'relu')
        # Whatever channels remain, so the concatenation totals n_out.
        self.scale3 = conv_block_3x3(n_out // 4, n_out - n_out // 4 - n_out // 2, None)
        # Project the skip path only when input/output channels differ.
        self.skip = conv_block_1x1(n_inp, n_out, None) if n_inp != n_out else Identity()
        if se:
            self.se_block = SEBlock(n_out, r=se_ratio)

    def forward(self, x):
        s1 = self.scale1(x)
        s2 = self.scale2(s1)
        s3 = self.scale3(s2)
        merged = torch.cat([s1, s2, s3], 1)
        if hasattr(self, 'se_block'):
            merged = self.se_block(merged)
        return merged + self.skip(x)
class SoftArgmax2D(nn.Module):
    """Differentiable 2D argmax over a batch of heatmaps.

    For each channel, computes a spatial softmax of the heatmap (sharpened
    by ``beta``) and returns the expected coordinate under that distribution,
    with x and y each normalized by the map width/height (so coordinates lie
    in [0, 1)).

    Input:  ``hm`` of shape (batch, channels, h, w).
    Output: tensor of shape (batch, channels, 2) holding (x, y) per channel.
    """

    def __init__(self, beta=1):
        super(SoftArgmax2D, self).__init__()
        # Softmax temperature: larger beta pushes the result toward a hard argmax.
        self.beta = beta

    def forward(self, hm):
        bs, nc, h, w = hm.size()
        # Spatial softmax per channel. (A redundant hm.squeeze() was removed
        # here: the subsequent .view() only depends on element count, so the
        # squeeze had no effect and merely obscured the tensor shape.)
        hm = hm.mul(self.beta)
        softmax = F.softmax(hm.view(bs, nc, h * w), dim=2).view(bs, nc, h, w)
        weights = torch.ones(bs, nc, h, w).float().to(hm.device)
        # Normalized coordinate grids: w_x[..., i, j] = j / w, w_y[..., i, j] = i / h.
        w_x = torch.arange(w).float().div(w)
        w_x = w_x.to(hm.device).mul(weights)
        w_y = torch.arange(h).float().div(h)
        w_y = w_y.to(hm.device).mul(weights.transpose(2, 3)).transpose(2, 3)
        # Expected coordinates under the softmax distribution.
        approx_x = softmax.mul(w_x).view(bs, nc, h * w).sum(2).unsqueeze(2)
        approx_y = softmax.mul(w_y).view(bs, nc, h * w).sum(2).unsqueeze(2)
        return torch.cat([approx_x, approx_y], 2)
class Hourglass(nn.Module):
    """Recursive hourglass encoder/decoder block.

    The lower branch max-pools, applies residual blocks (nesting another
    Hourglass while n > 1), then upsamples back to the input resolution
    and is added to the upper (skip) branch.
    """

    def __init__(self, n, hg_width, n_inp, n_out, upmode='nearest', multiscale_block=False, se=False, se_ratio=16):
        """
        :param n: recursion depth (n > 1 nests another Hourglass inside)
        :param hg_width: internal channel width
        :param n_inp: input channel count
        :param n_out: output channel count
        :param upmode: interpolation mode used for upsampling
        :param multiscale_block: use MultiScaleHGResidual instead of HGResidual
        :param se: add squeeze-and-excitation to the residual blocks
        :param se_ratio: SE channel reduction ratio
        """
        super(Hourglass, self).__init__()
        self.multiscale_block = multiscale_block
        self.upmode = upmode
        self.se = se
        self.se_ratio = se_ratio
        self.lower1 = self.__make_block(n_inp, hg_width)
        self.lower2 = self.__make_block(hg_width, hg_width)
        self.lower3 = self.__make_block(hg_width, hg_width)
        if n > 1:
            self.lower4 = Hourglass(n - 1, hg_width, hg_width, n_out, upmode, multiscale_block,
                                     se=se, se_ratio=se_ratio)
        else:
            self.lower4 = self.__make_block(hg_width, n_out)
        self.lower5 = self.__make_block(n_out, n_out)
        self.upper1 = self.__make_block(n_inp, hg_width)
        self.upper2 = self.__make_block(hg_width, hg_width)
        self.upper3 = self.__make_block(hg_width, n_out)

    def __make_block(self, inp, out):
        # Residual building block; multi-scale variant when requested.
        if self.multiscale_block:
            return MultiScaleHGResidual(inp, out, self.se, self.se_ratio)
        else:
            return HGResidual(inp, out, self.se, self.se_ratio)

    def forward(self, x):
        o_pooled = F.max_pool2d(x, 2)
        o1 = self.lower1(o_pooled)
        o2 = self.lower2(o1)
        o3 = self.lower3(o2)
        o4 = self.lower4(o3)
        o5 = self.lower5(o4)
        o1_u = self.upper1(x)
        o2_u = self.upper2(o1_u)
        o3_u = self.upper3(o2_u)
        # Bug fix: F.interpolate rejects align_corners for non-interpolating
        # modes, so passing it together with the default mode 'nearest'
        # raised a ValueError. Only pass align_corners when it is legal.
        if self.upmode in ('linear', 'bilinear', 'bicubic', 'trilinear'):
            o5_up = F.interpolate(o5, x.size()[-2:], mode=self.upmode, align_corners=True)
        else:
            o5_up = F.interpolate(o5, x.size()[-2:], mode=self.upmode)
        return o3_u + o5_up
|
[
"torch.nn.AdaptiveAvgPool2d",
"torch.ones",
"torch.nn.ReLU",
"torch.nn.Conv2d",
"torch.cat",
"deeppipeline.common.modules.conv_block_3x3",
"deeppipeline.common.modules.Identity",
"deeppipeline.common.modules.conv_block_1x1",
"torch.arange",
"torch.nn.functional.max_pool2d"
] |
[((813, 854), 'deeppipeline.common.modules.conv_block_1x1', 'conv_block_1x1', (['n_inp', '(n_out // 2)', '"""relu"""'], {}), "(n_inp, n_out // 2, 'relu')\n", (827, 854), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((875, 921), 'deeppipeline.common.modules.conv_block_3x3', 'conv_block_3x3', (['(n_out // 2)', '(n_out // 2)', '"""relu"""'], {}), "(n_out // 2, n_out // 2, 'relu')\n", (889, 921), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((941, 980), 'deeppipeline.common.modules.conv_block_1x1', 'conv_block_1x1', (['(n_out // 2)', 'n_out', 'None'], {}), '(n_out // 2, n_out, None)\n', (955, 980), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((1617, 1658), 'deeppipeline.common.modules.conv_block_3x3', 'conv_block_3x3', (['n_inp', '(n_out // 2)', '"""relu"""'], {}), "(n_inp, n_out // 2, 'relu')\n", (1631, 1658), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((1681, 1727), 'deeppipeline.common.modules.conv_block_3x3', 'conv_block_3x3', (['(n_out // 2)', '(n_out // 4)', '"""relu"""'], {}), "(n_out // 2, n_out // 4, 'relu')\n", (1695, 1727), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((1750, 1815), 'deeppipeline.common.modules.conv_block_3x3', 'conv_block_3x3', (['(n_out // 4)', '(n_out - n_out // 4 - n_out // 2)', 'None'], {}), '(n_out // 4, n_out - n_out // 4 - n_out // 2, None)\n', (1764, 1815), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((2148, 2174), 'torch.cat', 'torch.cat', (['[o1, o2, o3]', '(1)'], {}), '([o1, o2, o3], 1)\n', (2157, 2174), False, 'import torch\n'), ((3064, 3098), 'torch.cat', 'torch.cat', (['[approx_x, approx_y]', '(2)'], {}), '([approx_x, approx_y], 2)\n', (3073, 3098), False, 'import torch\n'), ((4373, 4391), 'torch.nn.functional.max_pool2d', 
'F.max_pool2d', (['x', '(2)'], {}), '(x, 2)\n', (4385, 4391), True, 'import torch.nn.functional as F\n'), ((294, 317), 'torch.nn.AdaptiveAvgPool2d', 'nn.AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (314, 317), False, 'from torch import nn\n'), ((355, 429), 'torch.nn.Conv2d', 'nn.Conv2d', (['n_features', '(n_features // r)'], {'kernel_size': '(1)', 'padding': '(0)', 'stride': '(1)'}), '(n_features, n_features // r, kernel_size=1, padding=0, stride=1)\n', (364, 429), False, 'from torch import nn\n'), ((467, 480), 'torch.nn.ReLU', 'nn.ReLU', (['(True)'], {}), '(True)\n', (474, 480), False, 'from torch import nn\n'), ((518, 592), 'torch.nn.Conv2d', 'nn.Conv2d', (['(n_features // r)', 'n_features'], {'kernel_size': '(1)', 'padding': '(0)', 'stride': '(1)'}), '(n_features // r, n_features, kernel_size=1, padding=0, stride=1)\n', (527, 592), False, 'from torch import nn\n'), ((1033, 1067), 'deeppipeline.common.modules.conv_block_1x1', 'conv_block_1x1', (['n_inp', 'n_out', 'None'], {}), '(n_inp, n_out, None)\n', (1047, 1067), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((1106, 1116), 'deeppipeline.common.modules.Identity', 'Identity', ([], {}), '()\n', (1114, 1116), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((1868, 1902), 'deeppipeline.common.modules.conv_block_1x1', 'conv_block_1x1', (['n_inp', 'n_out', 'None'], {}), '(n_inp, n_out, None)\n', (1882, 1902), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((1941, 1951), 'deeppipeline.common.modules.Identity', 'Identity', ([], {}), '()\n', (1949, 1951), False, 'from deeppipeline.common.modules import Identity, conv_block_3x3, conv_block_1x1\n'), ((2633, 2657), 'torch.ones', 'torch.ones', (['bs', 'nc', 'h', 'w'], {}), '(bs, nc, h, w)\n', (2643, 2657), False, 'import torch\n'), ((2694, 2709), 'torch.arange', 'torch.arange', (['w'], {}), '(w)\n', (2706, 2709), False, 'import 
torch\n'), ((2785, 2800), 'torch.arange', 'torch.arange', (['h'], {}), '(h)\n', (2797, 2800), False, 'import torch\n')]
|
# -*- coding: utf-8 -*-
"""
Module implementing MainWindow.
"""
import os
from .RadarUI import Ui_MainWindow
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from PyQt5 import QtWidgets
from PyQt5.QtCore import pyqtSlot
from PyQt5.QtWidgets import QMainWindow
from ..io import read_auto
from ..io.util import radar_format
from ..draw.SingleRadarPlot import RadarGraph
from ..draw.SingleRadarPlotMap import RadarGraphMap
from ..configure.location_config import last_open_dir
from glob import glob
import json
import sys
from .station_info import Ui_Dialog
from ..draw.VerticalSectionPlot import VerticalSection
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
field_name = ["dBZ", "V", "W", "ZDR", "KDP", "CC"]
class LineBuilder:
    """Interactive tool for picking a vertical cross-section on the PPI axes.

    Connected to 'button_press_event' (via __call__) and
    'motion_notify_event' (via mouse_move) on the given figure's canvas.
    The first click marks the start point, the second click marks the end
    point and opens the section window, a third click starts a new line.
    """
    def __init__(self, fig, ax, radar_data, product, map_bool):
        # fig/ax: matplotlib figure and axes of the current PPI plot.
        # radar_data: radar object handed to VerticalSection for extraction.
        # product: index into the module-level field_name list.
        # map_bool: True when the PPI is drawn on a map projection.
        self.ax = ax
        self.xs = []
        self.ys = []
        self.fig = fig
        self.map = map_bool
        # Keep connection ids so the owner can disconnect this tool later.
        self.cid = self.fig.canvas.mpl_connect('button_press_event', self)
        self.cursor = self.fig.canvas.mpl_connect('motion_notify_event', self.mouse_move)
        self.radar_dat = radar_data
        self.product = product
    def __call__(self, event):
        """Mouse-click handler collecting the two section endpoints."""
        if len(self.xs) < 2:
            self.xs.append(event.xdata)
            self.ys.append(event.ydata)
            if len(self.xs) == 1:
                # First endpoint: draw a red cross marker.
                self.start = self.ax.scatter(event.xdata, event.ydata, color="r", marker="+", s=60,
                                             zorder=len(self.xs)+10)
            else:
                # Second endpoint: draw marker + connecting line, then render
                # the vertical section into a new canvas window.
                self.end = self.ax.scatter(event.xdata, event.ydata, color="r", marker="+", s=60,
                                           zorder=len(self.xs) + 10)
                self.rline = self.ax.plot(self.xs, self.ys, color="r", linewidth=1, zorder=13)
                cv = FigureCanvas(Figure(figsize=(8, 6)))
                ax = cv.figure.add_axes([0.1, 0.3, 0.8, 0.6])
                cax = cv.figure.add_axes([0.1, 0.1, 0.8, 0.06])
                if not self.map:
                    # Coordinates are scaled by 1000 here (presumably km -> m
                    # for the section code) - TODO confirm against
                    # VerticalSection.GUI_section.
                    VerticalSection.GUI_section(cv.figure, ax, cax, self.radar_dat, [self.xs[0]*1000, self.ys[0]*1000],\
                                            [self.xs[1]*1000, self.ys[1]*1000], field_name[self.product])
                else:
                    # Map projection: coordinates are passed through as-is.
                    VerticalSection.GUI_section_map(cv.figure, ax, cax, self.radar_dat,
                                                [self.xs[0], self.ys[0]], \
                                                [self.xs[1], self.ys[1]], field_name[self.product])
                cv.show()
                self.fig.canvas.draw()
        else:
            # Third click: clear the finished line and start a new one here.
            self.rline[0].remove()
            self.start.remove()
            self.end.remove()
            self.xs = []
            self.ys = []
            self.xs.append(event.xdata)
            self.ys.append(event.ydata)
            self.start = self.ax.scatter(event.xdata, event.ydata, color="r", marker="+", s=60,
                                         zorder=len(self.xs) + 10)
            self.fig.canvas.draw()
    def mouse_move(self, event):
        """Draw a dashed rubber-band line from the first endpoint to the
        cursor while the second endpoint has not been chosen yet."""
        try:
            self.move_line[0].remove()
        except Exception:
            # No rubber-band line yet (or already removed) - nothing to do.
            pass
        if len(self.xs) == 1:
            self.move_line = self.ax.plot([self.xs[0], event.xdata], [self.ys[0], event.ydata], color="r",
                                          linewidth=1, linestyle="--", zorder=100)
            self.fig.canvas.draw()
class Dialog(QDialog, Ui_Dialog):
    """
    Modal dialog for editing the radar station position.

    On accept, the chosen values are stored in self.lon, self.lat and
    self.height. The line edits are: lineEdit = longitude,
    lineEdit_2 = latitude, lineEdit_3 = height.
    """

    def __init__(self, parent=None):
        """
        Constructor

        @param parent reference to the parent widget
        @type QWidget
        """
        super(Dialog, self).__init__(parent)
        self.setupUi(self)

    @pyqtSlot()
    def on_pushButton_clicked(self):
        """Accept: read lon/lat/height from the line edits and close."""
        self.lon = float(self.lineEdit.text())
        self.lat = float(self.lineEdit_2.text())
        self.height = float(self.lineEdit_3.text())
        self.close()

    @pyqtSlot()
    def on_pushButton_2_clicked(self):
        """Cancel: close the dialog without reading the fields."""
        self.close()

    @pyqtSlot()
    def on_toolButton_clicked(self):
        """Prompt for the station longitude and fill the longitude field."""
        lon, LonTrue = QInputDialog.getDouble(self, r"经度", "雷达站点经度(单位:度)", 131.3, -180, 180)
        if LonTrue:
            self.lineEdit.setText(str(lon))

    @pyqtSlot()
    def on_toolButton_2_clicked(self):
        """Prompt for the station latitude and fill the latitude field."""
        lat, LatTrue = QInputDialog.getDouble(self, r"纬度", "雷达站点纬度(单位:度)", 23, -90, 90)
        if LatTrue:
            # Bug fix: the latitude was previously written into lineEdit
            # (the longitude field); lineEdit_2 is the latitude field.
            self.lineEdit_2.setText(str(lat))

    @pyqtSlot()
    def on_toolButton_3_clicked(self):
        """Prompt for the station height and fill the height field."""
        height, HeightTrue = QInputDialog.getDouble(self, r"高度", "雷达站点高度(单位:米)", 57, -2000, 5000)
        if HeightTrue:
            # Bug fix: the height was previously written into lineEdit
            # (the longitude field); lineEdit_3 is the height field.
            self.lineEdit_3.setText(str(height))
class MainWindow(QMainWindow, Ui_MainWindow):
    """
    Main window of the radar viewer.

    Opens radar base-data files, lists the files of the current directory,
    draws PPI images (optionally on a map projection) and supports
    interactive vertical sections, file navigation and a simple animation
    loop over the directory.
    """

    def __init__(self, parent=None):
        """
        Constructor

        @param parent reference to the parent widget
        @type QWidget
        """
        super(MainWindow, self).__init__(parent)
        self.setupUi(self)
        self.lastOpenDir = self.open_last_opendir()
        self.radar_dat = None
        self.dualPOL = False
        self.openbasename = None
        self.files = None
        self.radar_type = None
        # Default station position; overwritten once a file is read.
        # Bug fix: latitude and longitude were swapped (a latitude of
        # 131.3 degrees is impossible; the station dialog defaults to
        # lon=131.3, lat=23).
        self.org_lat = 23
        self.org_lon = 131.3
        self.org_height = 57

    def open_last_opendir(self):
        """Return the directory that was open when the program last ran."""
        with open(last_open_dir, "r") as f:
            dir_dict = json.load(f)
        return dir_dict["lastOpenDir"]

    def write_last_opendir(self, filedir):
        """Persist the currently open directory to the JSON config file."""
        with open(last_open_dir, "w") as f:
            json.dump({"lastOpenDir": filedir}, f)

    def _replot_current(self):
        """Redraw the PPI for the current data with the current UI state."""
        if self.radar_dat is not None:
            self.plot_graph_PPI(self.radar_dat, self.find_level_in_groupBox(), self.find_var_in_groupBox(),
                                self.actionwithmap.isChecked(), self.actioncontinuous.isChecked())

    def _load_and_plot(self, filename):
        """Read *filename*; on success highlight it in the list and plot it."""
        self.radar_dat = self.Read_radar(filename)
        if self.radar_dat != 0:
            self.setSelected(filename)
            self.plot_graph_PPI(self.radar_dat, self.find_level_in_groupBox(), self.find_var_in_groupBox(),
                                self.actionwithmap.isChecked(), self.actioncontinuous.isChecked())

    def _step_file(self, step):
        """Open the file *step* positions away from the current one,
        wrapping around at both ends of the file list."""
        if self.files is not None:
            items = self.listWidget.findItems(self.openbasename, Qt.MatchExactly)
            row = self.listWidget.row(items[0])
            res_row = (row + step) % len(self.files)
            self._load_and_plot(self.lastOpenDir + os.sep + self.files[res_row])

    @pyqtSlot()
    def on_actionvertical_changed(self):
        """Toggle the interactive vertical-section drawing tool."""
        if self.actionvertical.isChecked():
            try:
                self.linebuilder = LineBuilder(self.fig, self.ax, self.radar_dat, self.find_var_in_groupBox(),
                                             self.actionwithmap.isChecked())
                self.clickevent = True
            except AttributeError:
                # Nothing plotted yet, so there is no figure to attach to.
                pass
        else:
            self.fig.canvas.mpl_disconnect(self.linebuilder.cid)
            self.fig.canvas.mpl_disconnect(self.linebuilder.cursor)
            self.linebuilder.rline[0].remove()
            self.linebuilder.start.remove()
            self.linebuilder.end.remove()
            self.fig.canvas.draw()

    @pyqtSlot()
    def on_actionwithmap_changed(self):
        """Map-projection toggle; its state is read when plotting."""
        pass

    @pyqtSlot()
    def on_actioncontinuous_changed(self):
        """Continuous-colour-scale toggle; its state is read when plotting."""
        pass

    def Read_radar(self, filename):
        """Read a radar file with the automatic format reader.

        Returns the radar object on success and 0 when the format is not
        recognised (callers test ``!= 0``). Also updates the station
        position and shows/hides the dual-polarisation products.
        """
        if radar_format(filename) is not None:
            NRadar = read_auto(filename)
            self.org_lat = NRadar.scan_info.latitude.values
            self.org_lon = NRadar.scan_info.longitude.values
            self.org_height = NRadar.scan_info.altitude.values
            if "KDP" in NRadar.fields[0].keys():
                self.open_dual()
            else:
                self.close_non_dual()
            return NRadar
        else:
            QMessageBox.warning(self, "数据错误警告", "非SA/SB/CA/CB/98D/CC/CCJ/SC/CD数据",
                                QMessageBox.Yes)
            return 0

    def close_non_dual(self):
        """Hide the dual-polarisation product buttons."""
        self.radioButton_13.hide()
        self.radioButton_14.hide()
        self.radioButton_15.hide()

    def open_dual(self):
        """Show the dual-polarisation product buttons."""
        self.radioButton_13.show()
        self.radioButton_14.show()
        self.radioButton_15.show()

    def setSelected(self, filename):
        """Highlight *filename* in the file list widget."""
        basename = os.path.basename(filename)
        self.openbasename = basename
        items = self.listWidget.findItems(basename, Qt.MatchExactly)
        if len(items) > 0:
            for item in items:
                self.listWidget.setCurrentItem(item)

    def import_basedat(self, direc):
        """Collect all radar file names in *direc*, remember the directory,
        and return the basenames as a list."""
        self.lastOpenDir = direc
        self.write_last_opendir(direc)
        extensions = ["*.*A", "*.*V", "*.bz2", "*.bin",
                      "*.AR2", "*.gz", ".GZ"]
        files = []
        for iextend in extensions:
            files.extend(glob(os.path.join(direc, iextend)))
        return [os.path.basename(ifile) for ifile in files]

    def add_listwidget(self, files):
        """Fill the file list widget with *files*."""
        self.listWidget.clear()
        for item in files:
            self.listWidget.addItem(item)

    @pyqtSlot(QListWidgetItem)
    def on_listWidget_itemDoubleClicked(self, item):
        """Open the radar file that was double-clicked in the list.

        @param item the activated list entry
        @type QListWidgetItem
        """
        self._load_and_plot(self.lastOpenDir + os.sep + item.text())

    @pyqtSlot()
    def on_actionopen_2_triggered(self):
        """Open a single radar base-data file chosen via a file dialog."""
        if self.lastOpenDir and os.path.exists(self.lastOpenDir):
            defaultOpenDirPath = self.lastOpenDir
        else:
            defaultOpenDirPath = '.'
        filename = QFileDialog.getOpenFileName(self, "打开一个雷达基数据", defaultOpenDirPath,
                                                "天气雷达基数据(*bin *bz2 *A *V *BIN *BZ2 *AR2 *GZ *gz)")
        ReadFile = filename[0]
        if ReadFile.strip() == "":
            return
        PathDir = os.path.dirname(ReadFile)
        self.files = self.import_basedat(PathDir)
        self.add_listwidget(self.files)
        self._load_and_plot(ReadFile)

    @pyqtSlot()
    def on_actionopendir_2_triggered(self):
        """Open a directory and list all radar files found inside it."""
        if self.lastOpenDir and os.path.exists(self.lastOpenDir):
            defaultOpenDirPath = self.lastOpenDir
        else:
            defaultOpenDirPath = '.'
        self.targetDirPath = QFileDialog.getExistingDirectory(self, "打开新一代天气雷达数据文件夹",
                                                         defaultOpenDirPath,
                                                         QFileDialog.ShowDirsOnly | QFileDialog.DontResolveSymlinks)
        if self.targetDirPath.strip() == '':
            return
        self.files = self.import_basedat(self.targetDirPath)
        self.add_listwidget(self.files)

    @pyqtSlot()
    def on_actionquit_2_triggered(self):
        """Quit the application."""
        sys.exit(0)

    @pyqtSlot()
    def on_actionstation_triggered(self):
        """Edit the station position (lon/lat/height) in a modal dialog."""
        self.my_info = Dialog()
        self.my_info.lineEdit.setText(str(self.org_lon))
        self.my_info.lineEdit_2.setText(str(self.org_lat))
        self.my_info.lineEdit_3.setText(str(self.org_height))
        self.my_info.lat = self.org_lat
        self.my_info.lon = self.org_lon
        self.my_info.height = self.org_height
        self.my_info.exec_()
        self.org_lat = self.my_info.lat
        self.org_lon = self.my_info.lon
        self.org_height = self.my_info.height

    def find_checked_radiobutton(self, radiobuttons):
        """Return the text of the first checked button, or None."""
        for button in radiobuttons:
            if button.isChecked():
                return button.text()
        return None

    def find_level_in_groupBox(self):
        """Return the selected elevation index (0-8), defaulting to 0."""
        level = self.find_checked_radiobutton(self.groupBox.findChildren(QtWidgets.QRadioButton))
        levels = ["第1层", "第2层", "第3层",
                  "第4层", "第5层", "第6层",
                  "第7层", "第8层", "第9层"]
        for i in range(9):
            if level == levels[i]:
                return i
        return 0

    def find_var_in_groupBox(self):
        """Return the selected product index (0-5), defaulting to 0."""
        var = self.find_checked_radiobutton(self.groupBox_2.findChildren(QtWidgets.QRadioButton))
        var_names = ["反射率因子", "径向速度", "谱宽", "差分反射率", "差分相位比", "相关系数"]
        for i in range(6):
            if var == var_names[i]:
                return i
        return 0

    def plot_graph_PPI(self, radar, level, product, map, continuously):
        """Draw a PPI image of *radar*.

        :param level: elevation index
        :param product: product index into the module-level field_name list
        :param map: draw on a map projection when True
        :param continuously: use a continuous colour scale when True
        """
        self.MplWidget.canvas.update()
        self.MplWidget.canvas.flush_events()
        try:
            self.fig.clf()
            self.ax.clear()
            self.cax.clear()
        except AttributeError:
            # First plot: there is no previous figure to clear.
            pass
        if not map:
            self.fig, self.ax, self.cax = self.MplWidget.canvas.get_fig_ax()
            self.ax.set_facecolor((0.95, 0.95, 0.95))
            self.pm = RadarGraph.GUI_plot(radar, self.fig, self.ax, self.cax, level,
                                          field_name[product], continuously=continuously)
        else:
            self.fig, self.ax, self.cax = self.MplWidget.canvas.get_fig_ax_map()
            self.ax.set_facecolor((0.95, 0.95, 0.95))
            self.pm = RadarGraphMap.GUI_plot(radar, self.fig, self.ax, self.cax, level,
                                             field_name[product], continuously=continuously)
        self.ax.tick_params(axis="y", which="both", direction='in')
        self.ax.tick_params(axis="x", which="both", direction='in')
        self.MplWidget.canvas.draw()
        if self.actionvertical.isChecked():  # re-bind the section tool to the new figure
            try:
                self.fig.canvas.mpl_disconnect(self.linebuilder.cid)
                self.fig.canvas.mpl_disconnect(self.linebuilder.cursor)
                self.linebuilder = LineBuilder(self.fig, self.ax, self.radar_dat, self.find_var_in_groupBox(),
                                             self.actionwithmap.isChecked())
                self.clickevent = True
            except AttributeError:
                pass

    @pyqtSlot()
    def on_pushButton_clicked(self):
        """Show the previous file of the directory (wraps around)."""
        self._step_file(-1)

    @pyqtSlot()
    def on_pushButton_2_clicked(self):
        """Animation: plot every file from the current one to the end."""
        if self.files is not None:
            items = self.listWidget.findItems(self.openbasename, Qt.MatchExactly)
            row = self.listWidget.row(items[0])
            for irow in range(row, len(self.files)):
                self._load_and_plot(os.path.join(self.lastOpenDir, self.files[irow]))

    @pyqtSlot()
    def on_pushButton_3_clicked(self):
        """Show the next file of the directory (wraps around)."""
        self._step_file(1)

    @pyqtSlot()
    def on_radioButton_15_clicked(self):
        """Product selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_12_clicked(self):
        """Product selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_14_clicked(self):
        """Product selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_10_clicked(self):
        """Product selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_13_clicked(self):
        """Product selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_11_clicked(self):
        """Product selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_2_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_4_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_5_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_3_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_1_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_7_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_8_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_6_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()

    @pyqtSlot()
    def on_radioButton_9_clicked(self):
        """Elevation selection changed: redraw."""
        self._replot_current()
if __name__ == "__main__":
    # Start the Qt application, show the main window and enter the event
    # loop; the loop's exit status becomes the process exit code.
    app = QtWidgets.QApplication(sys.argv)
    ui = MainWindow()
    ui.show()
    sys.exit(app.exec_())
|
[
"json.dump",
"json.load",
"os.path.basename",
"os.path.dirname",
"os.path.exists",
"matplotlib.figure.Figure",
"PyQt5.QtCore.pyqtSlot",
"PyQt5.QtWidgets.QApplication",
"os.path.join",
"sys.exit"
] |
[((3787, 3797), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (3795, 3797), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((4072, 4082), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (4080, 4082), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((4211, 4221), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (4219, 4221), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((4484, 4494), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (4492, 4494), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((4790, 4800), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (4798, 4800), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((6086, 6096), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (6094, 6096), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((6828, 6838), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (6836, 6838), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((6960, 6970), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (6968, 6970), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((8998, 9023), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', (['QListWidgetItem'], {}), '(QListWidgetItem)\n', (9006, 9023), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((9604, 9614), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (9612, 9614), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((10623, 10633), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (10631, 10633), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((11370, 11380), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (11378, 11380), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((11510, 11520), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (11518, 11520), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((14697, 14707), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (14705, 14707), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((15535, 15545), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (15543, 15545), False, 'from PyQt5.QtCore 
import pyqtSlot\n'), ((16357, 16367), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (16365, 16367), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((17194, 17204), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (17202, 17204), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((17560, 17570), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (17568, 17570), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((17926, 17936), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (17934, 17936), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((18292, 18302), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (18300, 18302), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((18658, 18668), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (18666, 18668), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((19024, 19034), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (19032, 19034), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((19390, 19400), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (19398, 19400), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((19755, 19765), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (19763, 19765), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((20120, 20130), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (20128, 20130), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((20485, 20495), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (20493, 20495), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((20850, 20860), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (20858, 20860), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((21215, 21225), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (21223, 21225), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((21580, 21590), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (21588, 21590), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((21945, 21955), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (21953, 21955), 
False, 'from PyQt5.QtCore import pyqtSlot\n'), ((22310, 22320), 'PyQt5.QtCore.pyqtSlot', 'pyqtSlot', ([], {}), '()\n', (22318, 22320), False, 'from PyQt5.QtCore import pyqtSlot\n'), ((22708, 22740), 'PyQt5.QtWidgets.QApplication', 'QtWidgets.QApplication', (['sys.argv'], {}), '(sys.argv)\n', (22730, 22740), False, 'from PyQt5 import QtWidgets\n'), ((8126, 8152), 'os.path.basename', 'os.path.basename', (['filename'], {}), '(filename)\n', (8142, 8152), False, 'import os\n'), ((10172, 10197), 'os.path.dirname', 'os.path.dirname', (['ReadFile'], {}), '(ReadFile)\n', (10187, 10197), False, 'import os\n'), ((11492, 11503), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (11500, 11503), False, 'import sys\n'), ((5860, 5872), 'json.load', 'json.load', (['f'], {}), '(f)\n', (5869, 5872), False, 'import json\n'), ((6042, 6080), 'json.dump', 'json.dump', (["{'lastOpenDir': filedir}", 'f'], {}), "({'lastOpenDir': filedir}, f)\n", (6051, 6080), False, 'import json\n'), ((8775, 8798), 'os.path.basename', 'os.path.basename', (['ifile'], {}), '(ifile)\n', (8791, 8798), False, 'import os\n'), ((9750, 9782), 'os.path.exists', 'os.path.exists', (['self.lastOpenDir'], {}), '(self.lastOpenDir)\n', (9764, 9782), False, 'import os\n'), ((10772, 10804), 'os.path.exists', 'os.path.exists', (['self.lastOpenDir'], {}), '(self.lastOpenDir)\n', (10786, 10804), False, 'import os\n'), ((8698, 8726), 'os.path.join', 'os.path.join', (['direc', 'iextend'], {}), '(direc, iextend)\n', (8710, 8726), False, 'import os\n'), ((1880, 1902), 'matplotlib.figure.Figure', 'Figure', ([], {'figsize': '(8, 6)'}), '(figsize=(8, 6))\n', (1886, 1902), False, 'from matplotlib.figure import Figure\n'), ((15955, 16003), 'os.path.join', 'os.path.join', (['self.lastOpenDir', 'self.files[irow]'], {}), '(self.lastOpenDir, self.files[irow])\n', (15967, 16003), False, 'import os\n')]
|
import torch
import torch.nn as nn
from models.SpectralNorm import set_spectral_norm
class FCDiscriminator(nn.Module):
    def __init__(self, input_dim, spectral_norm=True, preprocess_func=None):
        '''
        Fully connected discriminator network
        :param input_dim: Number of inputs
        :param spectral_norm: Whether to use spectral normalisation
        :param preprocess_func: Function that preprocesses the input before feeding to the network
        '''
        super(FCDiscriminator, self).__init__()
        self.preprocess_func = preprocess_func
        # Three linear layers, each optionally wrapped in spectral norm.
        self.fc1 = set_spectral_norm(nn.Linear(input_dim, 128), spectral_norm)
        self.fc2 = set_spectral_norm(nn.Linear(128, 128), spectral_norm)
        self.fc3 = set_spectral_norm(nn.Linear(128, 1), spectral_norm)
        self.activation = nn.LeakyReLU()
        # NOTE(review): kept for interface compatibility, but forward()
        # never applies it - the network returns raw scores.
        self.output_activation = nn.Sigmoid()

    def forward(self, input):
        '''Return one score per sample, shape (batch,).'''
        h = input if self.preprocess_func is None else self.preprocess_func(input)
        h = self.activation(self.fc1(h))
        h = self.activation(self.fc2(h))
        scores = self.fc3(h)
        return torch.squeeze(scores, 1)
|
[
"torch.squeeze",
"torch.nn.Linear",
"torch.nn.LeakyReLU",
"torch.nn.Sigmoid"
] |
[((823, 837), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {}), '()\n', (835, 837), True, 'import torch.nn as nn\n'), ((871, 883), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (881, 883), True, 'import torch.nn as nn\n'), ((1158, 1177), 'torch.squeeze', 'torch.squeeze', (['x', '(1)'], {}), '(x, 1)\n', (1171, 1177), False, 'import torch\n'), ((611, 636), 'torch.nn.Linear', 'nn.Linear', (['input_dim', '(128)'], {}), '(input_dim, 128)\n', (620, 636), True, 'import torch.nn as nn\n'), ((690, 709), 'torch.nn.Linear', 'nn.Linear', (['(128)', '(128)'], {}), '(128, 128)\n', (699, 709), True, 'import torch.nn as nn\n'), ((763, 780), 'torch.nn.Linear', 'nn.Linear', (['(128)', '(1)'], {}), '(128, 1)\n', (772, 780), True, 'import torch.nn as nn\n')]
|
#!/usr/bin/env python
import os, logging, gym
from baselines import logger
from baselines.common import set_global_seeds
from baselines import bench
from baselines.a2c.a2c import learn
from baselines.common.vec_env.subproc_vec_env import SubprocVecEnv
from baselines.common.atari_wrappers import make_atari, wrap_deepmind
from baselines.a2c.policies import CnnPolicy, LstmPolicy, LnLstmPolicy
def train(env_id, num_timesteps, seed, policy, lrschedule, num_cpu):
    """Train an A2C agent on the Atari environment *env_id*.

    :param env_id: Gym environment id (e.g. 'BreakoutNoFrameskip-v4')
    :param num_timesteps: total environment steps (learn() runs 1.1x this)
    :param seed: RNG seed; the worker rank is added per sub-process
    :param policy: one of 'cnn', 'lstm', 'lnlstm'
    :param lrschedule: learning-rate schedule ('constant' or 'linear')
    :param num_cpu: number of parallel worker environments
    :raises ValueError: if *policy* is not a known architecture
    """
    def make_env(rank):
        # Build a thunk creating one monitored, seeded Atari environment.
        def _thunk():
            env = make_atari(env_id)
            env.seed(seed + rank)
            env = bench.Monitor(env, logger.get_dir() and os.path.join(logger.get_dir(), str(rank)))
            gym.logger.setLevel(logging.WARN)
            return wrap_deepmind(env)
        return _thunk
    set_global_seeds(seed)
    env = SubprocVecEnv([make_env(i) for i in range(num_cpu)])
    if policy == 'cnn':
        policy_fn = CnnPolicy
    elif policy == 'lstm':
        policy_fn = LstmPolicy
    elif policy == 'lnlstm':
        policy_fn = LnLstmPolicy
    else:
        # Bug fix: an unknown policy previously caused an UnboundLocalError
        # at the learn() call below; fail early with a clear message.
        raise ValueError("Unknown policy: {}".format(policy))
    learn(policy_fn, env, seed, total_timesteps=int(num_timesteps * 1.1), lrschedule=lrschedule)
    env.close()
def main():
    """Parse CLI arguments, configure logging, and launch A2C training."""
    import argparse
    arg_parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    arg_parser.add_argument('--env', help='environment ID', default='BreakoutNoFrameskip-v4')
    arg_parser.add_argument('--seed', help='RNG seed', type=int, default=0)
    arg_parser.add_argument('--policy', help='Policy architecture', choices=['cnn', 'lstm', 'lnlstm'], default='cnn')
    arg_parser.add_argument('--lrschedule', help='Learning rate schedule', choices=['constant', 'linear'], default='constant')
    arg_parser.add_argument('--num-timesteps', type=int, default=int(10e6))
    parsed = arg_parser.parse_args()
    logger.configure()
    # num_cpu is fixed at 16 worker environments.
    train(parsed.env, num_timesteps=parsed.num_timesteps, seed=parsed.seed,
          policy=parsed.policy, lrschedule=parsed.lrschedule, num_cpu=16)
if __name__ == '__main__':
    main()
|
[
"gym.logger.setLevel",
"baselines.common.atari_wrappers.make_atari",
"argparse.ArgumentParser",
"baselines.logger.get_dir",
"baselines.common.atari_wrappers.wrap_deepmind",
"baselines.common.set_global_seeds",
"baselines.logger.configure"
] |
[((791, 813), 'baselines.common.set_global_seeds', 'set_global_seeds', (['seed'], {}), '(seed)\n', (807, 813), False, 'from baselines.common import set_global_seeds\n'), ((1210, 1289), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), '(formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n', (1233, 1289), False, 'import argparse\n'), ((1796, 1814), 'baselines.logger.configure', 'logger.configure', ([], {}), '()\n', (1812, 1814), False, 'from baselines import logger\n'), ((527, 545), 'baselines.common.atari_wrappers.make_atari', 'make_atari', (['env_id'], {}), '(env_id)\n', (537, 545), False, 'from baselines.common.atari_wrappers import make_atari, wrap_deepmind\n'), ((693, 726), 'gym.logger.setLevel', 'gym.logger.setLevel', (['logging.WARN'], {}), '(logging.WARN)\n', (712, 726), False, 'import os, logging, gym\n'), ((746, 764), 'baselines.common.atari_wrappers.wrap_deepmind', 'wrap_deepmind', (['env'], {}), '(env)\n', (759, 764), False, 'from baselines.common.atari_wrappers import make_atari, wrap_deepmind\n'), ((617, 633), 'baselines.logger.get_dir', 'logger.get_dir', ([], {}), '()\n', (631, 633), False, 'from baselines import logger\n'), ((651, 667), 'baselines.logger.get_dir', 'logger.get_dir', ([], {}), '()\n', (665, 667), False, 'from baselines import logger\n')]
|
#!/usr/bin/python
#
# Copyright (c) 2016 <NAME>, <<EMAIL>>
# <NAME>, <<EMAIL>>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_virtualnetwork
version_added: "2.1"
short_description: Manage Azure virtual networks
description:
- Create, update or delete a virtual networks. Allows setting and updating the available IPv4 address ranges
and setting custom DNS servers. Use the M(azure_rm_subnet) module to associate subnets with a virtual network.
options:
resource_group:
description:
- Name of resource group.
required: true
address_prefixes_cidr:
description:
- List of IPv4 address ranges where each is formatted using CIDR notation.
- Required when creating a new virtual network or using I(purge_address_prefixes).
aliases:
- address_prefixes
dns_servers:
description:
- Custom list of DNS servers. Maximum length of two.
- The first server in the list will be treated as the Primary server. This is an explicit list.
- Existing DNS servers will be replaced with the specified list.
- Use the I(purge_dns_servers) option to remove all custom DNS servers and revert to default Azure servers.
location:
description:
- Valid Azure location. Defaults to location of the resource group.
name:
description:
- Name of the virtual network.
required: true
purge_address_prefixes:
description:
- Use with I(state=present) to remove any existing I(address_prefixes).
type: bool
default: 'no'
aliases:
- purge
purge_dns_servers:
description:
- Use with I(state=present) to remove existing DNS servers, reverting to default Azure servers. Mutually exclusive with DNS servers.
type: bool
default: 'no'
state:
description:
- State of the virtual network. Use C(present) to create or update and C(absent) to delete.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- <NAME> (@chouseknecht)
- <NAME> (@nitzmahone)
'''
EXAMPLES = '''
- name: Create a virtual network
azure_rm_virtualnetwork:
resource_group: myResourceGroup
name: myVirtualNetwork
address_prefixes_cidr:
- "10.1.0.0/16"
- "172.16.31.10/16"
dns_servers:
- "127.0.0.1"
- "127.0.0.2"
tags:
testing: testing
delete: on-exit
- name: Delete a virtual network
azure_rm_virtualnetwork:
resource_group: myResourceGroup
name: myVirtualNetwork
state: absent
'''
RETURN = '''
state:
description:
- Current state of the virtual network.
returned: always
type: complex
contains:
address_prefixes:
description:
- The virtual network IPv4 address ranges.
returned: always
type: list
sample: [
"10.1.0.0/16",
"172.16.31.10/16"
]
dns_servers:
description:
- DNS servers.
returned: always
type: list
sample: [
"127.0.0.1",
"127.0.0.3"
]
etag:
description:
- A unique read-only string that changes whenever the resource is update.
returned: always
type: str
sample: 'W/"0712e87c-f02f-4bb3-8b9e-2da0390a3886"'
id:
description:
- Resource ID.
returned: always
type: str
sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroup/myResourceGroup/providers/
Microsoft.Network/virtualNetworks/myVirtualNetwork"
location:
description:
- The Geo-location where the resource lives.
returned: always
type: str
sample: eastus
name:
description:
- Resource name.
returned: always
type: str
sample: my_test_network
provisioning_state:
description:
- Provisioning state of the virtual network.
returned: always
type: str
sample: Succeeded
tags:
description:
- Resource tags, such as { 'tags1':'value1' }
returned: always
type: dict
sample: { 'key1':'value1' }
type:
descriptioin:
- Resource type.
returned: always
type: str
sample: Microsoft.Network/virtualNetworks
'''
try:
from msrestazure.azure_exceptions import CloudError
except ImportError:
# This is handled in azure_rm_common
pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase, CIDR_PATTERN
def virtual_network_to_dict(vnet):
    """Serialise an Azure VirtualNetwork SDK object into a plain dict.

    :param vnet: VirtualNetwork object as returned by the Azure SDK.
    :return: dict with id/name/location/type/tags/provisioning_state/etag,
        plus 'dns_servers' and 'address_prefixes' lists when the vnet has
        any configured.
    """
    serialised = dict(
        id=vnet.id,
        name=vnet.name,
        location=vnet.location,
        type=vnet.type,
        tags=vnet.tags,
        provisioning_state=vnet.provisioning_state,
        etag=vnet.etag
    )
    dhcp = vnet.dhcp_options
    if dhcp and len(dhcp.dns_servers) > 0:
        # Copy rather than alias the SDK's list.
        serialised['dns_servers'] = [server for server in dhcp.dns_servers]
    space = vnet.address_space
    if space and len(space.address_prefixes) > 0:
        serialised['address_prefixes'] = [prefix for prefix in space.address_prefixes]
    return serialised
class AzureRMVirtualNetwork(AzureRMModuleBase):
    """Ansible module implementation managing an Azure virtual network.

    Reconciles the requested state (address prefixes, DNS servers, tags,
    presence/absence) against the live vnet and creates, updates or
    deletes it accordingly via ``exec_module``.
    """
    def __init__(self):
        # Argument spec mirrors the options in the module DOCUMENTATION.
        self.module_arg_spec = dict(
            resource_group=dict(type='str', required=True),
            name=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['present', 'absent']),
            location=dict(type='str'),
            address_prefixes_cidr=dict(type='list', aliases=['address_prefixes']),
            dns_servers=dict(type='list',),
            purge_address_prefixes=dict(type='bool', default=False, aliases=['purge']),
            purge_dns_servers=dict(type='bool', default=False),
        )
        # Supplying DNS servers and purging them are contradictory requests.
        mutually_exclusive = [
            ('dns_servers', 'purge_dns_servers')
        ]
        # Purging prefixes requires a replacement list to be supplied.
        required_if = [
            ('purge_address_prefixes', True, ['address_prefixes_cidr'])
        ]
        self.resource_group = None
        self.name = None
        self.state = None
        self.location = None
        self.address_prefixes_cidr = None
        self.purge_address_prefixes = None
        self.dns_servers = None
        self.purge_dns_servers = None
        self.results = dict(
            changed=False,
            state=dict()
        )
        super(AzureRMVirtualNetwork, self).__init__(self.module_arg_spec,
                                                    mutually_exclusive=mutually_exclusive,
                                                    required_if=required_if,
                                                    supports_check_mode=True)
    def exec_module(self, **kwargs):
        """Entry point invoked by the base class with resolved parameters.

        :return: standard Ansible result dict (``changed`` flag plus the
            serialised vnet under ``state``).
        """
        # Copy every module parameter (plus 'tags') onto self.
        for key in list(self.module_arg_spec.keys()) + ['tags']:
            setattr(self, key, kwargs[key])
        self.results['check_mode'] = self.check_mode
        resource_group = self.get_resource_group(self.resource_group)
        if not self.location:
            # Set default location
            self.location = resource_group.location
        # NOTE(review): CIDR validation only runs when purge_address_prefixes
        # is set; prefixes added without purging skip this check — confirm
        # that is intended.
        if self.state == 'present' and self.purge_address_prefixes:
            for prefix in self.address_prefixes_cidr:
                if not CIDR_PATTERN.match(prefix):
                    self.fail("Parameter error: invalid address prefix value {0}".format(prefix))
        if self.dns_servers and len(self.dns_servers) > 2:
            self.fail("Parameter error: You can provide a maximum of 2 DNS servers.")
        changed = False
        results = dict()
        try:
            self.log('Fetching vnet {0}'.format(self.name))
            vnet = self.network_client.virtual_networks.get(self.resource_group, self.name)
            # vnet exists: diff requested configuration against live state.
            results = virtual_network_to_dict(vnet)
            self.log('Vnet exists {0}'.format(self.name))
            self.log(results, pretty_print=True)
            self.check_provisioning_state(vnet, self.state)
            if self.state == 'present':
                if self.address_prefixes_cidr:
                    existing_address_prefix_set = set(vnet.address_space.address_prefixes)
                    requested_address_prefix_set = set(self.address_prefixes_cidr)
                    missing_prefixes = requested_address_prefix_set - existing_address_prefix_set
                    extra_prefixes = existing_address_prefix_set - requested_address_prefix_set
                    if len(missing_prefixes) > 0:
                        self.log('CHANGED: there are missing address_prefixes')
                        changed = True
                        if not self.purge_address_prefixes:
                            # add the missing prefixes
                            for prefix in missing_prefixes:
                                results['address_prefixes'].append(prefix)
                    if len(extra_prefixes) > 0 and self.purge_address_prefixes:
                        self.log('CHANGED: there are address_prefixes to purge')
                        changed = True
                        # replace existing address prefixes with requested set
                        results['address_prefixes'] = self.address_prefixes_cidr
                update_tags, results['tags'] = self.update_tags(results['tags'])
                if update_tags:
                    changed = True
                if self.dns_servers:
                    # DNS servers are an explicit replacement list, per docs.
                    existing_dns_set = set(vnet.dhcp_options.dns_servers) if vnet.dhcp_options else set([])
                    requested_dns_set = set(self.dns_servers)
                    if existing_dns_set != requested_dns_set:
                        self.log('CHANGED: replacing DNS servers')
                        changed = True
                        results['dns_servers'] = self.dns_servers
                if self.purge_dns_servers and vnet.dhcp_options and len(vnet.dhcp_options.dns_servers) > 0:
                    self.log('CHANGED: purging existing DNS servers')
                    changed = True
                    results['dns_servers'] = []
            elif self.state == 'absent':
                self.log("CHANGED: vnet exists but requested state is 'absent'")
                changed = True
        except CloudError:
            # GET raised: the vnet does not exist yet.
            self.log('Vnet {0} does not exist'.format(self.name))
            if self.state == 'present':
                self.log("CHANGED: vnet {0} does not exist but requested state is 'present'".format(self.name))
                changed = True
        self.results['changed'] = changed
        self.results['state'] = results
        if self.check_mode:
            # Report the computed diff without touching Azure.
            return self.results
        if changed:
            if self.state == 'present':
                if not results:
                    # create a new virtual network
                    self.log("Create virtual network {0}".format(self.name))
                    if not self.address_prefixes_cidr:
                        self.fail('Parameter error: address_prefixes_cidr required when creating a virtual network')
                    vnet_param = self.network_models.VirtualNetwork(
                        location=self.location,
                        address_space=self.network_models.AddressSpace(
                            address_prefixes=self.address_prefixes_cidr
                        )
                    )
                    if self.dns_servers:
                        vnet_param.dhcp_options = self.network_models.DhcpOptions(
                            dns_servers=self.dns_servers
                        )
                    if self.tags:
                        vnet_param.tags = self.tags
                    self.results['state'] = self.create_or_update_vnet(vnet_param)
                else:
                    # update existing virtual network
                    self.log("Update virtual network {0}".format(self.name))
                    vnet_param = self.network_models.VirtualNetwork(
                        location=results['location'],
                        address_space=self.network_models.AddressSpace(
                            address_prefixes=results['address_prefixes']
                        ),
                        tags=results['tags'],
                        subnets=vnet.subnets
                    )
                    if results.get('dns_servers'):
                        vnet_param.dhcp_options = self.network_models.DhcpOptions(
                            dns_servers=results['dns_servers']
                        )
                    self.results['state'] = self.create_or_update_vnet(vnet_param)
            elif self.state == 'absent':
                self.delete_virtual_network()
                self.results['state']['status'] = 'Deleted'
        return self.results
    def create_or_update_vnet(self, vnet):
        """Submit a create/update request and return the resulting vnet dict.

        :param vnet: VirtualNetwork model instance to submit.
        """
        try:
            poller = self.network_client.virtual_networks.create_or_update(self.resource_group, self.name, vnet)
            new_vnet = self.get_poller_result(poller)
        except Exception as exc:
            self.fail("Error creating or updating virtual network {0} - {1}".format(self.name, str(exc)))
        return virtual_network_to_dict(new_vnet)
    def delete_virtual_network(self):
        """Delete the virtual network, waiting for the async operation."""
        try:
            poller = self.network_client.virtual_networks.delete(self.resource_group, self.name)
            result = self.get_poller_result(poller)
        except Exception as exc:
            self.fail("Error deleting virtual network {0} - {1}".format(self.name, str(exc)))
        return result
def main():
    """Module entry point; instantiating AzureRMVirtualNetwork runs the module."""
    AzureRMVirtualNetwork()
if __name__ == '__main__':
    main()
|
[
"ansible.module_utils.azure_rm_common.CIDR_PATTERN.match"
] |
[((8368, 8394), 'ansible.module_utils.azure_rm_common.CIDR_PATTERN.match', 'CIDR_PATTERN.match', (['prefix'], {}), '(prefix)\n', (8386, 8394), False, 'from ansible.module_utils.azure_rm_common import AzureRMModuleBase, CIDR_PATTERN\n')]
|
import tkinter
from tkinter import ttk
from tkinter import font
import threading
import time
import collections
import likeyoubot_worker
import queue
import pickle
import sys
import os
import likeyoubot_message
import likeyoubot_rohan as LYBROHAN
from likeyoubot_configure import LYBConstant as lybconstant
import datetime
import copy
import webbrowser
import likeyoubot_license
from belfrywidgets import ToolTip
from PIL import Image, ImageTk, ImageGrab
import likeyoubot_rest
import likeyoubot_logger
import traceback
import random
import string
import shutil
import requests
from subprocess import Popen, PIPE
# GUI accent colour; `brightness` is a luma-style weighted sum (the standard
# 0.299/0.587/0.114 RGB weights) used elsewhere to pick readable text colour.
ct = [255, 255, 0]
brightness = int(round(0.299 * ct[0] + 0.587 * ct[1] + 0.114 * ct[2]))
ct_hex = "%02x%02x%02x" % (ct[0], ct[1], ct[2])
bg_colour = '#' + ct_hex
def resource_path(relative):
    """Resolve *relative* against the application's resource base directory.

    When running from a frozen bundle the ``_MEIPASS2`` environment variable
    (set by PyInstaller-style launchers) points at the unpack directory;
    otherwise the current working directory is used.
    """
    base_dir = os.environ.get("_MEIPASS2", os.path.abspath("."))
    return os.path.join(base_dir, relative)
class LYBGUI:
def __init__(self, master, configure, httplogin=None):
self.master = master
self.configure = configure
self.logger = likeyoubot_logger.LYBLogger.getLogger()
try:
self.log_fp = open(likeyoubot_logger.LYBLogger.logPath)
except:
self.logger.error(traceback.format_exc())
self.rest = httplogin
self.last_check_telegram = time.time()
self.last_check_ip = time.time()
self.last_check_server = 0
self.master.geometry('%dx%d+%d+%d' % self.configure.getGeometry())
self.width = self.configure.getGeometry()[0]
self.height = self.configure.getGeometry()[1]
# self.master.configure(background='black')
self.master.title(self.configure.window_title + ' ' + str(lybconstant.LYB_VERSION))
self.note = ttk.Notebook(self.master,
width=self.width,
height=self.height
)
self.note.bind('<Button-1>', self.clicked_main_tab)
self.tab_frame = []
self.game_frame = {}
self.game_options = {}
self.gui_config_dic = {}
self.option_dic = {}
self.game_object = {}
self.monitor_check_point = {}
self.wlist_stringvar_dic = {}
self.wlist_stringvar_skip_dic = {}
self.wlist_combobox_dic = {}
self.current_work_dic = {}
self.ready_to_search_queue = []
self.ready_to_start_queue = []
self.stop_app_player_list = []
self.restart_app_player_list = []
self.restart_app_player_count = 0
self.restart_app_player_search = False
self.timeClickedAds = 0
self.mb_point = None
self.first_for_ads = True
# --- COMMON TAB
self.monitor_button_index = [-1, -1, -1, -1, -1]
# TEST = 'groov', RELEASE = 'flat'
self.frame_relief = 'flat'
frame_relief = self.frame_relief
self.gui_style = ttk.Style()
# print(self.gui_style.layout("TNotebook.Tab"))
# self.gui_style.layout("Tab",
# [ ('Notebook.tab', {'sticky': 'nswe', 'children':
# [ ('Notebook.padding', { 'side': 'top', 'sticky': 'nswe', 'children':
# #[ ('Notebook.focus', {'side': 'top', 'sticky': 'nswe', 'children':
# [ ('Notebook.label', {'side': 'top', 'sticky': ''
# })]
# # })]
# })]
# })]
# )
self.gui_style.theme_use('vista')
self.gui_style.configure('.', font=lybconstant.LYB_FONT)
self.gui_style.configure("Tab", focuscolor=self.gui_style.configure(".")["background"])
self.gui_style.configure("TButton", focuscolor=self.gui_style.configure(".")["background"])
self.gui_style.configure("TCheckbutton", focuscolor=self.gui_style.configure(".")["background"])
self.tab_frame.append(ttk.Frame(
master=self.note,
width=self.width * 0.2 + 10 * lybconstant.LYB_PADDING,
height=self.height - lybconstant.LYB_PADDING,
relief=frame_relief
))
self.note.add(self.tab_frame[-1], text='일반')
# 녹스 창 이름
# frame = ttk.Frame(self.tab_frame[-1], relief=frame_relief)
# frame.pack(pady=5)
# frame = ttk.Frame(
# master = self.tab_frame[-1],
# relief = frame_relief
# )
# s = ttk.Style()
# s.configure('label_0.TLabel', foreground='red')
# label_begging = ttk.Label(
# master = frame,
# text = "※ 개발자에게 고마움을 느끼셨다면 홈페이지 광고 한 번 클릭해주세요 → ",
# justify = tkinter.LEFT,
# style = 'label_0.TLabel'
# # fg='White' if brightness < 120 else 'Black',
# # bg=bg_colour
# )
# label_begging.pack(side=tkinter.LEFT)
# s = ttk.Style()
# s.configure('label_link.TLabel', foreground='blue', font=('굴림체', 9, 'underline'))
# link_url = "www.dogfooter.com"
# label_hompage = ttk.Label(
# master = frame,
# text = link_url,
# justify = tkinter.LEFT,
# style = 'label_link.TLabel',
# cursor = 'hand2'
# # fg='White' if brightness < 120 else 'Black',
# # bg=bg_colour
# )
# # f = font.Font(label_hompage, label_hompage.cget("font"))
# # f.configure(underline = True)
# # f.configure(weight='bold')
# # label_hompage.configure(font=f)
# label_hompage.pack(side=tkinter.LEFT)
# label_hompage.bind("<Button-1>", self.callback_hompage)
# frame.pack(anchor=tkinter.W, fill=tkinter.BOTH)
# frame = ttk.Frame(self.tab_frame[-1], relief=frame_relief)
# label_begging = ttk.Label(
# master = frame,
# text = "※ 소스가 궁금하신 분들은 오른쪽 링크를 클릭하세요 → ",
# justify = tkinter.LEFT
# )
# label_begging.pack(side=tkinter.LEFT)
# link_url = "www.bitbucket.org/dogfooter/dogfooter"
# label_hompage = ttk.Label(
# master = frame,
# text = link_url,
# justify = tkinter.LEFT,
# style = 'label_link.TLabel',
# cursor = 'hand2'
# )
# f = font.Font(label_hompage, label_hompage.cget("font"))
# f.configure(underline = True)
# f.configure(weight='bold')
# label_hompage.configure(font=f)
# label_hompage.pack(side=tkinter.LEFT)
# label_hompage.bind("<Button-1>", self.callbac_bitbucket)
# frame.pack(anchor=tkinter.W, fill=tkinter.BOTH, pady=5)
# s = ttk.Style()
# s.configure('blue_label.TLabel', foreground='blue')
frame = ttk.Frame(self.tab_frame[-1])
# self.keyword_label = ttk.Label(
# master = frame,
# text = "앱 플레이어 창 이름",
# justify = tkinter.LEFT,
# style = 'blue_label.TLabel'
# )
# self.keyword_label.pack(side=tkinter.LEFT)
# self.tooltip(self.keyword_label, lybconstant.LYB_TOOLTIP_APP_TITLE)
# self.keyword_entry = ttk.Entry(
# master = frame,
# # relief = 'sunken',
# justify = tkinter.LEFT,
# # font = lybconstant.LYB_FONT,
# width = 32
# )
# self.keyword_entry.pack(side=tkinter.LEFT, padx=10)
# self.keyword_entry.insert(0, self.configure.keyword)
# self.keyword_entry.focus()
label = ttk.Label(
master=frame,
text="앱 플레이어: "
)
label.pack(side=tkinter.LEFT)
self.app_player_process = tkinter.StringVar(frame)
self.app_player_process.set('')
self.app_player_process.trace('w', lambda *args: self.callback_select_app_player_process_stringvar(args))
self.app_player_process_list = ttk.Combobox(
master=frame,
values=[],
textvariable=self.app_player_process,
state="readonly",
height=20,
width=30,
font=lybconstant.LYB_FONT
)
self.app_player_process_list.pack(anchor=tkinter.W, side=tkinter.LEFT)
self.keyword_entry = ttk.Entry(
master=frame,
justify=tkinter.LEFT,
font=lybconstant.LYB_FONT,
width=20
)
self.keyword_entry.pack(side=tkinter.LEFT, padx=10)
self.keyword_entry.insert(0, self.configure.keyword)
self.tooltip(self.keyword_entry, lybconstant.LYB_TOOLTIP_APP_TITLE)
s = ttk.Style()
s.configure('button_0.TButton', font=('굴림체', 9))
self.search_button = ttk.Button(
master=frame,
text="갱신",
width=10,
style='button_0.TButton',
command=lambda: self.searchWindow(None)
)
self.search_button.pack(side=tkinter.LEFT, padx=5)
lybhttp = self.login()
base_point = lybhttp.get_elem('dogfootermacro_point')
if base_point == None:
base_point = 0
else:
base_point = int(base_point)
if int(self.get_mb_point()) >= base_point:
# s = ttk.Style()
# s.configure('button_dogfootermacro.TButton', font=('굴림체', 9, 'bold'), foreground='blue', background='red')
self.dogfootermacro_button = ttk.Button(
master=frame,
text="라이트버전",
width=10,
style='button_0.TButton',
# bg = 'red',
# fg = 'yellow',
# relief = 'flat',
# style = 'button_dogfootermacro.TButton',
command=lambda: self.callback_fork_dogfootermacro(None)
)
self.dogfootermacro_button.pack(side=tkinter.LEFT, padx=5)
lybhttp = self.login()
base_point = lybhttp.get_elem('lybcfg_point')
if base_point == None:
base_point = 0
else:
base_point = int(base_point)
if int(self.get_mb_point()) >= base_point:
# s = ttk.Style()
# s.configure('button_dogfootermacro.TButton', font=('굴림체', 9, 'bold'), foreground='blue', background='red')
self.dogfootermacro_button = ttk.Button(
master=frame,
text="설정받기",
width=10,
style='button_0.TButton',
# bg = 'red',
# fg = 'yellow',
# relief = 'flat',
# style = 'button_dogfootermacro.TButton',
command=lambda: self.callback_download_lybcfg(None)
)
self.dogfootermacro_button.pack(side=tkinter.LEFT, padx=5)
# ads_image = Image.open(resource_path("ads_image.jpg"))
# # if ads_image.size != (128, 32):
# # ads_image = ads_image.resize((128, 32), Image.ANTIALIAS)
# ads_image = ImageTk.PhotoImage(ads_image)
# frame_ads = ttk.Frame(frame)
# label = ttk.Label(
# master = frame_ads,
# image = ads_image,
# cursor = 'hand2'
# )
# label.image = ads_image
# label.place(x=0, y=0)
# label.pack()
# label.bind("<Button-1>", self.callback_hompage)
# frame_ads.pack(fill=tkinter.X, expand=True, anchor=tkinter.E)
# self.security_authority = False
# self.keyword_label = ttk.Label(
# master = frame,
# text = "실행 인증 코드",
# justify = tkinter.LEFT,
# font = lybconstant.LYB_FONT
# )
# self.keyword_label.pack(side=tkinter.LEFT, padx=10)
# self.security_code = tkinter.StringVar(frame)
# security_code_entry = tkinter.Entry(
# master = frame,
# relief = 'sunken',
# justify = tkinter.LEFT,
# font = lybconstant.LYB_FONT,
# textvariable = self.security_code,
# width = 32
# )
# self.security_code.trace(
# 'w', lambda *args: self.callback_security_code_stringvar(args)
# )
# security_code_entry.pack(side=tkinter.LEFT)
# if not 'security_code' in self.configure.common_config:
# self.configure.common_config['security_code'] = ''
# security_code_entry.insert(0, self.configure.common_config['security_code'])
frame.pack(side=tkinter.TOP, pady=5)
frame_s = ttk.Frame(
master=self.tab_frame[-1],
relief=frame_relief
)
frame_l = ttk.Frame(frame_s, relief=frame_relief)
# s = ttk.Style()
# s.configure('label_1.TLabel', font=('굴림체', 9, 'underline'))
# self.configure_label = ttk.Label(
# master = frame_l,
# text = lybconstant.LYB_LABEL_SELECT_WINDOW_TEXT,
# style = 'label_1.TLabel'
# )
# self.configure_label.pack(side=tkinter.TOP)
# label_font = tkinter.font.Font(self.configure_label, self.configure_label.cget('font'))
# label_font.configure(underline=True)
# self.configure_label.configure(font=label_font)
self.gui_config_dic = {}
self.games = [
lybconstant.LYB_GAME_ROHAN,
# lybconstant.LYB_GAME_LIN2REV,
# lybconstant.LYB_GAME_CLANS,
# lybconstant.LYB_GAME_YEOLHYUL
]
# 헌드레드 소울
# lybhttp = self.login()
# base_point = lybhttp.get_elem('hundredsoul_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
#
# if int(self.get_mb_point()) >= base_point:
# self.games.append(lybconstant.LYB_GAME_HUNDREDSOUL)
# # 검은사막
# lybhttp = self.login()
# base_point = lybhttp.get_elem('blackdesert_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
#
# if int(self.get_mb_point()) >= base_point:
# self.games.append(lybconstant.LYB_GAME_BLACKDESERT)
#
# # 블레이드2
# base_point = lybhttp.get_elem('blade2_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
#
# if int(self.get_mb_point()) >= base_point:
# self.games.append(lybconstant.LYB_GAME_BLADE2)
#
# # 이카루스
# base_point = lybhttp.get_elem('icarus_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
#
# if int(self.get_mb_point()) >= base_point:
# self.games.append(lybconstant.LYB_GAME_ICARUS)
#
# # TALION
# base_point = lybhttp.get_elem('talion_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
#
# if int(self.get_mb_point()) >= base_point:
# self.games.append(lybconstant.LYB_GAME_TALION)
frame_app_player_config = ttk.LabelFrame(frame_l, text='앱 플레이어 설정')
frame_inner = ttk.Frame(frame_app_player_config)
frame_game = ttk.Frame(frame_inner)
s = ttk.Style()
s.configure('fgWhite_bgGreen.TLabel', foreground='white', background='blue')
label = ttk.Label(
master=frame_game,
text="게임 선택 ☞ ",
style="fgWhite_bgGreen.TLabel"
)
label.pack(side=tkinter.LEFT)
self.gui_config_dic['games'] = tkinter.StringVar(frame_l)
if not 'games' in self.configure.common_config:
self.configure.common_config['games'] = self.games[0]
self.gui_config_dic['games'].set(self.configure.common_config['games'])
self.gui_config_dic['games'].trace('w',
lambda *args: self.selected_game(args))
combobox = ttk.Combobox(
master=frame_game,
values=self.games,
textvariable=self.gui_config_dic['games'],
state='readonly',
width=22,
font=lybconstant.LYB_FONT
)
# self.inactive_flag_option_menu.set(inactive_mode_flag_list[0])
# combobox.configure(stat=tkinter.DISABLED)
combobox.pack(anchor=tkinter.W, padx=2)
frame_game.pack(anchor=tkinter.W)
# option_menu = ttk.OptionMenu(
# frame_l,
# self.gui_config_dic['games'],
# '',
# *self.games,
# command = self.selected_game
# )
# option_menu.configure(width=20)
# # option_menu.configure(font=lybconstant.LYB_FONT)
# option_menu.pack(side=tkinter.TOP)
if not 'multi_account' in self.configure.common_config:
self.configure.common_config['multi_account'] = False
# 로컬변수로 선언하면 안된다. 가비지컬렉터한테 먹혀서 안됨.. UI 는 계속 루프를 도니까
self.gui_config_dic['multi_account'] = tkinter.BooleanVar()
self.gui_config_dic['multi_account'].set(self.configure.common_config['multi_account'])
check_box = ttk.Checkbutton(
master=frame_inner,
text='구글 멀티 계정 사용',
variable=self.gui_config_dic['multi_account'],
onvalue=True,
offvalue=False,
command=lambda: self.toggleCommonCheckBox('multi_account')
)
check_box.pack(anchor=tkinter.W)
if not 'debug_booleanvar' in self.configure.common_config:
self.configure.common_config['debug_booleanvar'] = True
self.gui_config_dic['debug_booleanvar'] = tkinter.BooleanVar()
self.gui_config_dic['debug_booleanvar'].set(self.configure.common_config['debug_booleanvar'])
# check_box = ttk.Checkbutton(
# master = frame_l,
# text = '디버깅 모드',
# variable = self.gui_config_dic['debug_booleanvar'],
# onvalue = True,
# offvalue = False,
# command = lambda: self.toggle_debug_checkbox('debug_booleanvar')
# )
# check_box.pack(anchor=tkinter.W)
frame = ttk.Frame(frame_inner, relief=frame_relief)
if not lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE] = False
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE] = tkinter.BooleanVar()
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE])
check_box = ttk.Checkbutton(
master=frame,
text='비활성 모드',
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE],
onvalue=True,
offvalue=False,
command=lambda: self.callback_use_inactive_mode_booleanvar()
)
check_box.pack(side=tkinter.LEFT)
inactive_mode_flag_list = [
'윈7',
'윈10'
]
self.gui_config_dic[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG] = tkinter.StringVar(frame)
if not lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG] = inactive_mode_flag_list[1]
self.gui_config_dic[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG].set(
self.configure.common_config[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG])
self.gui_config_dic[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG].trace('w',
lambda
*args: self.callback_inactive_mode_flag_stringvar(
args))
# self.inactive_flag_option_menu = ttk.OptionMenu(
# frame,
# self.gui_config_dic[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG],
# '',
# *inactive_mode_flag_list,
# command = self.callback_inactive_mode_flag_stringvar
# )
# self.inactive_flag_option_menu.configure(width=4)
# self.inactive_flag_option_menu.configure(stat=tkinter.DISABLED)
# # self.inactive_flag_option_menu.configure(font=lybconstant.LYB_FONT)
# self.inactive_flag_option_menu.pack(side=tkinter.LEFT, anchor=tkinter.SW)
s = ttk.Style()
s.map('TCombobox', fieldbackground=[('disabled', '#afafaf')])
s.map('TCombobox', foreground=[('disabled', '#424242')])
self.inactive_flag_option_menu = ttk.Combobox(
master=frame,
values=inactive_mode_flag_list,
textvariable=self.gui_config_dic[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG],
state='readonly',
width=5,
font=lybconstant.LYB_FONT
)
# self.inactive_flag_option_menu.set(inactive_mode_flag_list[0])
# self.inactive_flag_option_menu.configure(stat=tkinter.DISABLED)
self.inactive_flag_option_menu.pack(anchor=tkinter.W, side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(frame_inner, relief=frame_relief)
if not lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean'] = False
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean'] = tkinter.BooleanVar()
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean'])
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean'].trace('w',
lambda
*args: self.callback_fix_window_location_booleanvar(
args))
check_box = ttk.Checkbutton(
master=frame,
text='창 고정',
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean'],
onvalue=True,
offvalue=False
)
check_box.pack(side=tkinter.LEFT)
combobox_list = []
for i in range(0, 7681):
combobox_list.append(str(i))
label = ttk.Label(
master=frame,
text="X:"
)
label.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x'] = tkinter.StringVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x'] = 0
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x'])
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x'].trace('w',
lambda
*args: self.callback_fix_window_location_x_stringvar(
args))
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x'],
state="readonly",
height=20,
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(anchor=tkinter.W, side=tkinter.LEFT)
combobox_list = []
for i in range(0, 2161):
combobox_list.append(str(i))
label = ttk.Label(
master=frame,
text=" Y:"
)
label.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y'] = tkinter.StringVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y'] = 0
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y'])
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y'].trace('w',
lambda
*args: self.callback_fix_window_location_y_stringvar(
args))
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y'],
state="readonly",
height=20,
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(anchor=tkinter.W, side=tkinter.LEFT)
self.get_location_window_button = ttk.Button(
master=frame,
text="GET",
width=3,
style='button_0.TButton',
command=lambda: self.getWindowLocation(None)
)
self.get_location_window_button.pack(side=tkinter.LEFT, padx=1)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(frame_inner, relief=frame_relief)
combobox_list = []
for i in range(1, 6):
combobox_list.append(str(i))
label = ttk.Label(
master=frame,
text="멀티 플레이어에 설치된 순서:"
)
label.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number'] = tkinter.StringVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number'] = 1
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number'])
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number'].trace('w',
lambda
*args: self.callback_fix_window_location_number_stringvar(
args))
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number'],
state="readonly",
height=20,
width=1,
font=lybconstant.LYB_FONT
)
combobox.pack(anchor=tkinter.W, side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
frame_inner.pack(side=tkinter.LEFT, padx=5, pady=3)
frame_app_player_config.pack(anchor=tkinter.NW, pady=5)
frame_l.pack(side=tkinter.LEFT, anchor=tkinter.NW, padx=2)
# frame_c = ttk.Frame(frame_s, relief=frame_relief)
# ----- WINDOW LIST -----
# self.search_window = tkinter.Listbox(
# master = frame_c,
# selectmode = tkinter.MULTIPLE,
# font = ("돋움", 10),
# height = 8,
# activestyle = 'none',
# selectbackground = "#BC80CC"
# )
# self.search_window.pack(side=tkinter.LEFT, anchor=tkinter.NW)
# self.selected_window_list = []
# self.search_window.insert('end', '')
# self.search_window.bind('<<ListboxSelect>>', self.selectedWindowList)
# self.is_clicked_common_tab = False
# frame_c.pack(side=tkinter.LEFT, anchor=tkinter.NW, padx=10)
frame_r = ttk.Frame(frame_s, relief=frame_relief)
frame_label = ttk.LabelFrame(
master=frame_r,
text='공지 사항'
)
# label_font = tkinter.font.Font(label, label.cget('font'))
# label_font.configure(underline=True)
# label_font.configure(weight='bold')
# label.configure(font=label_font)
# usage_text = tkinter.Text(
# master = frame_r,
# spacing1 = 3,
# height = 6,
# font = lybconstant.LYB_FONT
# )
# vsb = tkinter.Scrollbar(
# master = usage_text,
# orient = 'vertical',
# command = usage_text.yview
# )
# usage_text.configure(yscrollcommand=vsb.set)
# vsb.pack(side='right', fill='y')
# hsb = tkinter.Scrollbar(
# master = usage_text,
# orient = 'horizontal',
# command = usage_text.xview
# )
# usage_text.configure(xscrollcommand=hsb.set)
# hsb.pack(side='bottom', fill='x')
# usage_text.pack(anchor=tkinter.NW, fill=tkinter.X, expand=True)
# usage_list = lybconstant.LYB_USAGE.split('\n')
# lybhttp = self.login()
# notice_count = int(lybhttp.get_elem('notice_count'))
# notice_index = int(lybhttp.get_elem('notice_index'))
# notice_dic = lybhttp.get_notice()
# self.notice_link_list = []
# self.notice_subject_list = []
# i = 0
# for key, value in notice_dic.items():
# label = ttk.Label(
# master=frame_label,
# text=key,
# font=lybconstant.LYB_FONT,
# cursor='hand2',
# width=17
# )
# label.pack(anchor=tkinter.NW)
#
# f = font.Font(label, label.cget("font"))
# f.configure(underline=True)
# label.configure(font=f)
# self.notice_subject_list.append(key)
# self.notice_link_list.append(value)
# if i == 0:
# label.bind("<Button-1>", lambda event: self.callback_link_url0(event, url=self.notice_link_list[0]))
# elif i == 1:
# label.bind("<Button-1>", lambda event: self.callback_link_url1(event, url=self.notice_link_list[1]))
# elif i == 2:
# label.bind("<Button-1>", lambda event: self.callback_link_url2(event, url=self.notice_link_list[2]))
# elif i == 3:
# label.bind("<Button-1>", lambda event: self.callback_link_url3(event, url=self.notice_link_list[3]))
# elif i == 4:
# label.bind("<Button-1>", lambda event: self.callback_link_url3(event, url=self.notice_link_list[4]))
# elif i == 5:
# label.bind("<Button-1>", lambda event: self.callback_link_url4(event, url=self.notice_link_list[5]))
# else:
# break
#
# if i == notice_index:
# label.configure(foreground='red')
#
# i += 1
#
# if i >= notice_count:
# break
#
# # usage_text.insert('end', usage_list)
# # for each_usage in usage_list:
# # usage_text.insert('end', each_usage + '\n')
#
frame_label.pack(anchor=tkinter.NW, side=tkinter.LEFT)
# frame_inner = ttk.Frame(frame_r)
# frame_inner.pack(side=tkinter.LEFT, padx=2)
#
# self.notice_frame_label = ttk.LabelFrame(
# master=frame_r,
# text='공지'
# )
#
# frame_notice_text = ttk.Frame(self.notice_frame_label)
# self.notice_text = tkinter.Text(
# master=frame_notice_text,
# spacing1=3,
# wrap=None,
# height=6,
# font=lybconstant.LYB_FONT
# )
#
# vsb = tkinter.Scrollbar(
# master=frame_notice_text,
# orient='vertical',
# command=self.notice_text.yview
# )
# self.notice_text.configure(yscrollcommand=vsb.set)
# vsb.pack(side=tkinter.RIGHT, fill=tkinter.Y)
# self.information_scrollbar = ttk.Scrollbar(self.information_frame)
# self.information.configure(yscrollcommand=self.information_scrollbar.set)
# self.information_scrollbar.configure(command=self.information.yview)
# self.information_scrollbar.pack(side=tkinter.RIGHT, fill=tkinter.Y)
# hsb = tkinter.Scrollbar(
# master = frame_notice_text,
# orient = 'horizontal',
# command = frame_notice_text.xview
# )
# frame_notice_text.configure(xscrollcommand=hsb.set)
# hsb.pack(side='bottom', fill='x')
# self.notice_text.pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True)
#
# content_list = lybhttp.get_notice_content(self.notice_link_list[notice_index])
#
# # usage_text.insert('end', usage_list)
# # for each_usage in usage_list:
# # usage_text.insert('end', each_usage + '\n')
# for each_line in content_list:
# self.notice_text.insert('end', each_line + '\n')
#
# frame_notice_text.pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True)
# self.notice_frame_label.pack(anchor=tkinter.NW, side=tkinter.LEFT, fill=tkinter.BOTH, expand=True)
#
frame_r.pack(side=tkinter.LEFT, anchor=tkinter.NW, fill=tkinter.BOTH, expand=True, padx=5, pady=5)
frame_s.pack(anchor=tkinter.NW, fill=tkinter.BOTH, pady=5)
# - TAB
# 탭 추가
self.option_dic['common_tab'] = ttk.Notebook(
master=self.tab_frame[-1]
)
self.option_dic['common_tab'].bind('<Button-1>', self.clicked_common_tab)
# 모니터링 탭
self.option_dic['monitoring_tab'] = ttk.Frame(
master=self.option_dic['common_tab']
)
# self.gui_config_dic['monitoring_tab'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True)
self.option_dic['common_tab'].add(self.option_dic['monitoring_tab'], text='모니터링')
# 공통 설정 탭
self.option_dic['common_config_tab'] = ttk.Frame(
master=self.option_dic['common_tab']
)
# self.gui_config_dic['common_config_tab'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True)
self.option_dic['common_tab'].add(self.option_dic['common_config_tab'], text='공통 설정')
# 로그 탭
# self.option_dic['logging_tab'] = ttk.Frame(
# master = self.option_dic['common_tab']
# )
# self.option_dic['common_tab'].add(self.option_dic['logging_tab'], text='로그')
self.option_dic['common_tab'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, expand=True)
self.option_dic['monitor_master'] = ttk.Frame(self.option_dic['monitoring_tab'])
# frame_label = self.add_monitor_master_frame()
self.option_dic['monitor_master'].pack(anchor=tkinter.NW, fill=tkinter.BOTH, padx=2, pady=2)
frame_bottom = ttk.Frame(self.option_dic['monitoring_tab'])
frame_log = ttk.Frame(frame_bottom)
# ----- INFORMATION LOGGING ------
# self.logger.critical('CRITICAL')
# self.logger.error('ERROR')
# self.logger.warn('WARN')
# self.logger.info('INFO')
# self.logger.debug('DEBUG')
# frame = ttk.Frame(frame_log)
# frame.pack(pady=2)
frame = ttk.Frame(frame_log)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'] = tkinter.BooleanVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'] = True
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical']
)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'].trace('w',
lambda
*args: self.callback_log_level_critical(
args)
)
s = ttk.Style(frame)
s.configure('green_checkbutton.TCheckbutton', foreground='green')
checkbutton = ttk.Checkbutton(
master=frame,
text="필수정보",
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'],
style='green_checkbutton.TCheckbutton',
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'] = tkinter.BooleanVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'] = True
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error']
)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'].trace('w',
lambda
*args: self.callback_log_level_error(
args)
)
s = ttk.Style(frame)
s.configure('red_checkbutton.TCheckbutton', foreground='red')
checkbutton = ttk.Checkbutton(
master=frame,
text="에러",
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'],
style='red_checkbutton.TCheckbutton',
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'] = tkinter.BooleanVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'] = True
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn']
)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'].trace('w',
lambda
*args: self.callback_log_level_warn(
args)
)
s = ttk.Style(frame)
s.configure('orange_checkbutton.TCheckbutton', foreground='#f97436')
checkbutton = ttk.Checkbutton(
master=frame,
text="경고",
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'],
style='orange_checkbutton.TCheckbutton',
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'] = tkinter.BooleanVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'] = True
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info']
)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'].trace('w',
lambda
*args: self.callback_log_level_info(
args)
)
checkbutton = ttk.Checkbutton(
master=frame,
text="게임정보",
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'],
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'] = tkinter.BooleanVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'] = False
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug']
)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'].trace('w',
lambda
*args: self.callback_log_level_debug(
args)
)
checkbutton = ttk.Checkbutton(
master=frame,
text="디버깅",
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'],
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text=' '
)
label.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text='필터링:'
)
label.pack(side=tkinter.LEFT)
self.log_filter_entry = tkinter.StringVar(frame)
self.log_filter_entry.trace('w', lambda *args: self.callback_log_filter_entry_stringvar(args))
self.configure.common_config[lybconstant.LYB_DO_STRING_LOG_FILTER] = ''
entry = ttk.Entry(
master=frame,
textvariable=self.log_filter_entry,
justify=tkinter.LEFT,
font=lybconstant.LYB_FONT,
width=15
)
entry.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text=' '
)
label.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'] = tkinter.BooleanVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'] = False
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'].set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock']
)
self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'].trace('w',
lambda *args: self.callback_log_lock(
args)
)
checkbutton = ttk.Checkbutton(
master=frame,
text="스크롤 잠금",
variable=self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'],
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
# self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove'] = tkinter.BooleanVar(frame)
# if not lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove' in self.configure.common_config:
# self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove'] = False
# self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove'].set(
# self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove']
# )
# self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove'].trace('w',
# lambda *args: self.callback_log_remove(args)
# )
# checkbutton = ttk.Checkbutton(
# master = frame,
# text = "로그 파일 자동 삭제",
# variable = self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove'],
# onvalue = True,
# offvalue = False
# )
# checkbutton.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W, padx=2)
self.information_frame = ttk.Frame(frame_log)
self.information = tkinter.Text(
master=self.information_frame,
width=90,
height=25,
# spacing1 = 3,
font=('Consolas', 8),
# font = lybconstant.LYB_FONT
)
# self.vsb = tkinter.Scrollbar(self.information_frame,
# orient='vertical',
# command=self.information.yview)
# self.hsb = tkinter.Scrollbar(self.information_frame,
# orient='horizontal',
# command=self.information.xview)
# self.information.configure(
# wrap=tkinter.NONE
# )
# yscrollcommand=self.vsb.set,
# xscrollcommand=self.hsb.set)
self.information.tag_configure('FAIL', foreground='red')
self.information.tag_configure('SUCCESS', foreground='green')
self.information.tag_configure('GOOD', foreground='#ad42f4')
self.information.tag_configure('NICE', foreground='#00ad56')
self.information.tag_configure('SUB', foreground='#d13e83')
self.information.tag_configure('BAD', foreground='#fcab97')
# self.information.tag_configure('GOOD', foreground='#ad42f4')
self.information.tag_configure('INFO', foreground='blue')
self.information.tag_configure('D', foreground='black')
self.information.tag_configure('I', foreground='black')
self.information.tag_configure('E', foreground='red')
self.information.tag_configure('C', foreground='green')
self.information.tag_configure('W', foreground='#f97436')
# self.vsb.pack(side='right', fill='y')
# self.hsb.pack(side='bottom', fill='x')
self.information_scrollbar = ttk.Scrollbar(self.information_frame)
self.information.configure(yscrollcommand=self.information_scrollbar.set)
self.information_scrollbar.configure(command=self.information.yview)
self.information_scrollbar.pack(side=tkinter.RIGHT, fill=tkinter.Y)
# self.information_scrollbar_h = ttk.Scrollbar(self.information_frame, orient=tkinter.HORIZONTAL)
# self.information.configure(xscrollcommand=self.information_scrollbar_h.set)
# self.information_scrollbar_h.configure(command=self.information.xview)
# self.information_scrollbar_h.pack(side=tkinter.BOTTOM, fill=tkinter.X)
self.information.pack(side=tkinter.BOTTOM)
self.information_frame.pack(side=tkinter.LEFT, anchor=tkinter.SW, padx=2, pady=2)
frame_log.pack(side=tkinter.LEFT)
frame_log = ttk.Frame(frame_bottom)
s = ttk.Style()
s.configure('button_1.TButton', justify=tkinter.CENTER)
s = ttk.Style()
s.configure('button_homepage.TButton', justify=tkinter.CENTER, background='green')
s = ttk.Style()
s.configure('button_kakao.TButton', justify=tkinter.CENTER, background='yellow')
frame = ttk.Frame(frame_log)
frame_sub = ttk.LabelFrame(frame, text='바로가기')
# button = ttk.Button(
# master=frame_sub,
# text="홈페이지",
# width=14,
# style='button_homepage.TButton',
# command=lambda: self.callback_hompage(None)
# )
# button.pack()
# button = ttk.Button(
# master=frame_sub,
# text="블로그",
# width=14,
# style='button_homepage.TButton',
# command=lambda: self.callback_blog(None)
# )
# button.pack()
button = ttk.Button(
master=frame_sub,
text="기능명세서",
width=14,
style='button_homepage.TButton',
command=lambda: self.callback_docs(None)
)
button.pack()
frame_sub.pack()
frame_sub = ttk.LabelFrame(frame, text='오픈채팅방')
# button = ttk.Button(
# master=frame_sub,
# text="테라M",
# width=14,
# style='button_kakao.TButton',
# command=lambda: self.callback_tera_kakaotalk(None)
# )
# button.pack()
# button = ttk.Button(
# master=frame_sub,
# text="검은사막M",
# width=14,
# style='button_kakao.TButton',
# command=lambda: self.callback_blackdesert_kakaotalk(None)
# )
# button.pack()
# button = ttk.Button(
# master = frame_sub,
# text = "카이저",
# width = 14,
# style = 'button_kakao.TButton',
# command = lambda: self.callback_kaiser_kakaotalk(None)
# )
# button.pack()
# button = ttk.Button(
# master=frame_sub,
# text="블레이드2",
# width=14,
# style='button_kakao.TButton',
# command=lambda: self.callback_blade2_kakaotalk(None)
# )
# button.pack()
#
# button = ttk.Button(
# master=frame_sub,
# text="이카루스M",
# width=14,
# style='button_kakao.TButton',
# command=lambda: self.callback_icarus_kakaotalk(None)
# )
# button.pack()
#
# button = ttk.Button(
# master=frame_sub,
# text="탈리온",
# width=14,
# style='button_kakao.TButton',
# command=lambda: self.callback_talion_kakaotalk(None)
# )
# button.pack()
# button = ttk.Button(
# master = frame_sub,
# text = "소스보기",
# width = 14,
# command = lambda: self.callback_bitbucket(None)
# )
# button.pack()
frame_sub.pack()
frame.pack(side=tkinter.LEFT, anchor=tkinter.NW, padx=7)
# ----- BUTTON FRAME -----
frame_br = ttk.Frame(frame_log)
login_frame = ttk.LabelFrame(frame_br, text='계정')
user_account = self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_SAVE_LOGIN_ACCOUNT + '_id']
if len(user_account) > 10:
user_account = user_account[0:10]
label = ttk.Label(
master=login_frame,
text=user_account
)
label.pack()
login_frame.pack(fill=tkinter.X, padx=2)
self.mb_point_label = tkinter.StringVar(frame_br)
login_frame = ttk.LabelFrame(frame_br, text='포인트')
label = ttk.Label(
master=login_frame,
textvariable=self.mb_point_label
)
label.pack()
self.mb_point_label.set(self.get_mb_point())
# self.mb_ip_label = tkinter.StringVar(frame_bottom)
# login_frame = ttk.LabelFrame(frame_bottom, text='IP 주소')
# label = ttk.Label(
# master = login_frame,
# textvariable = self.mb_ip_label
# )
# label.pack()
# self.mb_ip_label.set(self.get_mb_ip())
login_frame.pack(fill=tkinter.X, padx=2, pady=1)
login_frame = ttk.LabelFrame(frame_br, text='뒷통수조심')
self.hide_button = ttk.Button(
master=login_frame,
text="숨기기",
width=14,
style='button_1.TButton',
command=lambda: self.callback_hide_window(None, None)
)
self.show_button = ttk.Button(
master=login_frame,
text="보이기",
width=14,
style='button_1.TButton',
command=lambda: self.callback_show_window(None, None)
)
self.hide_button.pack(pady=lybconstant.LYB_PADDING)
self.show_button.pack(pady=lybconstant.LYB_PADDING)
login_frame.pack(fill=tkinter.X, padx=2)
login_frame = ttk.LabelFrame(frame_br, text='텔레그램')
image1 = Image.open(resource_path("images/t_logo.png"))
if image1.size != (32, 32):
image1 = image1.resize((32, 32), Image.ANTIALIAS)
image1 = ImageTk.PhotoImage(image1)
label = ttk.Label(
master=login_frame,
image=image1
)
label.image = image1
label.place(x=0, y=0)
label.pack()
self.tooltip(label, lybconstant.LYB_TOOLTIP_TELEGRAM)
self.telegram_entry = tkinter.StringVar(login_frame)
entry = ttk.Entry(
master=login_frame,
textvariable=self.telegram_entry,
justify=tkinter.LEFT,
font=lybconstant.LYB_FONT,
width=15
)
entry.pack(anchor=tkinter.W, fill=tkinter.X)
self.telegram_entry.set('')
self.telegram_button_label = tkinter.StringVar(login_frame)
button = ttk.Button(
master=login_frame,
textvariable=self.telegram_button_label,
command=lambda: self.callback_telegram(None)
)
button.pack(anchor=tkinter.W, fill=tkinter.X)
if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_SAVE_LOGIN_ACCOUNT + '_chatid'] == '':
self.telegram_button_label.set('연동하기')
self.telegram_entry.set(self.generate_token())
# entry.select_range(0, tkinter.END)
else:
self.telegram_button_label.set('연동해제')
s = ttk.Style(login_frame)
s.configure('green_label.TLabel', foreground="green")
self.telegram_chatid_label = tkinter.StringVar(login_frame)
label = ttk.Label(
master=login_frame,
textvariable=self.telegram_chatid_label,
justify=tkinter.LEFT,
style='green_label.TLabel'
)
label.pack()
self.telegram_chatid_label.set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_SAVE_LOGIN_ACCOUNT + '_chatid'])
login_frame.pack(fill=tkinter.X, padx=2)
button_frame = ttk.Frame(frame_br)
self.start_button = ttk.Button(
master=button_frame,
text="시작",
width=14,
style='button_1.TButton',
command=lambda: self.startWorker(None)
)
self.pause_button = ttk.Button(
master=button_frame,
text="일시정지",
width=14,
style='button_1.TButton',
command=lambda: self.pauseWorker(None)
)
self.stop_button = ttk.Button(
master=button_frame,
text="정지",
width=14,
style='button_1.TButton',
command=lambda: self.terminateWorker(None)
)
self.start_button.pack(pady=lybconstant.LYB_PADDING)
self.pause_button.pack(pady=lybconstant.LYB_PADDING)
self.stop_button.pack(pady=lybconstant.LYB_PADDING)
button_frame.pack(side=tkinter.BOTTOM, padx=2)
frame_br.pack(fill=tkinter.BOTH, anchor=tkinter.NW, expand=True, padx=2)
frame_log.pack(fill=tkinter.BOTH, anchor=tkinter.NW, expand=True)
frame_bottom.pack(side=tkinter.BOTTOM, anchor=tkinter.NW)
# ----- CONFIGURATION -----
self.common_top_frame = ttk.Frame(self.option_dic['common_config_tab'])
self.configure_frame = ttk.LabelFrame(
master=self.common_top_frame,
text='봇 설정'
)
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
threshold_label = ttk.Label(
master=frame,
text="이미지를 인식할 때 비교 대상과 ",
anchor=tkinter.W,
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
# countif.place(
# x=lybconstant.LYB_PADDING,
# y=lybconstant.LYB_PADDING,
# width=lybconstant.LYB_LABEL_WIDTH, height=lybconstant.LYB_LABEL_HEIGHT
# )
threshold_label.pack(side=tkinter.LEFT)
self.threshold_entry = tkinter.StringVar(frame)
entry = ttk.Entry(
master=frame,
justify=tkinter.RIGHT,
textvariable=self.threshold_entry,
width=3
)
entry.pack(side=tkinter.LEFT)
if not 'threshold_entry' in self.configure.common_config:
self.configure.common_config['threshold_entry'] = 0.7
self.threshold_entry.set(str(int(self.configure.common_config['threshold_entry'] * 100)))
self.threshold_entry.trace('w', lambda *args: self.callback_threshold_entry(args))
label = ttk.Label(
master=frame,
text="% 이상 동일하면 감지하도록 설정합니다",
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
label.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
label = ttk.Label(
master=frame,
text="이미지를 인식할 때 RGB 값의 차이가 ",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.pixel_tolerance_entry = tkinter.StringVar(frame)
entry = ttk.Entry(
master=frame,
justify=tkinter.RIGHT,
textvariable=self.pixel_tolerance_entry,
width=3
)
entry.pack(side=tkinter.LEFT)
if not 'pixel_tolerance_entry' in self.configure.common_config:
self.configure.common_config['pixel_tolerance_entry'] = 30
self.pixel_tolerance_entry.set(str(int(self.configure.common_config['pixel_tolerance_entry'])))
self.pixel_tolerance_entry.trace('w', lambda *args: self.callback_pixel_tolerance_entry(args))
label = ttk.Label(
master=frame,
text="이하는 같은 이미지로 간주합니다.",
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
label.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
label = ttk.Label(
master=frame,
text="이미지 인식이 안 될 경우 찾을 때까지 지속적으로",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.adjust_entry = tkinter.StringVar(frame)
entry = ttk.Entry(
master=frame,
justify=tkinter.RIGHT,
textvariable=self.adjust_entry,
width=3
)
entry.pack(side=tkinter.LEFT)
if not 'adjust_entry' in self.configure.common_config:
self.configure.common_config['adjust_entry'] = 1
self.adjust_entry.set(str(int(self.configure.common_config['adjust_entry'])))
self.adjust_entry.trace('w', lambda *args: self.callback_adjust_entry(args))
label = ttk.Label(
master=frame,
text="% 씩 가중치를 줍니다",
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
label.pack(side=tkinter.LEFT)
# frame.pack(anchor=tkinter.W)
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
label = ttk.Label(
master=frame,
text="봇의 작업 주기를 ",
anchor=tkinter.W,
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
# countif.place(
# x=lybconstant.LYB_PADDING,
# y=lybconstant.LYB_PADDING,
# width=lybconstant.LYB_LABEL_WIDTH, height=lybconstant.LYB_LABEL_HEIGHT
# )
label.pack(side=tkinter.LEFT)
self.wakeup_period_entry = tkinter.StringVar(frame)
entry = ttk.Entry(
master=frame,
justify=tkinter.RIGHT,
textvariable=self.wakeup_period_entry,
width=6
)
entry.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text="초로 설정합니다",
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
label.pack(side=tkinter.LEFT)
if not 'wakeup_period_entry' in self.configure.common_config:
self.configure.common_config['wakeup_period_entry'] = float(1.0)
frame.pack(anchor=tkinter.W)
self.wakeup_period_entry.set(str(self.configure.common_config['wakeup_period_entry']))
self.wakeup_period_entry.trace('w', lambda *args: self.callback_wakeup_period_entry(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
label = ttk.Label(
master=frame,
text="UI 갱신 주기를 ",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.update_period_ui_entry = tkinter.StringVar(frame)
entry = ttk.Entry(
master=frame,
justify=tkinter.RIGHT,
textvariable=self.update_period_ui_entry,
width=6
)
entry.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text="초로 설정합니다",
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
if not lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI] = float(0.5)
frame.pack(anchor=tkinter.W)
self.update_period_ui_entry.set(str(self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI]))
self.update_period_ui_entry.trace('w', lambda *args: self.callback_update_period_ui_entry(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
self.use_monitoring_flag = tkinter.BooleanVar(frame)
label = ttk.Checkbutton(
master=frame,
text="모니터링 기능을 사용합니다",
variable=self.use_monitoring_flag,
onvalue=True,
offvalue=False
)
label.pack(side=tkinter.LEFT)
if not lybconstant.LYB_DO_BOOLEAN_USE_MONITORING in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_MONITORING] = True
frame.pack(anchor=tkinter.W)
self.use_monitoring_flag.set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_MONITORING]
)
self.use_monitoring_flag.trace('w',
lambda *args: self.callback_use_monitoring_booleanvar(args)
)
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
label = ttk.Label(
master=frame,
text="게임 화면 전환 후",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE] = tkinter.StringVar(frame)
entry = ttk.Entry(
master=frame,
justify=tkinter.RIGHT,
textvariable=self.gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE],
width=3
)
entry.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text="초 동안 대기합니다",
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
label.pack(side=tkinter.LEFT)
if not lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE] = 0
frame.pack(anchor=tkinter.W)
self.gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE].set(
self.configure.common_config[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE])
self.gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE].trace('w', lambda
*args: self.callback_wait_time_scene_change(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
label = ttk.Label(
master=frame,
text="매크로 실행 중 에러가 발생하면 최대",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.recovery_count_stringvar = tkinter.StringVar(frame)
combobox_list = []
for i in range(100, 10001):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.recovery_count_stringvar,
state='readonly',
justify=tkinter.RIGHT,
width=3,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text="회 재실행 시킵니다",
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
label.pack(side=tkinter.LEFT)
if not lybconstant.LYB_DO_STRING_RECOVERY_COUNT in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT] = 999
frame.pack(anchor=tkinter.W)
self.recovery_count_stringvar.set(self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT])
self.recovery_count_stringvar.trace('w', lambda *args: self.callback_recovery_count_stringvar(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
label = ttk.Label(
master=frame,
text="APP 종료 행동을 ",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.close_app_stringvar = tkinter.StringVar(frame)
entry = ttk.Entry(
master=frame,
justify=tkinter.RIGHT,
textvariable=self.close_app_stringvar,
width=3
)
entry.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text="회 실행 시킵니다",
justify=tkinter.LEFT
# fg='White' if brightness < 120 else 'Black',
# bg=bg_colour
)
label.pack(side=tkinter.LEFT)
if not lybconstant.LYB_DO_STRING_CLOSE_APP_COUNT in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_CLOSE_APP_COUNT] = 5
frame.pack(anchor=tkinter.W)
self.close_app_stringvar.set(self.configure.common_config[lybconstant.LYB_DO_STRING_CLOSE_APP_COUNT])
self.close_app_stringvar.trace('w', lambda *args: self.callback_close_app_stringvar(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
self.random_click_booleanvar = tkinter.BooleanVar(frame)
checkbutton = ttk.Checkbutton(
master=frame,
text="마우스 랜덤 좌표 클릭(오차 범위:",
variable=self.random_click_booleanvar,
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
self.random_click_pixel_stringvar = tkinter.StringVar(frame)
combobox_list = []
for i in range(1, 11):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.random_click_pixel_stringvar,
state='readonly',
width=2,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text="픽셀)",
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
if not lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK + 'pixel' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK + 'pixel'] = 5
self.random_click_pixel_stringvar.set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK + 'pixel'])
self.random_click_pixel_stringvar.trace('w', lambda *args: self.callback_random_click_pixel_stringvar(args))
if not lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK] = False
self.random_click_booleanvar.set(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK])
self.random_click_booleanvar.trace('w', lambda *args: self.callback_random_click_booleanvar(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
self.thumbnail_shortcut_booleanvar = tkinter.BooleanVar(frame)
checkbutton = ttk.Checkbutton(
master=frame,
text="썸네일 단축키로 닫기 활성화",
variable=self.thumbnail_shortcut_booleanvar,
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
if not lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'shortcut' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'shortcut'] = True
self.thumbnail_shortcut_booleanvar.set(
self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'shortcut'])
self.thumbnail_shortcut_booleanvar.trace('w', lambda *args: self.callback_thumbnail_shortcut_booleanvar(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
self.thumbnail_width_stringvar = tkinter.StringVar(frame)
label = ttk.Label(
master=frame,
text="썸네일 크기 - 가로 ",
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
combobox_list = []
for i in range(10, 1921, 10):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.thumbnail_width_stringvar,
state='readonly',
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
self.thumbnail_height_stringvar = tkinter.StringVar(frame)
label = ttk.Label(
master=frame,
text=" 세로",
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
combobox_list = []
for i in range(10, 1281, 10):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.thumbnail_height_stringvar,
state='readonly',
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
if not lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'width' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'width'] = 320
self.thumbnail_width_stringvar.set(
self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'width'])
self.thumbnail_width_stringvar.trace('w', lambda *args: self.callback_thumbnail_width_stringvar(args))
if not lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'height' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'height'] = 180
self.thumbnail_height_stringvar.set(
self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'height'])
self.thumbnail_height_stringvar.trace('w', lambda *args: self.callback_thumbnail_height_stringvar(args))
# self.threshold_entry.place(
# x=lybconstant.LYB_LABEL_WIDTH + 5*lybconstant.LYB_PADDING,
# y=lybconstant.LYB_PADDING,
# width=2*lybconstant.LYB_LABEL_WIDTH - 8*lybconstant.LYB_PADDING,
# height=lybconstant.LYB_LABEL_HEIGHT
# )
# self.keyword_entry.insert(0, self.configure.keyword)
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
self.mouse_pointer_away_booleanvar = tkinter.BooleanVar(frame)
checkbutton = ttk.Checkbutton(
master=frame,
text="마우스 드래그 실행할 때 강제 커서 치우기(체크 해제시 오동작할 수 있음)",
variable=self.mouse_pointer_away_booleanvar,
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
if not lybconstant.LYB_DO_BOOLEAN_MOUSE_POINTER + 'away' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_MOUSE_POINTER + 'away'] = True
self.mouse_pointer_away_booleanvar.set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_MOUSE_POINTER + 'away'])
self.mouse_pointer_away_booleanvar.trace('w', lambda *args: self.callback_mouse_pointer_away_booleanvar(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
self.close_app_nox_new_booleanvar = tkinter.BooleanVar(frame)
checkbutton = ttk.Checkbutton(
master=frame,
text="녹스 최신 버전 사용 중(앱 종료 기능이 구버전과 다름)",
variable=self.close_app_nox_new_booleanvar,
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
if not lybconstant.LYB_DO_STRING_CLOSE_APP_NOX_NEW in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_CLOSE_APP_NOX_NEW] = True
self.close_app_nox_new_booleanvar.set(self.configure.common_config[lybconstant.LYB_DO_STRING_CLOSE_APP_NOX_NEW])
self.close_app_nox_new_booleanvar.trace('w', lambda *args: self.callback_close_app_nox_new_booleanvar(args))
frame = ttk.Frame(self.configure_frame, relief=frame_relief)
self.freezing_limit_stringvar = tkinter.StringVar(frame)
label = ttk.Label(
master=frame,
text="화면 프리징 감지 제한 시간(0: 사용 안함)",
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
combobox_list = []
for i in range(0, 301):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.freezing_limit_stringvar,
state='readonly',
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text="초",
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
if not lybconstant.LYB_DO_STRING_RECOVERY_COUNT + 'freezing_limit' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT + 'freezing_limit'] = 30
self.freezing_limit_stringvar.set(
self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT + 'freezing_limit'])
self.freezing_limit_stringvar.trace('w', lambda *args: self.callback_freezing_limit_stringvar(args))
self.configure_frame.pack(anchor=tkinter.NW, side=tkinter.LEFT, padx=5, pady=5)
label_frame = ttk.LabelFrame(master=self.common_top_frame, text="앱플레이어 재시작 설정")
frame = ttk.Frame(label_frame)
self.use_restart_app_player = tkinter.BooleanVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER] = False
self.use_restart_app_player.set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER])
self.use_restart_app_player.trace('w',
lambda *args: self.callback_use_restart_app_player_booleanvar(args))
checkbutton = ttk.Checkbutton(
master=frame,
text="앱 플레이어(녹스, 모모) 재시작 기능을 사용합니다",
variable=self.use_restart_app_player,
onvalue=True,
offvalue=False
)
checkbutton.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(label_frame)
label = ttk.Label(
master=frame,
text="앱 플레이어 재시작 주기:",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.use_restart_app_player_period = tkinter.StringVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'period' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'period'] = 4800
self.use_restart_app_player_period.set(
str(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'period']))
self.use_restart_app_player_period.trace('w',
lambda *args: self.callback_use_restart_app_player_period_stringvar(
args))
combobox_list = []
for i in range(0, 86401, 5):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.use_restart_app_player_period,
state='readonly',
width=6,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text='초',
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(label_frame)
label = ttk.Label(
master=frame,
text="앱 플레이어 종료 후 재시작 대기 시간:",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.restart_app_player_delay = tkinter.StringVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'delay' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'delay'] = 10
self.restart_app_player_delay.set(
str(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'delay']))
self.restart_app_player_delay.trace('w', lambda *args: self.callback_restart_app_player_delay_stringvar(args))
combobox_list = []
for i in range(10, 3601, 5):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.restart_app_player_delay,
state='readonly',
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text='초',
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(label_frame)
label = ttk.Label(
master=frame,
text="앱 플레이어 종료 후 재시작 시도 횟수:",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.restart_app_player_retry = tkinter.StringVar(frame)
if not lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'retry' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'retry'] = 5
self.restart_app_player_retry.set(
str(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'retry']))
self.restart_app_player_retry.trace('w', lambda *args: self.callback_restart_app_player_retry_stringvar(args))
combobox_list = []
for i in range(1, 101):
combobox_list.append(i)
combobox = ttk.Combobox(
master=frame,
values=combobox_list,
textvariable=self.restart_app_player_retry,
state='readonly',
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text='회',
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
frame.pack(anchor=tkinter.W)
label_frame.pack(anchor=tkinter.NW, padx=5, pady=5)
self.common_top_frame.pack(anchor=tkinter.NW)
self.telegram_frame = ttk.LabelFrame(
master=self.option_dic['common_config_tab'],
text='텔레그램'
)
frame = ttk.Frame(self.telegram_frame)
if not lybconstant.LYB_DO_BOOLEAN_COMMON_TELEGRAM_NOTIFY + 'recovery' in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_COMMON_TELEGRAM_NOTIFY + 'recovery'] = True
self.recovery_telegram_checkbox = tkinter.BooleanVar()
self.recovery_telegram_checkbox.set(
self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_COMMON_TELEGRAM_NOTIFY + 'recovery'])
self.recovery_telegram_checkbox.trace('w',
lambda *args: self.callback_common_telegram_notify_recovery(args))
check_box = ttk.Checkbutton(
master=frame,
text='프로그램 오류 발생 알림',
variable=self.recovery_telegram_checkbox,
onvalue=True,
offvalue=False
)
check_box.pack(anchor=tkinter.W)
frame.pack(anchor=tkinter.W)
frame = ttk.Frame(self.telegram_frame)
label = ttk.Label(
master=frame,
text="텔레그램 메세지 수신 확인 주기:",
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
self.period_telegram_entry = tkinter.StringVar(frame)
combobox = ttk.Combobox(
master=frame,
values=[5, 10, 30, 99999],
textvariable=self.period_telegram_entry,
state='readonly',
width=5,
font=lybconstant.LYB_FONT
)
combobox.pack(side=tkinter.LEFT)
label = ttk.Label(
master=frame,
text='초',
anchor=tkinter.W,
justify=tkinter.LEFT
)
label.pack(side=tkinter.LEFT)
if not lybconstant.LYB_DO_STRING_PERIOD_TELEGRAM in self.configure.common_config:
self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_TELEGRAM] = 10
frame.pack(anchor=tkinter.W)
self.period_telegram_entry.set(str(self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_TELEGRAM]))
self.period_telegram_entry.trace('w', lambda *args: self.callback_period_telegram_entry(args))
self.telegram_frame.pack(anchor=tkinter.W, padx=5, pady=5)
self.failover_frame = ttk.LabelFrame(
master=self.option_dic['common_config_tab'],
text='비정상 복구'
)
self.failover_frame.pack(anchor=tkinter.W, padx=5, pady=5)
for i in range(len(self.games)):
self.game_options[self.games[i]] = {}
self.game_frame[self.games[i]] = {}
self.tab_frame.append(ttk.Frame(self.note,
width=self.width - lybconstant.LYB_PADDING,
height=self.height - lybconstant.LYB_PADDING,
relief='groove'
))
self.note.add(self.tab_frame[i + 1], text=self.games[i])
# self.configure.common_config[self.games[0]] = {}
# self.configure.common_config[self.games[0]]['work_list'] = []
# for each_work in LYBLIN2REV.LYBLineage2Revolution.work_list:
# self.game_options[self.games[0]]['work_list_listbox'].insert('end', each_work)
# self.configure.common_config[self.games[0]]['work_list'].append(each_work)
self.game_tab_dic = {
}
# 다크에덴M
game_index = 0
lyb_game_tab = LYBROHAN.LYBRohanTab(
self.tab_frame[game_index + 1],
self.configure,
self.game_options[self.games[game_index]],
self.game_frame[self.games[game_index]],
self.width,
self.height
)
self.game_tab_dic[lybconstant.LYB_GAME_ROHAN] = lyb_game_tab
# # 헌드레드 소울
# lybhttp = self.login()
# base_point = lybhttp.get_elem('hundredsoul_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
#
# if int(self.get_mb_point()) >= base_point:
# game_index += 1
# lyb_game_tab = LYBHUNDREDSOUL.LYBHundredSoulTab(
# self.tab_frame[game_index + 1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# self.game_tab_dic[lybconstant.LYB_GAME_HUNDREDSOUL] = lyb_game_tab
# # 테라M
#
# game_index = 0
# lyb_l2r_tab = LYBTERA.LYBTeraTab(
# self.tab_frame[game_index + 1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# self.game_tab_dic[lybconstant.LYB_GAME_TERA] = lyb_l2r_tab
#
# # 검은 사막M
# lybhttp = self.login()
# base_point = lybhttp.get_elem('blackdesert_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
# if int(self.get_mb_point()) >= base_point:
# game_index += 1
# lyb_game_tab = LYBBLACKDESERT.LYBBlackDesertTab(
# self.tab_frame[game_index + 1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# self.game_tab_dic[lybconstant.LYB_GAME_BLACKDESERT] = lyb_game_tab
#
# # 블레이드2
# lybhttp = self.login()
# base_point = lybhttp.get_elem('blade2_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
# if int(self.get_mb_point()) >= base_point:
# game_index += 1
# lyb_game_tab = LYBBLADE2.LYBBlade2Tab(
# self.tab_frame[game_index + 1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# self.game_tab_dic[lybconstant.LYB_GAME_BLADE2] = lyb_game_tab
#
# # 이카루스
# lybhttp = self.login()
# base_point = lybhttp.get_elem('icarus_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
# if int(self.get_mb_point()) >= base_point:
# game_index += 1
# lyb_game_tab = LYBICARUS.LYBIcarusTab(
# self.tab_frame[game_index + 1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# self.game_tab_dic[lybconstant.LYB_GAME_ICARUS] = lyb_game_tab
#
# # 탈리온
# lybhttp = self.login()
# base_point = lybhttp.get_elem('talion_point')
# if base_point == None:
# base_point = 0
# else:
# base_point = int(base_point)
# if int(self.get_mb_point()) >= base_point:
# game_index += 1
# lyb_game_tab = LYBTALION.LYBTalionTab(
# self.tab_frame[game_index + 1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# self.game_tab_dic[lybconstant.LYB_GAME_TALION] = lyb_game_tab
# game_index = 1
# lyb_l2r_tab = LYBLIN2REV.LYBLineage2RevolutionTab(
# self.tab_frame[game_index+1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# 클랜즈: 달의 그림자
# game_index += 1
# lyb_clans_tab = LYBCLANS.LYBClansTab(
# self.tab_frame[game_index+1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# 열혈강호M
# game_index += 1
# lyb_yeolhyul_tab = LYBYEOLHYUL.LYBYeolhyulTab(
# self.tab_frame[game_index+1],
# self.configure,
# self.game_options[self.games[game_index]],
# self.game_frame[self.games[game_index]],
# self.width,
# self.height
# )
# self.game_options[self.games[0]]['window_list_option_menu'] = tkinter.OptionMenu(
# self.game_frame[self.games[0]]['window_list'],
# self.game_options[self.games[0]]['window_list_stringvar'],
# ''
# )
# self.game_options[self.games[0]]['window_list_stringvar'].trace('w', lambda *args: self.select_window_list(args, game_name=self.games[0]))
# self.game_options[self.games[0]]['window_list_option_menu'].configure(width=18)
# self.game_options[self.games[0]]['window_list_option_menu'].pack(side=tkinter.TOP)
# self.game_frame[self.games[0]]['window_list'].place(
# x = 6*lybconstant.LYB_PADDING + self.width*0.5,
# y = 2*lybconstant.LYB_PADDING,
# width = self.width*0.5 - 10*lybconstant.LYB_PADDING,
# height = lybconstant.LYB_BUTTON_HEIGHT
# )
# -- 워크 리스트 라벨
# w_name = 'work_list_label'
# self.game_frame[self.games[0]][w_name] = ttk.Frame(self.tab_frame[-1], relief=frame_relief)
# self.game_options[self.games[0]][w_name] = ttk.Label(
# master = self.game_frame[self.games[0]][w_name] ,
# text = lybconstant.LYB_LABEL_AVAILABLE_WORK_LIST,
# relief = 'flat',
# )
# self.game_options[self.games[0]][w_name].pack(side=tkinter.TOP)
# self.game_frame[self.games[0]][w_name].place(
# x = 2*lybconstant.LYB_PADDING,
# y = lybconstant.LYB_BUTTON_HEIGHT + 4*lybconstant.LYB_PADDING,
# width = self.width*0.5 - 10*lybconstant.LYB_PADDING,
# height = lybconstant.LYB_BUTTON_HEIGHT
# )
# -- 스케쥴 리스트 라벨
# w_name = 'schedule_list_label'
# self.game_frame[self.games[0]][w_name] = ttk.Frame(self.tab_frame[-1], relief=frame_relief)
# self.game_options[self.games[0]][w_name] = ttk.Label(
# master = self.game_frame[self.games[0]][w_name] ,
# text = lybconstant.LYB_LABEL_SCHEDULE_WORK_LIST,
# relief = 'flat',
# )
# self.game_options[self.games[0]][w_name].pack(side=tkinter.TOP)
# self.game_frame[self.games[0]][w_name].place(
# x = 6*lybconstant.LYB_PADDING + self.width*0.5,
# y = lybconstant.LYB_BUTTON_HEIGHT + 4*lybconstant.LYB_PADDING,
# width = self.width*0.5 - 10*lybconstant.LYB_PADDING,
# height = lybconstant.LYB_BUTTON_HEIGHT
# )
# -- 작업 목록
# self.game_frame[self.games[0]]['work_list'] = ttk.Frame(self.tab_frame[-1], relief=frame_relief)
# self.game_options[self.games[0]]['work_list_listbox'] = tkinter.Listbox(
# master = self.game_frame[self.games[0]]['work_list'],
# font = ("돋움", 10),
# activestyle = 'none'
# )
# self.game_options[self.games[0]]['work_list_listbox'].pack(side=tkinter.TOP)
# self.game_options[self.games[0]]['work_list_listbox'].bind(
# '<<ListboxSelect>>',
# lambda event: self.select_work_list(event, game_name=self.games[0])
# )
# self.game_frame[self.games[0]]['work_list'].place(
# x = 2 * lybconstant.LYB_PADDING,
# y = 2 * lybconstant.LYB_BUTTON_HEIGHT + 4 * lybconstant.LYB_PADDING,
# width = self.width*0.5 - 10*lybconstant.LYB_PADDING,
# height = self.height*0.4
# )
# -- 스케쥴 목록
# self.game_frame[self.games[0]]['schedule_list'] = ttk.Frame(self.tab_frame[-1], relief=frame_relief)
# self.game_options[self.games[0]]['schedule_list_listbox'] = tkinter.Listbox(
# master = self.game_frame[self.games[0]]['schedule_list'],
# font = ("돋움", 10),
# activestyle = 'none'
# )
# self.game_options[self.games[0]]['schedule_list_listbox'].pack(side=tkinter.TOP)
# self.game_options[self.games[0]]['schedule_list_listbox'].bind(
# '<<ListboxSelect>>',
# lambda event: self.select_schedule_list(event, game_name=self.games[0])
# )
# self.game_frame[self.games[0]]['schedule_list'].place(
# x = 6 * lybconstant.LYB_PADDING + self.width * 0.5,
# y = 2 * lybconstant.LYB_BUTTON_HEIGHT + 4 * lybconstant.LYB_PADDING,
# width = self.width * 0.5 - 10*lybconstant.LYB_PADDING,
# height = self.height * 0.4
# )
# self.configure.common_config[self.games[0]]['schedule_list'] = copy.deepcopy(self.configure.common_config[self.games[0]]['work_list'])
# for each_work in self.configure.common_config[self.games[0]]['schedule_list']:
# self.game_options[self.games[0]]['schedule_list_listbox'].insert('end', each_work)
# self.game_frame[self.games[0]]['options'] = ttk.Frame(self.tab_frame[-1], relief=frame_relief)
# self.game_frame[self.games[0]]['left_option'] = ttk.Frame(
# master = self.game_frame[self.games[0]]['options'],
# relief = frame_relief
# )
# self.game_frame[self.games[0]]['left_option'].place(
# x = 2 * lybconstant.LYB_PADDING,
# y = 2 * lybconstant.LYB_PADDING,
# width = self.width * 0.5 - 8 * lybconstant.LYB_PADDING,
# height = self.height * 0.4 - 4 * lybconstant.LYB_PADDING
# )
# self.game_frame[self.games[0]]['right_option'] = ttk.Frame(
# master = self.game_frame[self.games[0]]['options'],
# relief = frame_relief
# )
# self.game_frame[self.games[0]]['right_option'].place(
# x = self.width * 0.5,
# y = 2 * lybconstant.LYB_PADDING,
# width = self.width * 0.5 - 8 * lybconstant.LYB_PADDING,
# height = self.height * 0.4 - 4 * lybconstant.LYB_PADDING
# )
# self.game_frame[self.games[0]]['options'].place(
# x = 2 * lybconstant.LYB_PADDING,
# y = self.height * 0.5,
# width = self.width - 6 * lybconstant.LYB_PADDING,
# height = self.height * 0.4 + 10 * lybconstant.LYB_PADDING
# )
self.note.pack()
# self.master.bind('<Return>', lambda event, a=0:
self.master.bind('<Return>', self.searchWindow)
# self.master.bind('<F1>', self.startWorker)
# self.master.bind('<F2>', self.pauseWorker)
# self.master.bind('<F3>', self.terminateWorker)
# -----------------------------------------------------
# Thread variable
# -----------------------------------------------------
self.workers = []
self.worker_dic = {}
self.recovery_count_dic = {}
self.search_worker = True
self.start_worker = True
self.hwnds = {}
self.side_hwnds = {}
self.parent_hwnds = {}
self.multi_hwnds = {}
self.start_flag = 0
# self.ready_to_start = False
self.logger.critical('Successfully initialized')
self.startLongPollingWorker()
self.manage_workers()
def manage_workers(self):
    """Main UI scheduler tick: reap dead workers, drain their response
    queues, refresh the UI panels and re-arm itself on the Tk event loop.

    Called once at start-up, then re-scheduled every
    ``LYB_DO_STRING_PERIOD_UPDATE_UI`` seconds (default 1s) via
    ``master.after``.
    """
    # One-shot window re-scan requested elsewhere (e.g. after a restart).
    if self.search_worker:
        self.searchWindow(None)
        self.search_worker = False
    # Launch every worker that was queued for a deferred start.
    if len(self.ready_to_start_queue) > 0:
        for elem in self.ready_to_start_queue:
            self.start_each_worker(elem)
        self.ready_to_start_queue = []
    # Drop finished worker threads. NOTE: Thread.isAlive() was removed in
    # Python 3.9 -- is_alive() is the supported spelling.
    self.workers = [worker for worker in self.workers if worker.is_alive()]
    # Drain each worker's response queue without blocking the UI thread.
    for worker in self.workers:
        while True:
            try:
                response_message = worker.response_queue.get_nowait()
                worker.response_queue.task_done()
                self.process_message(worker, response_message)
            except queue.Empty:
                break
            except Exception:
                # A bad message must not kill the UI tick; log and keep going.
                self.logger.error(traceback.format_exc())
    self.update_information()
    try:
        self.update_monitor_master()
    except Exception:
        self.logger.error(traceback.format_exc())
    self.update_restart_app_player()
    current_hour = int(datetime.datetime.today().hour)
    if self.first_for_ads:
        # First tick: always run the ad check once.
        self.check_ads()
        self.first_for_ads = False
    elif 9 <= current_hour < 24 and len(self.workers) > 0:
        # During the day a failed ad check stops every worker and notifies
        # the owner via Telegram.
        if not self.check_ads():
            self.terminateWorker(None)
            rest = self.login()
            chat_id = rest.get_chatid()
            rest.send_telegram_message(chat_id, '※ 광고가 팝업되면서 프로그램이 중지되었습니다.')
    try:
        period_update_ui = float(self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI]) * 1000
    except Exception:
        # Missing or malformed config value: fall back to a 1-second tick.
        period_update_ui = 1000
    # Re-arm this method on the Tk event loop.
    self.master.after(int(period_update_ui), self.manage_workers)
def check_ads(self):
    """Check whether the advertisement gate allows the bot to keep running.

    Returns True when execution may continue, False when the user still
    has to view the ad (the ad helper executable is then launched).

    NOTE(review): the unconditional ``return True`` below disables the
    whole check -- everything after it is unreachable dead code, presumably
    kept for later re-activation. Confirm intent before removing it.
    """
    return True
    rest = self.login()
    # Minimum number of seconds between ad pop-ups, fetched server-side.
    ads_interval = int(rest.get_elem('ads_interval'))
    # Pop up at most once per interval; if the ad was not watched for 10+
    # seconds it will keep popping up. (translated from Korean)
    elapsedTimeAdsClicked = time.time() - self.timeClickedAds
    if elapsedTimeAdsClicked > ads_interval:
        ads_file_path = resource_path('dogfooterads.exe')
        if os.path.isfile(ads_file_path) == False:
            # Fall back to the PyInstaller dist layout.
            ads_file_path = resource_path('dist/dogfooterbot/dogfooterads.exe')
            if os.path.isfile(ads_file_path) == False:
                self.logger.error('dogfooterads.exe 파일 없음')
                return False
        if likeyoubot_license.LYBLicense().is_done_ads_info() == False:
            # Ad not yet acknowledged today: launch the ad helper and stop.
            cmd = [
                resource_path(ads_file_path),
                "dogfooter"
            ]
            p = Popen(cmd)
            self.logger.critical('하루에 한 번만 하시면 됩니다')
            self.timeClickedAds = time.time()
            return False
        self.timeClickedAds = time.time()
    return True
def update_restart_app_player(self):
    """One step of the app-player restart state machine.

    Driven from the periodic UI tick. Phase 1: pop one entry from
    ``stop_app_player_list``, close that app player and remember how to
    relaunch it. Phase 2 (once the stop list is empty): wait out the
    configured delay, then hand relaunch requests to worker threads one
    at a time, re-scanning windows between retries.
    """
    # Feature disabled in configuration -> nothing to do.
    if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER] == False:
        return
    if (not 'restart_app_player_status' in self.option_dic or
            self.option_dic['restart_app_player_status'] == False):
        self.option_dic['restart_app_player_status'] = False
    if self.option_dic['restart_app_player_status'] == False:
        return
    try:
        # Phase 1: close one app player per tick. pop(0) raises IndexError
        # once the stop list is exhausted, falling through to phase 2.
        # self.logger.warn('self.stop_app_player_list:' + str(self.stop_app_player_list))
        game_object = self.stop_app_player_list.pop(0)
        # Snapshot everything needed to relaunch this player later.
        restart_info = {}
        restart_info['player_type'] = copy.deepcopy(game_object.player_type)
        restart_info['multi_hwnd_dic'] = copy.deepcopy(game_object.multi_hwnd_dic)
        restart_info['window_title'] = copy.deepcopy(game_object.window_title)
        self.restart_app_player_list.append(restart_info)
        window_name = game_object.window_title
        self.logger.warn(window_name + str(' 종료 시도'))
        if window_name in self.game_object:
            game_object.process_restart_app_player()
            self.restart_app_player_count += 1
            self.terminate_each_worker(window_name)
            self.game_object.pop(window_name)
        # Restart the delay clock and the retry counter for phase 2.
        self.option_dic['restart_app_player_delay'] = time.time()
        self.option_dic['restart_app_player_retry'] = 0
        return
    except:
        # Stop list empty (or close failed): continue with phase 2.
        pass
    # All windows closed; read the restart timing configuration (seconds).
    # NOTE(review): period_restart and retry_restart are read but unused
    # in this method -- confirm whether they belong here.
    period_restart = int(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'period'])
    delay_restart = int(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'delay'])
    retry_restart = int(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'retry'])
    # NOTE(review): '<=' also resets the machine when the counts merely
    # match (and when nothing was closed yet) -- confirm the comparison.
    if self.restart_app_player_count <= len(self.game_object):
        self.restart_app_player_count = 0
        self.option_dic['restart_app_player_status'] = False
        return
    self.logger.warn(
        '재시작 지연 시간: ' + str(int(time.time() - self.option_dic['restart_app_player_delay'])) + ' / ' + str(
            delay_restart) + ' 초')
    # Have all of the closed app players been re-discovered by the window
    # search yet?
    if self.restart_app_player_search == True:
        self.searchWindow(None)
        if self.restart_app_player_count <= len(self.app_player_process_list['values']):
            self.startWorker(None)
            self.restart_app_player_search = False
        return
    # Relaunch logic: has the configured post-shutdown delay elapsed?
    if time.time() - self.option_dic['restart_app_player_delay'] < delay_restart:
        return
    # Hand one relaunch request at a time to a worker thread.
    if len(self.restart_app_player_list) > 0:
        worker_thread = self.executeThread()
        if worker_thread == None:
            return
        elem = self.restart_app_player_list.pop(0)
        message = []
        message.append(elem['player_type'])
        message.append(elem['multi_hwnd_dic'])
        message.append(elem['window_title'])
        message.append(self.configure)
        worker_thread.command_queue.put_nowait(likeyoubot_message.LYBMessage('start_app_player', message))
        return
    # All relaunch requests dispatched: reset the delay clock, count the
    # retry and request a fresh window search on the next tick.
    self.option_dic['restart_app_player_delay'] = time.time()
    self.option_dic['restart_app_player_retry'] += 1
    self.search_worker = True
    self.ready_to_search_queue.append('__all__')
def check_ip(self):
    """Periodically verify that this account is not in use elsewhere.

    Rate-limited by the server-supplied ``ip_check_period``. Also
    refreshes the window title with per-game play counts. When the
    member's point balance is below ``ip_free_point`` and the IP is not
    free, all workers are terminated.
    """
    api = self.login()
    elapsed = time.time() - self.last_check_ip
    if elapsed < int(api.get_elem('ip_check_period')):
        return
    # Build the " G(n)" suffix listing each game with active players.
    segments = []
    for game in self.games:
        playing = int(api.getGameCountOnPlaying(game))
        if playing > 0:
            segments.append(" " + game[0] + "(" + str(playing) + ")")
    suffix = "".join(segments)
    title = (self.configure.window_title + ' ' + str(lybconstant.LYB_VERSION)
             + ' ' + api.getConnectCount() + suffix)
    self.master.title(title)
    self.last_check_ip = time.time()
    base_point = api.get_elem('ip_free_point')
    # Clear the cached balance so get_mb_point() sends a fresh login
    # request. (translated from Korean)
    self.mb_point = None
    if int(self.get_mb_point()) < int(base_point):
        if api.is_ip_free() == False:
            self.logger.error('프로그램이 다른 컴퓨터에서 사용 중입니다.')
            self.terminateWorker(None)
def update_telegram(self):
    """Poll Telegram for a pending remote command and execute it.

    Rate-limited by the ``LYB_DO_STRING_PERIOD_TELEGRAM`` config value
    (seconds). Does nothing when no chat id is configured or no new
    update is available.
    """
    period = int(self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_TELEGRAM])
    if time.time() - self.last_check_telegram < period:
        return
    self.last_check_telegram = time.time()
    rest = self.login()
    chat_id = rest.get_chatid()
    # No chat configured -> remote control is disabled.
    if chat_id is None or len(str(chat_id)) == 0:
        return
    update = rest.getTelegramUpdates(chat_id)
    # PEP 8: identity comparison with None uses "is", not "==".
    if update is None:
        return
    command = update.message.text
    self.logger.debug('command: ' + str(command))
    self.process_command(command)
def update_server_information(self):
    """Refresh the window title with server-wide play counts.

    Throttled to run at most once per minute; uses the persistent
    ``self.rest`` client.
    """
    if time.time() - self.last_check_server < 60:
        return
    self.last_check_server = time.time()
    # Assemble the " G(n)" suffix for every game with active players.
    segments = []
    for game in self.games:
        playing = int(self.rest.getGameCountOnPlaying(game))
        if playing > 0:
            segments.append(" " + game[0] + "(" + str(playing) + ")")
    new_title = (self.configure.window_title + ' '
                 + str(lybconstant.LYB_VERSION) + ' '
                 + self.rest.getConnectCount() + "".join(segments))
    self.master.title(new_title)
def update_information(self):
    """Tail the log file and append newly written lines to the log text widget.

    Reads at most ~1000 lines per call (defense_limit) so the Tk event loop
    is not starved.  Lines are filtered by the per-level checkboxes and the
    free-text log filter before insertion; the one-letter level code is used
    as the text-widget tag.
    """
    fp = self.log_fp
    defense_limit = 0
    while True:
        line = fp.readline()
        try:
            last_pos = fp.tell()
        except UnicodeDecodeError:
            # a partially written multi-byte character at EOF -- skip and retry
            continue
        except:
            self.logger.error(traceback.format_exc())
            break
        if line == '':
            # reached current end of file; resume on the next timer tick
            break
        # NOTE(review): last_pos is taken *after* readline, so this seek is
        # effectively a no-op; presumably defensive -- confirm before removing.
        fp.seek(last_pos)
        line_split = line.split()
        # column 1 of a well-formed log line is the one-letter level code
        if len(line_split) > 3:
            debug_level = line.split()[1]
        else:
            debug_level = 'D'
        if debug_level == 'C':
            if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'] == False:
                continue
        elif debug_level == 'E':
            if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'] == False:
                continue
        elif debug_level == 'W':
            if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'] == False:
                continue
        elif debug_level == 'I':
            if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'] == False:
                continue
        elif debug_level == 'D':
            if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'] == False:
                continue
        else:
            # unknown level codes are treated as errors
            debug_level = 'E'
            if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'] == False:
                continue
        if len(self.configure.common_config[lybconstant.LYB_DO_STRING_LOG_FILTER]) > 0:
            if not self.configure.common_config[lybconstant.LYB_DO_STRING_LOG_FILTER] in line:
                continue
        self.information.insert("end", line.split('\n')[0].split('FileInfo')[0] + '\n', debug_level)
        if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'] == False:
            self.information.see('end')
        defense_limit += 1
        if defense_limit > 1000:
            break
def process_command(self, command):
    """Handle one Telegram remote-control command and reply with the outcome.

    Commands start with '/'.  Remote control requires the account to hold at
    least 'telegram_point' points; below that threshold only the rejection
    message is sent back.  Unknown commands get a usage message.
    """
    message_to_return = None
    if command == None or command[0] != '/':
        self.logger.error('올바르지 않은 형식의 명령: ' + str(command))
    else:
        rest = self.login()
        base_point = rest.get_elem('telegram_point')
        if base_point == None:
            base_point = 0
        else:
            base_point = int(base_point)
        if int(self.get_mb_point()) < base_point:
            message_to_return = '텔레그램 원격 제어 기능은 ' + str(base_point) + \
                                ' 포인트 이상 회원들만 사용 가능합니다. 현재 포인트: ' + str(self.get_mb_point())
        else:
            # commands are matched case-insensitively, short or long form
            if command.upper() == '/SEARCH' or command.upper() == '/SEA':
                self.searchWindow(None)
                message_to_return = '매크로 검색 완료'
            elif command.upper() == '/START' or command.upper() == '/STA':
                self.startWorker(None)
                message_to_return = '매크로 시작 완료'
            elif command.upper() == '/STOP' or command.upper() == '/STO':
                self.terminateWorker(None)
                message_to_return = '매크로 정지 완료'
            elif command.upper() == '/SS':
                self.send_screenshot_telegram()
                message_to_return = '전체 화면 스크린샷 전송 완료'
            if message_to_return == None:
                message_to_return = \
                    '지원하지 않는 명령: ' + str(command) + '\n\n' + \
                    '도그푸터 지원 명령어 목록:\n\n' + \
                    '/SEA(RCH): 검색\n' + \
                    '/STA(RT): 시작\n' + \
                    '/STO(P): 정지\n' + \
                    '/SS: 전체 화면 스크린샷 전송\n\n' \
                    '※ 짧은 시간에 너무 많은 명령어를 입력하면 매크로가 중지될 수 있습니다.\n' \
                    '※ 명령어 입력 후 응답까지 1 ~ 20초가 걸립니다. 응답이 올 때까지 기다리세요.\n' \
                    '※ 명령어 입력 후 응답이 오지 않는다면 전송에 실패 한 것입니다. 다시 입력하세요.\n'
    rest = self.login()
    chat_id = rest.get_chatid()
    rest.send_telegram_message(chat_id, message_to_return)
def process_message(self, worker, message):
    """Dispatch a message received from a worker thread, keyed on message.type.

    Handles worker lifecycle ('end_return' with bounded auto-restart,
    'stop_app', 'end_start_app_player'), search results (window handles and
    titles), log forwarding, error notification via Telegram, and game-object
    registration.
    """
    if message.type == 'end_return':
        self.logging_message('INFO', message.message + " 작업 종료")
        if message.message != None and len(message.message) > 0 and not message.message in self.worker_dic:
            if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_COMMON_TELEGRAM_NOTIFY + 'recovery'] == True:
                rest = self.login()
                chat_id = rest.get_chatid()
                rest.send_telegram_message(chat_id, '매크로 재실행됨 - ' + str(message.message))
        max_recovery_count = self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT]
        for window_name, worker_thread in self.worker_dic.items():
            if worker == worker_thread:
                if not window_name in self.recovery_count_dic:
                    self.recovery_count_dic[window_name] = 0
                self.logger.warn('매크로 재실행 횟수 - ' + str(self.recovery_count_dic[window_name]) + ' / ' + str(
                    self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT]) + ' 회')
                # restart the window's worker only while under the recovery cap
                if self.recovery_count_dic[window_name] < max_recovery_count:
                    self.update_monitor_master()
                    self.logging_message('INFO',
                                         "[" + window_name + "] 에서 에러 감지됨. 재실행 합니다." +
                                         str(self.recovery_count_dic[window_name] + 1) + '/' + str(
                                             max_recovery_count))
                    self.search_worker = True
                    self.start_worker = True
                    self.ready_to_search_queue.append(window_name)
                    self.recovery_count_dic[window_name] += 1
                    break
                else:
                    self.recovery_count_dic[window_name] = 0
    elif message.type == 'search_hwnd_return':
        self.hwnds = copy.deepcopy(message.message)
    elif message.type == 'search_side_hwnd_return':
        self.side_hwnds = copy.deepcopy(message.message)
    elif message.type == 'search_parent_hwnd_return':
        self.parent_hwnds = copy.deepcopy(message.message)
    elif message.type == 'search_multi_hwnd_return':
        self.multi_hwnds = copy.deepcopy(message.message)
    elif message.type == 'search_title_return':
        # self.search_window.delete(0, 'end')
        new_app_player_list = []
        if len(message.message) > 0:
            for each_title in message.message:
                new_app_player_list.append(each_title)
                if self.start_worker == True:
                    # queue titles that match a pending search request
                    for elem in self.ready_to_search_queue:
                        if elem == '__all__' or elem == each_title:
                            self.ready_to_start_queue.append(each_title)
                self.logger.critical(str(each_title) + " 검색됨")
                # self.search_window.insert('end', each_title)
                # self.search_window.select_set('end')
            self.ready_to_search_queue = []
            self.start_worker = False
            self.app_player_process_list['values'] = new_app_player_list
            self.app_player_process.set(new_app_player_list[0])
            # self.selectedWindowList(None)
            self.search_flag = True
        else:
            self.app_player_process_list['values'] = []
            self.app_player_process.set('')
            if len(self.configure.keyword) > 0:
                self.logging_message("FAIL", "[" + self.configure.keyword + "]" + " 단어가 포함된 창 검색 실패")
            self.logging_message("FAIL", "창 사이즈(800x450), 창 최소화 상태인지 확인")
        self.refresh_window_game()
    elif message.type == 'log':
        self.logging_message(None, message.message)
    elif message.type.upper() == 'GOOD':
        self.logging_message("GOOD", message.message)
    elif message.type.upper() == 'BAD':
        self.logging_message("BAD", message.message)
    elif message.type.upper() == 'NICE':
        self.logging_message("NICE", message.message)
    elif message.type.upper() == 'SUB':
        self.logging_message("SUB", message.message)
    elif message.type.upper() == 'INFO':
        self.logging_message("INFO", message.message)
    elif message.type == 'error':
        if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_COMMON_TELEGRAM_NOTIFY + 'recovery'] == True:
            rest = self.login()
            chat_id = rest.get_chatid()
            rest.send_telegram_message(chat_id, '[오류 발생] ' + message.message)
        self.logging_message("FAIL", message.message)
    elif message.type == 'game_object':
        game = message.message
        self.game_object[game.window_title] = game
    elif message.type == 'stop_app':
        game = message.message
        self.option_dic['restart_app_player_status'] = True
        self.stop_app_player_list.append(game)
        # self.terminateWorker(None)
    elif message.type == 'end_start_app_player':
        if len(self.restart_app_player_list) == 0:
            self.restart_app_player_search = True
def logging_message(self, tag, logging_message):
    """Route a GUI log message to the matching logger level based on *tag*.

    'GOOD'/'NICE'/'SUB' -> info, 'BAD'/'FAIL' -> warn, everything else
    (including None) -> debug.
    """
    if tag in ('GOOD', 'NICE', 'SUB'):
        self.logger.info(logging_message)
    elif tag in ('BAD', 'FAIL'):
        self.logger.warn(logging_message)
    else:
        self.logger.debug(logging_message)
def executeThread(self, is_system=False):
    """Create and start a new LYBWorker thread; return it.

    System workers (is_system=True) are not registered in self.workers and
    therefore are not affected by pause/terminate broadcasts.
    """
    # if self.configure.common_config['security_code'] != lybconstant.LYB_SECURITY_CODE:
    # 	self.logging_message('FAIL', '실행 인증 코드 [' + self.configure.common_config['security_code'] + '] 거부됨')
    # 	self.security_authority = False
    # 	return None
    # else:
    # 	if self.security_authority == False:
    # 		self.logging_message('SUCCESS', '실행 인증 코드 [' + self.configure.common_config['security_code'] + '] 승인됨')
    # 	self.security_authority = True
    # license_limit = lybconstant.LYB_LICENSE_LIMIT - time.time()
    # 20180210
    # try:
    # 	license_limit = likeyoubot_license.LYBLicense().read_license()
    # 	if license_limit > 0:
    # 		self.logging_message('SUCCESS', str(lybconstant.LYB_VERSION) + ' 라이센스가 ' +
    # 			str(int(license_limit/(24*60*60))) + '일 ' + str(int((license_limit/(60*60))%24)) + '시간 ' + str(int((license_limit/60)%60)) + '분 후에 종료됩니다.')
    # 	else:
    # 		self.logging_message('FAIL', str(lybconstant.LYB_VERSION) + ' 라이센스가 종료 되었습니다. www.dogfooter.com 사이트에서 무료로 새버전을 다운로드 받으세요.')
    # 		return None
    # except:
    # 	self.logging_message('FAIL', str(lybconstant.LYB_VERSION) + ' 라이센스 정보를 찾을 수 없습니다. 라이센스 정보를 www.dogfooter.com 에서 확인하세요.')
    # 	return None
    # two queues: command (GUI -> worker) and response (worker -> GUI)
    worker_thread = likeyoubot_worker.LYBWorker('Thread-' + str(self.start_flag), self.configure, queue.Queue(),
                                                queue.Queue())
    # daemon so a stuck worker cannot keep the process alive after the GUI exits
    worker_thread.daemon = True
    worker_thread.start()
    if is_system == False:
        self.workers.append(worker_thread)
    return worker_thread
def startWorker(self, e):
    """Start a worker for every searched app-player window, then persist config.

    Windows without a window-specific config inherit a deep copy of the common
    config; keys added to the common config after that copy are back-filled.
    """
    # if self.ready_to_start == False:
    # 	self.search_flag = False
    # 	self.ready_to_start = True
    # 	self.searchWindow(None)
    # else:
    # for i in range(self.search_window.size()):
    # 	if not self.search_window.get(i) in self.configure.window_config:
    # 		self.configure.window_config[self.search_window.get(i)] = copy.deepcopy(self.configure.common_config)
    # 	for each_config, each_value in self.configure.common_config.items():
    # 		if not each_config in self.configure.window_config[self.search_window.get(i)]:
    # 			self.configure.window_config[self.search_window.get(i)][each_config] = self.configure.common_config[each_config]
    for each_app_player in self.app_player_process_list['values']:
        if not each_app_player in self.configure.window_config:
            self.configure.window_config[each_app_player] = copy.deepcopy(self.configure.common_config)
        for each_config, each_value in self.configure.common_config.items():
            if not each_config in self.configure.window_config[each_app_player]:
                self.configure.window_config[each_app_player][each_config] = self.configure.common_config[
                    each_config]
        self.start_each_worker(each_app_player)
    # items = map(int, self.search_window.curselection())
    # count = 0
    # for item in items:
    # 	#self.configure.common_config['threshold_entry'] = float(int(self.threshold_entry.get()) / 100)
    # 	# if float(self.pixel_tolerance_entry.get()) >= 50.0:
    # 	# 	self.pixel_tolerance_entry.delete(0, 'end')
    # 	# 	self.pixel_tolerance_entry.insert(0, '50.0')
    # 	# elif float(self.pixel_tolerance_entry.get()) <= 0.0:
    # 	# 	self.pixel_tolerance_entry.delete(0, 'end')
    # 	# 	self.pixel_tolerance_entry.insert(0, '0.0')
    # 	# self.configure.common_config['pixel_tolerance_entry'] = self.pixel_tolerance_entry.get()
    # 	# if int(self.wakeup_period_entry.get()) <= 0:
    # 	# 	self.wakeup_period_entry.set('1')
    # 	# self.configure.common_config['wakeup_period_entry'] = self.wakeup_period_entry.get()
    # 	# if float(self.wait_time_scene_change.get()) <= 0.0:
    # 	# 	self.wait_time_scene_change.delete(0, 'end')
    # 	# 	self.wait_time_scene_change.insert(0, '0')
    # 	# self.configure.common_config[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE] = self.wait_time_scene_change.get()
    # 	self.start_each_worker(self.search_window.get(item))
    # 	count += 1
    # if count == 0:
    # 	self.logging_message('FAIL', '작업을 수행할 창이 선택되지 않았습니다.' )
    # 	# self.ready_to_start = False
    # 	return
    # self.start_button.configure(stat='disabled')
    # self.search_button.configure(stat='disabled')
    # self.keyword_entry.configure(stat='disabled')
    # for i in range(len(self.tab_frame)):
    # 	if i !=0:
    # 		self.note.tab(i, stat='disabled')
    # self.ready_to_start = False
    # persist the (possibly extended) window configs to disk
    try:
        with open(self.configure.path, 'wb') as dat_file:
            pickle.dump(self.configure, dat_file)
    except:
        self.logger.error(traceback.format_exc())
def start_each_worker(self, window_name):
    """Spawn a worker thread for *window_name* and hand it its start payload.

    No-op when the ad check fails, the window already has a worker, or the
    window handle is missing (stale search results).
    """
    if self.check_ads() == False:
        return
    if window_name in self.worker_dic:
        self.logger.debug('start: already started ' + window_name + ' ' + str(self.worker_dic))
        self.logging_message('INFO', window_name + ' 이미 실행 중입니다.')
        return
    try:
        each_hwnd = self.hwnds[window_name]
    except:
        # window handle cache is out of sync with the searched titles
        self.logging_message('FAIL', '싱크 오류 발생!! 창을 검색한 후 다시 시작해주세요.')
        return
    # threshold entry is a percentage in the GUI; stored as a 0..1 fraction
    self.configure.common_config['threshold_entry'] = float(int(self.threshold_entry.get()) / 100)
    started_window_name = window_name
    started_game_name = self.configure.get_window_config(started_window_name, 'games')
    # prefer the window-specific per-game options, fall back to the common ones
    if started_game_name in self.configure.window_config[started_window_name]:
        started_option = self.configure.get_window_config(started_window_name, started_game_name)
    else:
        started_option = self.configure.common_config[started_game_name]
    started_config = self.configure
    started_window_config = self.configure.window_config[started_window_name]
    # if 'schedule_list' in self.configure.window_config[started_window_name]:
    # 	started_option = self.configure.window_config[started_window_name]['schedule_list']
    worker_thread = self.executeThread()
    if worker_thread == None:
        return
    self.worker_dic[started_window_name] = worker_thread
    side_window_handle = None
    if each_hwnd in self.side_hwnds:
        side_window_handle = self.side_hwnds[each_hwnd]
    parent_window_handle = None
    if each_hwnd in self.parent_hwnds:
        parent_window_handle = self.parent_hwnds[each_hwnd]
    # positional start payload -- order must match the worker's unpacking
    worker_thread.command_queue.put_nowait(likeyoubot_message.LYBMessage('start',
                                           [
                                               self.start_flag,
                                               each_hwnd,
                                               started_window_name,
                                               started_game_name,
                                               started_option,
                                               started_config,
                                               started_window_config,
                                               side_window_handle,
                                               parent_window_handle,
                                               self.multi_hwnds,
                                               self.game_tab_dic[started_game_name],
                                           ]
                                           )
                                           )
    self.logging_message('INFO', window_name + ' 작업 시작')
    rest = self.login()
    error_message = rest.login()
    # force the next update_server_information() to refresh immediately
    self.last_check_server = 0
    try:
        with open(self.configure.path, 'wb') as dat_file:
            pickle.dump(self.configure, dat_file)
    except:
        self.logger.error(traceback.format_exc())
def pause_each_worker(self, window_name):
    """Send a 'pause' command to the worker bound to *window_name*, if any.

    Silently logs and returns when no worker is registered for the window.
    """
    if window_name not in self.worker_dic:
        self.logger.debug('Not found worker ' + window_name + ' ' + str(self.worker_dic))
        return
    worker = self.worker_dic[window_name]
    # FIX: Thread.isAlive() was removed in Python 3.9; is_alive() is the
    # supported spelling on every Python 3 version.
    if worker.is_alive():
        worker.command_queue.put_nowait(likeyoubot_message.LYBMessage('pause', None))
def terminate_each_worker(self, window_name):
    """Send an 'end' command to the worker bound to *window_name*, if any.

    Silently logs and returns when no worker is registered for the window.
    """
    if window_name not in self.worker_dic:
        self.logger.debug('DEBUG terminate: Not found worker ' + window_name + ' ' + str(self.worker_dic))
        return
    worker = self.worker_dic[window_name]
    # FIX: Thread.isAlive() was removed in Python 3.9; is_alive() is the
    # supported spelling on every Python 3 version.
    if worker.is_alive():
        worker.command_queue.put_nowait(likeyoubot_message.LYBMessage('end', None))
def pauseWorker(self, e):
    """Toggle the pause/resume button label and broadcast 'pause' to all workers."""
    if not self.workers:
        return
    # flip the button between pause ('일시정지') and resume ('다시시작')
    next_label = '다시시작' if self.pause_button['text'] == '일시정지' else '일시정지'
    self.pause_button.configure(text=next_label)
    for each_worker in self.workers:
        each_worker.command_queue.put_nowait(likeyoubot_message.LYBMessage('pause', None))
def terminateWorker(self, e):
    """Broadcast 'end' to every worker and restore the pause button label."""
    for each_worker in self.workers:
        each_worker.command_queue.put_nowait(likeyoubot_message.LYBMessage('end', None))
    # reset the toggle button back to its pause state ('일시정지')
    if self.pause_button['text'] != '일시정지':
        self.pause_button.configure(text='일시정지')
def startLongPollingWorker(self):
    """Start a system worker that long-polls, passing this GUI as context."""
    worker_thread = self.executeThread(is_system=True)
    if worker_thread == None:
        return
    worker_thread.command_queue.put_nowait(likeyoubot_message.LYBMessage('longPolling', self))
def getWindowLocation(self, e):
    """Start a worker and ask it to report the current window location."""
    worker_thread = self.executeThread()
    if worker_thread == None:
        return
    worker_thread.command_queue.put_nowait(likeyoubot_message.LYBMessage('GetWindowLocation', self))
def searchWindow(self, e):
    """Persist the search keyword, save config, and dispatch a 'search' command."""
    self.configure.keyword = self.keyword_entry.get()
    self.master.focus()
    try:
        with open(self.configure.path, 'wb') as dat_file:
            pickle.dump(self.configure, dat_file)
    except:
        self.logger.error(traceback.format_exc())
    worker_thread = self.executeThread()
    if worker_thread == None:
        return
    worker_thread.command_queue.put_nowait(likeyoubot_message.LYBMessage('search', self.configure.window_config))
def callback_download_lybcfg(self, e):
    """Download the developer's reference config file (lyb.cfg.merge) over HTTP.

    Any existing merge file is first moved to a '.bak' backup; on download
    failure the backup is restored.  The new file takes effect on the next
    program start.
    """
    # NOTE(review): dropbox_access_token is fetched but never used here.
    dropbox_access_token = self.rest.get_elem('dropbox_access_token')
    file_name = "lyb.cfg.merge"
    try:
        os.remove(resource_path(file_name + '.bak'))
    except FileNotFoundError:
        pass
    except:
        self.logger.error(traceback.format_exc())
    try:
        shutil.move(resource_path(file_name), resource_path(file_name + '.bak'))
    except FileNotFoundError:
        pass
    except:
        self.logger.error(traceback.format_exc())
    self.logger.debug('New file: ' + file_name)
    self.logger.debug(file_name)
    # 'lybcfg' holds the download URL for the config file
    lybcfg_information = self.rest.get_elem('lybcfg')
    self.logger.debug('TEST: ' + str(lybcfg_information))
    path = resource_path(file_name)
    try:
        response = requests.get(lybcfg_information, allow_redirects=True)
        CHUNK_SIZE = 1024
        size = 0
        with open(path, "wb") as f:
            for chunk in response.iter_content(CHUNK_SIZE):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    size += CHUNK_SIZE
        self.logger.critical("개발자가 사용 중인 설정 파일 다운로드 완료(파일 이름: lyb.cfg.merge)")
        self.logger.critical("프로그램을 재실행하면 반영됩니다")
    except:
        self.logger.error(traceback.format_exc())
        # download failed: restore the backup
        shutil.move(resource_path(file_name + '.bak'), resource_path(file_name))
        return
    try:
        os.remove(resource_path(file_name + '.bak'))
    except:
        self.logger.debug('This is exe file: skip')
def callback_fork_dogfootermacro(self, e):
    """Launch the dogfootermacro companion tool as a separate process.

    Runs the bundled exe when present, otherwise falls back to invoking the
    .py script via the 'python' interpreter.  All known games are passed as
    extra command-line arguments.
    """
    file_path = resource_path('dogfootermacro.exe')
    if os.path.isfile(file_path) == False:
        # development fallback: no bundled exe, run the script
        cmd = [
            'python',
            'dogfootermacro.py',
            "dogfooter"
        ]
        for each_game in self.games:
            cmd.append(each_game)
        p = Popen(cmd)
        return
    self.logger.debug(file_path)
    cmd = [
        resource_path(file_path),
        "dogfooter"
    ]
    for each_game in self.games:
        cmd.append(each_game)
    p = Popen(cmd)
def callback_fix_window_location_number_stringvar(self, args):
    """Trace callback: persist the fixed-window-location 'number' setting."""
    self.set_config(lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number')
def callback_fix_window_location_x_stringvar(self, args):
    """Trace callback: persist the fixed-window-location 'x' setting."""
    self.set_config(lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x')
def callback_fix_window_location_y_stringvar(self, args):
    """Trace callback: persist the fixed-window-location 'y' setting."""
    self.set_config(lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y')
def callback_fix_window_location_booleanvar(self, args):
    """Trace callback: persist the fixed-window-location on/off toggle."""
    self.set_config(lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean')
def callback_inactive_mode_flag_stringvar(self, args):
    """Trace callback: persist the inactive-mode flag selection."""
    self.set_config(lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG)
def callback_use_inactive_mode_booleanvar(self):
    """Checkbox command: persist the use-inactive-mode toggle.

    Note: unlike the sibling trace callbacks this one takes no *args*,
    matching the Tk checkbox 'command' signature.
    """
    self.set_config(lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE)
def callback_common_telegram_notify_recovery(self, args):
    """Persist the 'notify recovery via Telegram' checkbox into common config."""
    self.configure.common_config[
        lybconstant.LYB_DO_BOOLEAN_COMMON_TELEGRAM_NOTIFY + 'recovery'] = self.recovery_telegram_checkbox.get()
    try:
        with open(self.configure.path, 'wb') as dat_file:
            pickle.dump(self.configure, dat_file)
    except:
        self.logger.error(traceback.format_exc())
def toggleCommonCheckBox(self, value):
    """Persist a generic common-config checkbox identified by *value*."""
    self.set_config(value)
def toggle_debug_checkbox(self, value):
    """Persist a debug checkbox identified by *value* (same path as common toggles)."""
    self.set_config(value)
# def selectedWindowList(self, event):
# # print(self.configure.window_config)
# # print(self.configure.common_config)
# # self.search_window.selection_clear(self.search_window.size() - 1)
# # self.search_window.selection_clear( 0 )
# self.logger.debug('selectedWindowList 1')
# if event != None:
# if self.note.tk.call(self.note._w, "identify", "tab", event.x, event.y) != 0:
# self.logger.debug(str(self.note.tk.call(self.note._w, "identify", "tab", event.x, event.y)))
# return
# if len(self.search_window.curselection()) == 0 and len(self.selected_window_list) > 0 and self.is_clicked_common_tab == True:
# for each_window in self.selected_window_list:
# for i in range(self.search_window.size()):
# if each_window == self.search_window.get(i):
# self.search_window.select_set(i)
# self.is_clicked_common_tab == False
# # 성능상 이슈로 return 추가함
# return
# return
# items = map(int, self.search_window.curselection())
# c_label = ''
# count = 0
# for item in items:
# # if self.search_window.get(item) == '':
# # continue
# if not '...' in c_label:
# if not c_label == '':
# c_label += ', '
# if len(c_label + self.search_window.get(item)) > 20:
# c_label += '...'
# else:
# c_label += self.search_window.get(item)
# count += 1
# if c_label == '':
# c_label = lybconstant.LYB_LABEL_SELECT_WINDOW_TEXT
# elif count > 1 and count == self.search_window.size():
# c_label = lybconstant.LYB_LABEL_SELECTED_ALL
# # print('count=', count, 'search_window=', self.search_window.size(), c_label)
# self.configure_label.configure(
# text = c_label
# )
# for each_config, each_value in self.gui_config_dic.items():
# is_selected = False
# items = map(int, self.search_window.curselection())
# for item in items:
# is_selected = True
# # if self.search_window.get(item) == '':
# # continue
# window_name = self.search_window.get(item)
# if window_name in self.configure.window_config:
# if self.configure.get_window_config(window_name, each_config) != self.gui_config_dic[each_config].get():
# self.gui_config_dic[each_config].set(self.configure.get_window_config(window_name, each_config))
# if is_selected == False:
# if self.configure.common_config[each_config] != self.gui_config_dic[each_config].get():
# self.gui_config_dic[each_config].set(self.configure.common_config[each_config])
def selected_game(self, args):
    """Combobox callback: persist the game chosen for the current window, then refresh tabs."""
    # self.logger.warn(args)
    # if len(self.search_window.curselection()) > 0:
    # 	self.selected_window_list = []
    # 	items = map(int, self.search_window.curselection())
    # 	for i in items:
    # 		self.selected_window_list.append(self.search_window.get(i))
    # self.logger.debug('[DEBUG] 1: ' + str(self.selected_window_list))
    self.set_config('games')
    # self.logger.warn(self.gui_config_dic['games'].get())
    # self.logger.warn(self.configure.get_window_config(self.app_player_process.get(), 'games'))
    # if self.gui_config_dic['games'].get() != self.configure.get_window_config(self.app_player_process.get(), 'games'):
    self.refresh_window_game()
    # self.is_clicked_common_tab = True
    # (original note: "why is this here??")
    # self.selectedWindowList(None)
def set_config(self, value):
    """Write the GUI variable *value* into the selected window's config and persist.

    Reads the currently selected app-player window from the combobox; when no
    window is selected the change is silently dropped.
    """
    # print('[COMMON CONFIG]', value, self.configure.common_config[value])
    # items = map(int, self.search_window.curselection())
    # is_selected = False
    # for item in items:
    # 	is_selected = True
    # 	self.configure.set_window_config(self.search_window.get(item), value, self.gui_config_dic[value].get())
    window_name = self.app_player_process.get()
    if len(window_name) > 0:
        self.configure.set_window_config(window_name, value, self.gui_config_dic[value].get())
    # if is_selected == False:
    # 	# 공통적용
    # 	self.configure.common_config[value] = self.gui_config_dic[value].get()
    # print(self.configure.window_config)
    # print(self.configure.common_config)
    # self.refresh_window_game()
    try:
        with open(self.configure.path, 'wb') as dat_file:
            pickle.dump(self.configure, dat_file)
    except:
        self.logger.error(traceback.format_exc())
# def select_window_list(self, *args, game_name):
# # TODO: 같은 게임안에서 윈도우마다 다르게 설정가능
# selected_window_name = self.game_options[game_name]['window_list_stringvar'].get()
# if len(selected_window_name) > 0:
# if not game_name in self.configure.window_config[selected_window_name]:
# self.configure.window_config[selected_window_name][game_name] = copy.deepcopy(self.configure.common_config[game_name])
# print('DEBUGXX', selected_window_name, game_name, self.configure.window_config[selected_window_name][game_name])
# self.set_game_config(game_name)
def refresh_window_game(self):
    """Rebuild each game tab's window list from the searched app players.

    After a window search, each game tab is populated with the windows whose
    per-window 'games' setting names that game (original Korean comments
    translated below).
    """
    # self.logger.warn('refresh_window_game')
    # return
    # if self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_USE_INACTIVE_MODE].get() == True:
    # 	self.inactive_flag_option_menu.configure(stat=tkinter.NORMAL)
    # else:
    # 	self.inactive_flag_option_menu.configure(stat=tkinter.DISABLED)
    # When the user searches windows, fill in per game tab which window will
    # run which game; each game tab keys its window list by game.
    for each_game in self.games:
        if not each_game in self.game_options:
            continue
        # print('DEBUGXX --- 1')
        self.game_options[each_game]['window_list_stringvar'].set('')
        # self.game_options[each_game]['window_list_option_menu']['menu'].delete(0, 'end')
        # print('DEBUGXX --- 1')
        new_window_list = []
        for each_app_player in self.app_player_process_list['values']:
            game_name = self.configure.get_window_config(each_app_player, 'games')
            if each_game == game_name:
                new_window_list.append(each_app_player)
        # new_window_list = []
        # for i in range(self.search_window.size()):
        # 	game_name = self.configure.get_window_config(self.search_window.get(i), 'games')
        # 	if each_game == game_name:
        # 		new_window_list.append(self.search_window.get(i))
        self.game_options[each_game]['window_list_option_menu']['values'] = new_window_list
        if len(new_window_list) > 0:
            self.game_options[each_game]['window_list_stringvar'].set(new_window_list[0])
        # if len(new_window_list) == 0:
        # 	self.game_options[each_game]['window_list_option_menu']['menu'].add_command(
        # 		label = '',
        # 		command = tkinter._setit(self.game_options[each_game]['window_list_stringvar'], '')
        # 	)
        # else:
        # 	self.game_options[each_game]['window_list_stringvar'].set(new_window_list[0])
        # 	for each_window in new_window_list:
        # 		self.game_options[each_game]['window_list_option_menu']['menu'].add_command(
        # 			label = each_window,
        # 			command = tkinter._setit(self.game_options[each_game]['window_list_stringvar'], each_window)
        # 		)
def get_game_schedule_list(self, game_name):
    """Return the schedule list for *game_name*, scoped to the selected window.

    Lazily deep-copies the common per-game config into the window's config the
    first time that window is used for the game; with no window selected the
    common schedule list is returned instead.
    """
    window_name = self.game_options[game_name]['window_list_stringvar'].get()
    if len(window_name) > 0:
        if not game_name in self.configure.window_config[window_name]:
            self.configure.window_config[window_name][game_name] = copy.deepcopy(
                self.configure.common_config[game_name])
        schedule_list = self.configure.window_config[window_name][game_name]['schedule_list']
    else:
        schedule_list = self.configure.common_config[game_name]['schedule_list']
    return schedule_list
# 리스트박스를 클릭하면 선택되는 거 같다. 그래서 마지막에 공백을 넣었다.
# def select_work_list(self, event, game_name):
# last_index = self.game_options[game_name]['work_list_listbox'].size() - 1
# self.game_options[game_name]['work_list_listbox'].selection_clear(last_index)
# schedule_list = self.get_game_schedule_list(game_name)
# if len(self.game_options[game_name]['work_list_listbox'].curselection()) > 0:
# item_index = self.game_options[game_name]['work_list_listbox'].curselection()[0]
# # 공백이면 리턴
# if item_index == last_index:
# return
# selected_work_name = self.game_options[game_name]['work_list_listbox'].get(item_index)
# print('DEBUG88:', self.game_options[game_name]['schedule_list_listbox'].size())
# if not selected_work_name in schedule_list:
# #schedule_list.append(selected_work_name)
# schedule_list.insert(len(schedule_list) - 1, selected_work_name)
# self.game_options[game_name]['schedule_list_listbox'].insert(self.game_options[game_name]['schedule_list_listbox'].size() - 1, selected_work_name)
# def select_schedule_list(self, event, game_name):
# last_index = self.game_options[game_name]['schedule_list_listbox'].size() - 1
# self.game_options[game_name]['schedule_list_listbox'].selection_clear(last_index)
# schedule_list = self.get_game_schedule_list(game_name)
# if len(self.game_options[game_name]['schedule_list_listbox'].curselection()) > 0:
# item_index = self.game_options[game_name]['schedule_list_listbox'].curselection()[0]
# if item_index == last_index:
# return
# selected_schedule_work_name = self.game_options[game_name]['schedule_list_listbox'].get(item_index)
# schedule_list.remove(selected_schedule_work_name)
# self.game_options[game_name]['schedule_list_listbox'].delete(item_index)
# print('DEBUG77:', self.game_options[game_name]['schedule_list_listbox'].size())
# window_name = self.game_options[game_name]['window_list_stringvar'].get()
# TODO: Game 설정 갱신 함수
def set_game_config(self, game_name):
    """Rebuild the schedule listbox for *game_name* from the stored schedule list."""
    size = self.game_options[game_name]['schedule_list_listbox'].size()
    self.game_options[game_name]['schedule_list_listbox'].delete(0, size - 1)
    schedule_work_list = self.get_game_schedule_list(game_name)
    # NOTE: the original also read the selected window name here but never
    # used it; the dead local has been removed.
    for each_work in schedule_work_list:
        self.game_options[game_name]['schedule_list_listbox'].insert('end', each_work)
def clicked_main_tab(self, e):
    """Main tab click handler -- intentionally a no-op."""
    # self.logger.warn('Main')
    return
def clicked_common_tab(self, e):
    """Common tab click handler -- intentionally a no-op."""
    # self.logger.warn('Common')
    # tab_index = self.option_dic['common_tab'].tk.call(self.option_dic['common_tab']._w, "identify", "tab", e.x, e.y)
    return
# def clicked_tab(self, event):
# self.logger.debug('clicked_tab')
# s = time.time()
# tab_index = self.note.tk.call(self.note._w, "identify", "tab", event.x, event.y)
# self.is_clicked_common_tab = False
# if tab_index != 0:
# if len(self.search_window.curselection()) > 0:
# self.selected_window_list = []
# items = map(int, self.search_window.curselection())
# for i in items:
# self.selected_window_list.append(self.search_window.get(i))
# else:
# if len(self.selected_window_list) > 0:
# self.is_clicked_common_tab = True
# # 이 부분을 주석 처리하면 선택된 윈도우들이 사라진다. 일단 주석 처리해보자.
# # self.selectedWindowList(None)
# e = time.time()
# self.logger.debug(str(round(e-s,2)))
def callback_security_code_stringvar(self, args):
    """Trace callback: copy the security-code entry into common config (and log it)."""
    self.logger.debug(self.security_code.get())
    self.configure.common_config['security_code'] = self.security_code.get()
def callback_reopen_log(self):
    """Clear the log text widget and rewind the log file so it is replayed.

    Used after a log-level checkbox changes, so update_information() re-reads
    and re-filters the whole file.
    """
    self.information.delete(1.0, tkinter.END)
    if self.log_fp == None:
        self.log_fp = open(likeyoubot_logger.LYBLogger.logPath)
    self.log_fp.seek(0)
    # self.log_fp.close()
    # self.log_fp = open(likeyoubot_logger.LYBLogger.logPath)
def callback_log_level_critical(self, args):
    """Sync the 'critical' log-level checkbox into config and re-filter the log view."""
    self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'] = self.gui_config_dic[
        lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'].get()
    self.callback_reopen_log()
    self.set_config(lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical')
def callback_log_level_error(self, args):
    """Sync the 'error' log-level checkbox into config and re-filter the log view."""
    self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'] = self.gui_config_dic[
        lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'].get()
    self.callback_reopen_log()
    self.set_config(lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error')
def callback_log_level_warn(self, args):
    """Sync the 'warn' log-level checkbox into config and re-filter the log view."""
    self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'] = self.gui_config_dic[
        lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'].get()
    self.callback_reopen_log()
    self.set_config(lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn')
def callback_log_level_info(self, args):
    """Sync the 'info' log-level checkbox into config and re-filter the log view."""
    self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'] = self.gui_config_dic[
        lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'].get()
    self.callback_reopen_log()
    self.set_config(lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info')
def callback_log_level_debug(self, args):
    """Sync the 'debug' log-level checkbox into config and re-filter the log view."""
    self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'] = self.gui_config_dic[
        lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'].get()
    self.callback_reopen_log()
    self.set_config(lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug')
def callback_log_lock(self, args):
    """Sync the 'lock scroll' checkbox into config (no log replay needed)."""
    self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'] = self.gui_config_dic[
        lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'].get()
    self.set_config(lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock')
# def callback_log_remove(self, args):
# self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove'] = self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove'].get()
# try:
# with open(self.configure.path, 'wb') as dat_file:
# pickle.dump(self.configure, dat_file)
# except:
# self.logger.error(traceback.format_exc())
# self.set_config(lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'remove')
def callback_hompage(self, event):
    """Open the macro homepage in the default browser."""
    webbrowser.open_new(likeyoubot_http.LYBHttp.getMacroBaseUrl())
def callback_blog(self, event):
    """Open the developer blog in the default browser."""
    webbrowser.open_new(r"https://numaking.cafe24.com")
def callback_docs(self, event):
    """Open the documentation URL fetched from the server."""
    rest = self.login()
    docs_url = rest.get_elem('docs_url')
    webbrowser.open_new(docs_url)
def callback_tera_kakaotalk(self, event):
    """Open the Tera KakaoTalk chat URL fetched from the server."""
    rest = self.login()
    kakao_url = rest.get_elem('tera_kakao_url')
    webbrowser.open_new(kakao_url)
def callback_blackdesert_kakaotalk(self, event):
    """Open the Black Desert KakaoTalk chat URL fetched from the server."""
    rest = self.login()
    kakao_url = rest.get_elem('blackdesert_kakao_url')
    webbrowser.open_new(kakao_url)
def callback_kaiser_kakaotalk(self, event):
    """Open the Kaiser KakaoTalk chat URL fetched from the server."""
    rest = self.login()
    kakao_url = rest.get_elem('kaiser_kakao_url')
    webbrowser.open_new(kakao_url)
def callback_blade2_kakaotalk(self, event):
rest = self.login()
kakao_url = rest.get_elem('blade2_kakao_url')
webbrowser.open_new(kakao_url)
def callback_icarus_kakaotalk(self, event):
rest = self.login()
kakao_url = rest.get_elem('icarus_kakao_url')
webbrowser.open_new(kakao_url)
def callback_talion_kakaotalk(self, event):
rest = self.login()
kakao_url = rest.get_elem('talion_kakao_url')
webbrowser.open_new(kakao_url)
    def callback_bitbucket(self, event):
        # Open the project's Bitbucket repository page.
        webbrowser.open_new(r"https://bitbucket.org/dogfooter/dogfooter/src")
    def callback_link_url0(self, event, url):
        # Notice-board link slots 0-4 all forward to common_link_url.
        self.common_link_url(url)
        # webbrowser.open_new(url)
    def callback_link_url1(self, event, url):
        self.common_link_url(url)
        # webbrowser.open_new(url)
    def callback_link_url2(self, event, url):
        self.common_link_url(url)
        # webbrowser.open_new(url)
    def callback_link_url3(self, event, url):
        self.common_link_url(url)
        # webbrowser.open_new(url)
    def callback_link_url4(self, event, url):
        self.common_link_url(url)
        # webbrowser.open_new(url)
    def common_link_url(self, url):
        # Intentionally disabled: clicking a notice link currently does
        # nothing.  The commented code below used to load the notice body
        # from the REST backend into the notice text widget.
        return
        # index = self.notice_link_list.index(url)
        # self.notice_frame_label.configure(text=self.notice_subject_list[index])
        #
        # self.notice_text.delete(1.0, tkinter.END)
        # rest = self.login()
        # content_list = rest.get_notice_content(self.notice_link_list[index])
        # for each_line in content_list:
        #     self.notice_text.insert('end', each_line + '\n')
def callback_wakeup_period_entry(self, args):
try:
if len(self.wakeup_period_entry.get()) < 0:
wakeup_period_entry = 0
else:
wakeup_period_entry = float(self.wakeup_period_entry.get())
# if wakeup_period_entry < 0:
# self.wakeup_period_entry.set('0')
self.configure.common_config['wakeup_period_entry'] = wakeup_period_entry
except:
self.configure.common_config['wakeup_period_entry'] = 1.0
# print(self.configure.common_config['wakeup_period_entry'])
def callback_restart_app_player_retry_stringvar(self, args):
self.configure.common_config[
lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'retry'] = self.restart_app_player_retry.get()
try:
with open(self.configure.path, 'wb') as dat_file:
pickle.dump(self.configure, dat_file)
except:
self.logger.error(traceback.format_exc())
def callback_restart_app_player_delay_stringvar(self, args):
self.configure.common_config[
lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'delay'] = self.restart_app_player_delay.get()
try:
with open(self.configure.path, 'wb') as dat_file:
pickle.dump(self.configure, dat_file)
except:
self.logger.error(traceback.format_exc())
def callback_use_restart_app_player_period_stringvar(self, args):
self.configure.common_config[
lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER + 'period'] = self.use_restart_app_player_period.get()
try:
with open(self.configure.path, 'wb') as dat_file:
pickle.dump(self.configure, dat_file)
except:
self.logger.error(traceback.format_exc())
def callback_use_restart_app_player_booleanvar(self, args):
self.configure.common_config[
lybconstant.LYB_DO_BOOLEAN_USE_RESTART_APP_PLAYER] = self.use_restart_app_player.get()
try:
with open(self.configure.path, 'wb') as dat_file:
pickle.dump(self.configure, dat_file)
except:
self.logger.error(traceback.format_exc())
# print(self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_MONITORING])
    def callback_use_monitoring_booleanvar(self, args):
        # Mirror the "use monitoring" checkbox into the shared config; the
        # monitor rows themselves are built/torn down by update_monitor_master.
        use_monitoring_flag = self.use_monitoring_flag.get()
        self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_MONITORING] = use_monitoring_flag
    def callback_select_app_player_process_stringvar(self, args):
        # Fired when the user picks an app-player window from the combobox.
        # Re-sync every GUI config variable to that window's per-window config,
        # falling back to the common config when no override exists.
        window_name = self.app_player_process.get()
        for each_config, each_value in self.gui_config_dic.items():
            if window_name in self.configure.window_config:
                # Only touch the Tk variable when the value actually differs —
                # presumably to avoid re-firing the variable's own trace
                # callbacks (TODO confirm).
                if self.configure.get_window_config(window_name, each_config) != self.gui_config_dic[each_config].get():
                    self.gui_config_dic[each_config].set(self.configure.get_window_config(window_name, each_config))
            else:
                if self.configure.common_config[each_config] != self.gui_config_dic[each_config].get():
                    self.gui_config_dic[each_config].set(self.configure.common_config[each_config])
def callback_period_telegram_entry(self, args):
self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_TELEGRAM] = self.period_telegram_entry.get()
try:
with open(self.configure.path, 'wb') as dat_file:
pickle.dump(self.configure, dat_file)
except:
self.logger.error(traceback.format_exc())
def callback_update_period_ui_entry(self, args):
try:
if len(self.update_period_ui_entry.get()) < 0:
update_period_ui = 0
else:
update_period_ui = float(self.update_period_ui_entry.get())
# if update_period_ui < 0:
# self.update_period_ui_entry.set('0')
self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI] = update_period_ui
except:
self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI] = float(1.0)
# print(self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI])
    def callback_random_click_booleanvar(self, args):
        # Persist the random-click flag from its Tk variable.
        self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK] = self.random_click_booleanvar.get()
    def callback_thumbnail_shortcut_booleanvar(self, args):
        # Persist the thumbnail "shortcut" flag.
        self.configure.common_config[
            lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'shortcut'] = self.thumbnail_shortcut_booleanvar.get()
    def callback_mouse_pointer_away_booleanvar(self, args):
        # Persist the mouse-pointer "away" flag.
        self.configure.common_config[
            lybconstant.LYB_DO_BOOLEAN_MOUSE_POINTER + 'away'] = self.mouse_pointer_away_booleanvar.get()
    def callback_close_app_nox_new_booleanvar(self, args):
        # Persist the "close app (new Nox)" flag.
        self.configure.common_config[
            lybconstant.LYB_DO_STRING_CLOSE_APP_NOX_NEW] = self.close_app_nox_new_booleanvar.get()
    def callback_random_click_pixel_stringvar(self, args):
        # Parse the random-click pixel radius; an empty entry counts as 0.
        if len(self.random_click_pixel_stringvar.get()) < 1:
            config_value = 0
        else:
            config_value = int(self.random_click_pixel_stringvar.get())
        self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_RANDOM_CLICK + 'pixel'] = config_value
    def callback_thumbnail_width_stringvar(self, args):
        # Parse the thumbnail width; an empty entry counts as 0.
        if len(self.thumbnail_width_stringvar.get()) < 1:
            config_value = 0
        else:
            config_value = int(self.thumbnail_width_stringvar.get())
        self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'width'] = config_value
    def callback_thumbnail_height_stringvar(self, args):
        # Parse the thumbnail height; an empty entry counts as 0.
        if len(self.thumbnail_height_stringvar.get()) < 1:
            config_value = 0
        else:
            config_value = int(self.thumbnail_height_stringvar.get())
        self.configure.common_config[lybconstant.LYB_DO_STRING_THUMBNAIL_SIZE + 'height'] = config_value
    def callback_freezing_limit_stringvar(self, args):
        # Parse the freezing-detection limit; an empty entry counts as 0.
        if len(self.freezing_limit_stringvar.get()) < 1:
            config_value = 0
        else:
            config_value = int(self.freezing_limit_stringvar.get())
        self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT + 'freezing_limit'] = config_value
    def callback_close_app_stringvar(self, args):
        # Parse the close-app count; an empty entry counts as 0.
        if len(self.close_app_stringvar.get()) < 1:
            config_value = 0
        else:
            config_value = int(self.close_app_stringvar.get())
        # if config_value <= 0:
        #     self.close_app_stringvar.set('0')
        self.configure.common_config[lybconstant.LYB_DO_STRING_CLOSE_APP_COUNT] = config_value
    def callback_recovery_count_stringvar(self, args):
        # Parse the recovery count; an empty entry counts as 0.
        if len(self.recovery_count_stringvar.get()) < 1:
            config_value = 0
        else:
            config_value = int(self.recovery_count_stringvar.get())
        # if config_value <= 0:
        #     self.recovery_count_stringvar.set('0')
        self.configure.common_config[lybconstant.LYB_DO_STRING_RECOVERY_COUNT] = config_value
    def callback_wait_time_scene_change(self, args):
        # Parse the scene-change wait time from its GUI variable (empty -> 0),
        # store it in the common config and propagate it via set_config.
        if len(self.gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE].get()) < 1:
            wait_time_scene_change = 0
        else:
            wait_time_scene_change = int(self.gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE].get())
        self.configure.common_config[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE] = wait_time_scene_change
        self.set_config(lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE)
def callback_threshold_entry(self, args):
if len(self.threshold_entry.get()) < 1:
threshold_entry = 0.7
else:
threshold_entry = float(self.threshold_entry.get()) / 100.0
self.configure.common_config['threshold_entry'] = threshold_entry
window_name = self.app_player_process.get()
if len(window_name) > 0:
self.configure.set_window_config(window_name, 'threshold_entry', threshold_entry)
try:
with open(self.configure.path, 'wb') as dat_file:
pickle.dump(self.configure, dat_file)
except:
self.logger.error(traceback.format_exc())
def callback_pixel_tolerance_entry(self, args):
if len(self.pixel_tolerance_entry.get()) < 1:
pixel_tolerance_entry = 30
else:
pixel_tolerance_entry = int(self.pixel_tolerance_entry.get())
if pixel_tolerance_entry > 255:
pixel_tolerance_entry = 255
# if pixel_tolerance_entry <= 0:
# self.pixel_tolerance_entry.set('0')
# elif pixel_tolerance_entry > 255:
# self.pixel_tolerance_entry.set('255')
self.configure.common_config['pixel_tolerance_entry'] = pixel_tolerance_entry
# print(self.configure.common_config['pixel_tolerance_entry'])
def callback_adjust_entry(self, args):
if len(self.adjust_entry.get()) < 1:
adjust_entry = 10
else:
adjust_entry = int(self.adjust_entry.get())
# if adjust_entry <= 0:
# self.adjust_entry.set('0')
# elif adjust_entry > 100:
# self.adjust_entry.set('100')
if adjust_entry > 100:
adjust_entry = 100
self.configure.common_config['adjust_entry'] = adjust_entry
# print(self.configure.common_config['adjust_entry'])
    def callback_log_filter_entry_stringvar(self, args):
        # Persist the log-filter text and reopen the log so it applies.
        self.configure.common_config[lybconstant.LYB_DO_STRING_LOG_FILTER] = self.log_filter_entry.get()
        self.callback_reopen_log()
def update_monitor_master(self):
# print('DEBUG:', self.worker_dic)
remove_list = []
for key, value in self.worker_dic.items():
if self.worker_dic[key].isAlive() == False:
remove_list.append(key)
if key in self.game_object:
self.game_object.pop(key)
for each_remove in remove_list:
self.worker_dic.pop(each_remove)
remove_list = []
if self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_USE_MONITORING] == False:
for key, value in self.option_dic.items():
if '_monitor' in key:
self.option_dic[key].pack_forget()
window_name = key.split('_')[0]
remove_list.append(window_name + '_monitor')
for each_remove in remove_list:
self.option_dic.pop(each_remove)
return
remove_list = []
for key, value in self.option_dic.items():
if '_monitor' in key:
window_name = key.split('_')[0]
# is_there = False
# for i in range(self.search_window.size()):
# if window_name == self.search_window.get(i):
# is_there = True
# if is_there == False:
# self.option_dic[window_name + '_monitor'].pack_forget()
# remove_list.append(window_name + '_monitor')
if not window_name in self.app_player_process_list['values']:
self.option_dic[window_name + '_monitor'].pack_forget()
remove_list.append(window_name + '_monitor')
for each_remove in remove_list:
self.option_dic.pop(each_remove)
# for i in range(self.search_window.size()):
for window_name in self.app_player_process_list['values']:
if not window_name + '_monitor' in self.option_dic:
try:
self.option_dic[window_name + '_monitor'] = self.add_monitor_master_frame(
title=window_name,
subject='통계 정보가 출력됩니다',
workname='',
status='stop'
)
except:
self.logger.error(traceback.format_exc())
try:
if window_name in self.game_object:
game_object = self.game_object[window_name]
total_elapsed_time = time.time() - game_object.start_time
if game_object.main_scene != None:
restart_period = int(
game_object.main_scene.get_game_config(lybconstant.LYB_DO_STRING_PERIOD_RESTART_GAME)) * 60
if restart_period != 0:
if total_elapsed_time > restart_period:
self.logger.warn('주기적으로 게임을 재시작합니다. 설정값: ' + str(restart_period) + '분')
game_object.request_terminate = True
game_object.start_time = time.time()
return
click_loc = str(game_object.cursor_loc)
split_str_list = click_loc.split(',')
click_loc = 'X:%5s Y:%5s' % (split_str_list[0].split('(')[1].replace(' ', '', 5),
split_str_list[1].split(')')[0].replace(' ', '', 5))
scene_name = self.adjust_monitor_name(
game_object.get_adjusted_name(game_object.current_matched_scene['name']), adj_length=10)
if len(scene_name) > 0:
scene_rate = '(' + str(game_object.current_matched_scene['rate']) + '%)'
scene_status = game_object.get_scene(game_object.current_matched_scene['name']).status
scene_name += scene_rate
else:
scene_rate = ''
scene_status = ''
if game_object.main_scene and game_object.current_schedule_work:
wlist = game_object.get_game_config(game_object.game_name, 'schedule_list')
work_name = game_object.current_schedule_work
# if work_name in game_object.main_scene.move_status:
# work_index = game_object.main_scene.move_status[work_name]
# try:
# work_name = game_object.main_scene.get_game_config('schedule_list')[work_index - 1]
# except:
# work_name = ''
# else:
if not work_name in game_object.main_scene.last_status:
return
work_index = game_object.main_scene.last_status[work_name]
new_work_name = str(work_index) + '. ' + game_object.current_schedule_work
# if game_object.main_scene.get_option('hero_current_hp') == None:
# hero_current_hp = ''
# else:
# hero_current_hp = str(game_object.main_scene.get_option('hero_current_hp'))
# if game_object.main_scene.get_option('target_current_hp') == None:
# target_current_hp = ''
# else:
# target_current_hp = str(game_object.main_scene.get_option('target_current_hp'))
hero_current_hp = ''
target_current_hp = ''
else:
wlist = []
work_index = ''
new_work_name = ''
hero_current_hp = ''
target_current_hp = ''
self.update_monitor_master_frame(
self.option_dic[window_name + '_monitor'],
arg_list=[
window_name,
'통계 정보가 출력됩니다',
new_work_name,
'start'
],
wlist=wlist
)
else:
self.update_monitor_master_frame(
self.option_dic[window_name + '_monitor'],
arg_list=[
window_name,
'통계 정보가 출력됩니다',
'',
'stop',
]
)
except KeyError:
self.logger.error(traceback.format_exc())
pass
except:
self.logger.error(traceback.format_exc())
return
# else:
# self.update_monitor_master_frame(
# self.option_dic[window_name + '_monitor']
# )
# for window_name, thread in self.worker_dic.items():
# if not window_name + '_monitor' in self.option_dic:
# self.option_dic[window_name + '_monitor'] = self.add_monitor_master(window_name)
    def update_monitor_master_frame(self,
                                    frame,
                                    arg_list=[],
                                    wlist=[]
                                    ):
        # Refresh one monitor row in place.  arg_list is positional:
        #   [0] window name, [1] statistics text, [2] current work name,
        #   [3] status ('start'/'stop').
        # wlist is the bot's schedule list, used to repopulate the combobox.
        # NOTE(review): arg_list/wlist are mutable default arguments; they are
        # only read here, never mutated, so the shared default is harmless —
        # but None defaults would be safer.
        label_list = frame.winfo_children()
        i = 0
        elapsed_time = 0
        game_object = None
        window_name = None
        for each_arg in arg_list:
            if i == 0:
                # Column 0: the window-name title button.
                text_arg = each_arg
                window_name = each_arg
            elif i == 1:
                # Column 1: statistics readout — live text when a game object
                # exists for this window, otherwise the passed-in default.
                if window_name in self.game_object:
                    game_object = self.game_object[window_name]
                    key = list(game_object.statistics)[game_object.statistics_iterator]
                    value = game_object.statistics[key]
                    text_arg = game_object.getCurrentStatistic()
                else:
                    text_arg = each_arg
            elif i == 2:
                # Column 2: work-list combobox.  Rebuilt only when the current
                # work changed; the skip flag keeps the programmatic set()
                # from re-firing the selection callback.
                if (not window_name in self.current_work_dic or
                        self.current_work_dic[window_name] != each_arg
                ):
                    self.logger.debug(
                        'wlist work: ' + self.wlist_stringvar_dic[window_name].get() + ' game work: ' + each_arg)
                    if window_name in self.current_work_dic:
                        self.logger.debug(str(self.current_work_dic[window_name]))
                    self.current_work_dic[window_name] = each_arg
                    new_wlist = []
                    windex = 1
                    for each_w in wlist:
                        if len(each_w) < 1:
                            continue
                        new_wlist.append(str(windex) + '. ' + each_w)
                        windex += 1
                    self.wlist_combobox_dic[window_name]['values'] = new_wlist
                    self.wlist_stringvar_skip_dic[window_name] = True
                    self.wlist_stringvar_dic[window_name].set(each_arg)
                # The combobox manages its own text; skip the generic
                # label_list[i].config(...) at the bottom of the loop.
                i += 1
                continue
            elif i == 3:
                # Column 3: status marker — '■' (red) when stopped, '●'
                # (green shades, depending on recent worker activity) when
                # running.
                s = ttk.Style()
                s.configure('stop.TLabel', foreground='#ff3826')
                s.configure('start.TLabel', foreground='#008e09')
                s.configure('work.TLabel', foreground='#00FF00')
                if each_arg == 'stop':
                    text_arg = ' ■'
                    label_list[i].configure(style='stop.TLabel')
                elif each_arg == 'start':
                    if window_name != None:
                        if window_name in self.game_object:
                            game_object = self.game_object[window_name]
                        if game_object != None and game_object.interval != None:
                            bot_period = game_object.interval
                        else:
                            bot_period = float(self.configure.common_config['wakeup_period_entry'])
                        if not window_name in self.monitor_check_point:
                            self.monitor_check_point[window_name] = 0
                        elapsed_time = time.time() - self.monitor_check_point[window_name]
                        if elapsed_time > bot_period:
                            self.monitor_check_point[window_name] = time.time()
                    else:
                        bot_period = 1
                    if bot_period < 0.11:
                        bot_period = 0.15
                    update_ui_period = float(self.configure.common_config[lybconstant.LYB_DO_STRING_PERIOD_UPDATE_UI])
                    text_arg = ' ●'
                    # Compare against whichever period is longer so the
                    # bright "work" color only shows for a fresh tick.
                    if bot_period < update_ui_period:
                        if elapsed_time > update_ui_period:
                            label_list[i].configure(style='start.TLabel')
                        else:
                            label_list[i].configure(style='work.TLabel')
                    else:
                        if elapsed_time > bot_period:
                            label_list[i].configure(style='start.TLabel')
                        else:
                            label_list[i].configure(style='work.TLabel')
                else:
                    text_arg = each_arg
            else:
                # NOTE(review): i is not incremented here, so any extra args
                # would be retried against the same widget index — verify
                # that arg_list never exceeds four entries.
                continue
            label_list[i].config(text=text_arg)
            i += 1
def add_monitor_master_frame(self,
title,
subject,
workname,
status,
):
s = ttk.Style()
s.configure('LYB.TFrame', background='#3f74c6')
frame_label = ttk.Frame(
master=self.option_dic['monitor_master']
# style = 'LYB.TFrame'
)
column_count = 0
button = ttk.Button(
master=frame_label,
text=title,
width=18,
command=lambda: self.callback_monitoring_title_button(None, window_name=title)
)
button.pack(side=tkinter.LEFT)
column_count += 1
button = ttk.Button(
master=frame_label,
text=subject,
width=38,
command=lambda: self.callback_monitoring_subject_button(None, window_name=title)
)
button.pack(side=tkinter.LEFT, padx=5)
column_count += 1
monitor_font = ('굴림체', 9)
combo_list = [
'없음'
]
self.wlist_stringvar_dic[title] = tkinter.StringVar(frame_label)
self.wlist_stringvar_dic[title].trace('w',
lambda *args: self.callback_select_wlist_stringvar(args,
option_name=title))
self.wlist_stringvar_dic[title].set(combo_list[0])
self.wlist_combobox_dic[title] = ttk.Combobox(
master=frame_label,
values=combo_list,
textvariable=self.wlist_stringvar_dic[title],
state="readonly",
height=10,
width=24,
# font = lybconstant.LYB_FONT,
font=monitor_font,
justify=tkinter.LEFT
)
self.wlist_combobox_dic[title].set(combo_list[0])
self.wlist_combobox_dic[title].pack(anchor=tkinter.W, side=tkinter.LEFT)
column_count += 1
label = ttk.Label(
master=frame_label,
text=status,
width=4
)
label.pack(side=tkinter.LEFT, fill=tkinter.Y)
column_count += 1
# label = ttk.Label(
# master = frame_label,
# text = arg10,
# anchor = tkinter.N,
# justify = tkinter.CENTER,
# width = 11
# )
# label.pack(side=tkinter.LEFT, fill=tkinter.Y)
# column_count += 1
# s = ttk.Style()
# s.configure('mouse_up.TLabel', foreground='black', background='#f7f796', relief='groove')
# s.configure('mouse_down.TLabel', foreground='#f7f796', background='black', relief='groove')
# s = ttk.Style()
# s.configure('monitor_button.TButton', highlightbackground='green')
self.monitor_button_index[0] = column_count
button = ttk.Button(
master=frame_label,
text='보이기',
width=8,
# style = 'monitor_button.TButton',
command=lambda: self.callback_monitoring_execute_worker(None, window_name=title,
index=self.monitor_button_index[0])
)
button.pack(side=tkinter.RIGHT)
column_count += 1
self.monitor_button_index[1] = column_count
button = ttk.Button(
master=frame_label,
text='정지',
width=8,
# style = 'monitor_button.TButton',
command=lambda: self.callback_monitoring_execute_worker(None, window_name=title,
index=self.monitor_button_index[1])
)
button.pack(side=tkinter.RIGHT)
column_count += 1
self.monitor_button_index[2] = column_count
button = ttk.Button(
master=frame_label,
text='일시정지',
width=8,
# style = 'monitor_button.TButton',
command=lambda: self.callback_monitoring_execute_worker(None, window_name=title,
index=self.monitor_button_index[2])
)
button.pack(side=tkinter.RIGHT)
column_count += 1
self.monitor_button_index[3] = column_count
button = ttk.Button(
master=frame_label,
text='시작',
width=8,
# style = 'monitor_button.TButton',
command=lambda: self.callback_monitoring_execute_worker(None, window_name=title,
index=self.monitor_button_index[3])
)
button.pack(side=tkinter.RIGHT)
column_count += 1
# if arg111 != None:
# self.monitor_button_index[1] = column_count
# button = ttk.Button(
# master = frame_label,
# text = '보이기',
# command = lambda e: self.callback_monitoring_execute_worker(e, window_name=arg1, index=self.monitor_button_index[2])
# )
# button.pack(side=tkinter.LEFT)
# column_count += 1
# if arg112 != None:
# self.monitor_button_index[3] = column_count
# button = ttk.Button(
# master = frame_label,
# text = '숨기기',
# command = lambda e: self.callback_monitoring_execute_worker(e, window_name=arg1, index=self.monitor_button_index[3])
# )
# button.pack(side=tkinter.LEFT)
# column_count += 1
# if arg113 != None:
# self.monitor_button_index[4] = column_count
# button = ttk.Button(
# master = frame_label,
# text = '보이기',
# command = lambda e: self.callback_monitoring_execute_worker(e, window_name=arg1, index=self.monitor_button_index[4])
# )
# button.pack(side=tkinter.LEFT)
# column_count += 1
# if arg110 != None:
# button = ttk.Label(
# master = frame_label,
# text = '시작',
# anchor = tkinter.N,
# justify = tkinter.CENTER,
# style = 'mouse_up.TLabel',
# cursor = 'hand2',
# width = 5
# )
# button.pack(side=tkinter.LEFT)
# # self.monitor_button_index[1] = column_count
# # button.bind('<Button-1>', lambda event: self.callback_monitoring_execute_worker(event, window_name=arg1, index=self.monitor_button_index[1]))
# # column_count += 1
# if arg111 != None:
# button = ttk.Label(
# master = frame_label,
# text = '멈춤',
# anchor = tkinter.N,
# justify = tkinter.CENTER,
# style = 'mouse_up.TLabel',
# cursor = 'hand2',
# width = 5
# )
# button.pack(side=tkinter.LEFT)
# self.monitor_button_index[2] = column_count
# button.bind('<Button-1>', lambda event: self.callback_monitoring_execute_worker(event, window_name=arg1, index=self.monitor_button_index[2]))
# column_count += 1
# if arg112 != None:
# button = ttk.Label(
# master = frame_label,
# text = '《',
# anchor = tkinter.N,
# justify = tkinter.CENTER,
# style = 'mouse_up.TLabel',
# cursor = 'hand2',
# width = 5
# )
# button.pack(side=tkinter.LEFT)
# self.monitor_button_index[3] = column_count
# button.bind('<Button-1>', lambda event: self.callback_monitoring_execute_worker(event, window_name=arg1, index=self.monitor_button_index[3]))
# column_count += 1
# if arg113 != None:
# button = ttk.Label(
# master = frame_label,
# text = '》',
# anchor = tkinter.N,
# justify = tkinter.CENTER,
# style = 'mouse_up.TLabel',
# cursor = 'hand2',
# width = 5
# )
# button.pack(side=tkinter.LEFT)
# self.monitor_button_index[4] = column_count
# button.bind('<Button-1>', lambda event: self.callback_monitoring_execute_worker(event, window_name=arg1, index=self.monitor_button_index[4]))
# column_count += 1
frame_label.pack(anchor=tkinter.W, fill=tkinter.BOTH)
return frame_label
def adjust_monitor_name(self, name, adj_length=10):
if name == None:
return ''
if len(name) > adj_length:
return name[0:adj_length - 3] + '...' + name[-1]
return name
    def callback_monitoring_title_button(self, event, window_name):
        # Title button: show the thumbnail of the associated window.
        self.callback_show_tumbnail(None, window_name)
    def callback_monitoring_subject_button(self, event, window_name):
        # Subject button: display the current statistic and advance the
        # iterator (wrapping) so repeated clicks cycle through them all.
        game_object = self.game_object[window_name]
        if game_object == None or game_object.main_scene == None:
            return
        # Child 1 of the monitor row is the subject button itself.
        button_label = self.option_dic[window_name + '_monitor'].winfo_children()[1]
        button_label.configure(text=game_object.getCurrentStatistic())
        if game_object.statistics_iterator >= len(game_object.statistics) - 1:
            game_object.statistics_iterator = 0
        else:
            game_object.statistics_iterator += 1
    def callback_monitoring_execute_worker(self, event, window_name, index):
        # Control-button click: defer the real work by 100 ms — presumably so
        # Tk finishes rendering the button press first (TODO confirm) — then
        # dispatch on the clicked button's index.
        stop_button_label = self.option_dic[window_name + '_monitor'].winfo_children()[index]
        self.master.after(100,
                          lambda: self.callback_monitoring_execute_worker_back(window_name=window_name, index=index))
    def callback_monitoring_execute_worker_back(self, window_name, index):
        # Deferred half of the handler: map the button index (recorded in
        # monitor_button_index by add_monitor_master_frame) to the action:
        # 0 = show window, 1 = stop, 2 = pause, 3 = start.
        if index == self.monitor_button_index[0]:
            self.callback_show_window(None, window_name)
        elif index == self.monitor_button_index[1]:
            self.terminate_each_worker(window_name)
        elif index == self.monitor_button_index[2]:
            self.pause_each_worker(window_name)
        elif index == self.monitor_button_index[3]:
            self.start_each_worker(window_name)
def backward_work_each_worker(self, window_name):
game_object = self.game_object[window_name]
if game_object == None or game_object.main_scene == None:
return
work_name = game_object.current_schedule_work
if work_name != None:
try:
if not work_name in game_object.main_scene.move_status:
work_index = game_object.main_scene.last_status[work_name]
else:
work_index = game_object.main_scene.move_status[work_name]
except:
work_index = game_object.main_scene.last_status[work_name]
work_index -= 1
self.move_to_work_index(window_name, work_index)
def forward_work_each_worker(self, window_name):
game_object = self.game_object[window_name]
if game_object == None or game_object.main_scene == None:
return
work_name = game_object.current_schedule_work
if work_name != None:
try:
if not work_name in game_object.main_scene.move_status:
work_index = game_object.main_scene.last_status[work_name]
else:
work_index = game_object.main_scene.move_status[work_name]
except:
work_index = game_object.main_scene.last_status[work_name]
work_index += 1
self.move_to_work_index(window_name, work_index)
    def callback_select_wlist_stringvar(self, args, option_name):
        # Trace callback of a monitor row's work-list combobox: when the user
        # picks an entry ("<index>. <name>"), jump that window's bot to the
        # selected schedule index.  The skip flag suppresses this callback
        # when the combobox is updated programmatically by the monitor
        # refresh (update_monitor_master_frame).
        self.logger.debug('[MoveStatus] callback_select_wlist_stringvar: ' + option_name)
        self.logger.debug(str(self.wlist_stringvar_skip_dic))
        if len(option_name) < 1:
            return
        if not option_name in self.current_work_dic:
            return
        if option_name in self.wlist_stringvar_skip_dic:
            if self.wlist_stringvar_skip_dic[option_name] == True:
                # Programmatic update: consume the flag and do nothing.
                self.wlist_stringvar_skip_dic[option_name] = False
                self.logger.debug('[MoveStatus] ' + str(self.wlist_stringvar_skip_dic))
                return
        if len(self.wlist_stringvar_dic[option_name].get()) < 1:
            return
        # Combobox entries are formatted "<index>. <work name>".
        move_status = int(self.wlist_stringvar_dic[option_name].get().split('.')[0])
        self.logger.debug(
            '[DEBUG MoveStatus] moveStatus: ' + str(move_status) + ' current_work_name: ' + self.current_work_dic[
                option_name])
        game_object = self.game_object[option_name]
        if game_object == None or game_object.main_scene == None:
            return
        self.move_to_work_index(option_name, move_status)
    def move_to_work_index(self, window_name, index):
        # Request that the worker for *window_name* jump to schedule entry
        # *index* (1-based).  The index is clamped to [1, max_len - 1]; the
        # running work's per-call iterator options and call stack are cleared
        # so the bot abandons it cleanly, then the end flag and move_status
        # tell the worker where to resume.
        if not window_name in self.game_object:
            return
        game_object = self.game_object[window_name]
        if game_object == None or game_object.main_scene == None:
            return
        max_len = len(game_object.main_scene.get_game_config('schedule_list'))
        if index >= max_len - 1:
            index = max_len - 1
        if index < 1:
            index = 1
        work_name = game_object.current_schedule_work
        if work_name != None:
            call_index = 0
            if len(game_object.main_scene.callstack) > 0:
                # Reset every pending call's iterator option before dropping
                # the call stack itself.
                for each_call in game_object.main_scene.callstack:
                    iterator_key = game_object.build_iterator_key(call_index, each_call)
                    game_object.main_scene.set_option(iterator_key, None)
                    call_index += 1
                game_object.main_scene.callstack.clear()
                game_object.main_scene.callstack_status.clear()
            game_object.main_scene.set_option(work_name + '_end_flag', True)
            game_object.main_scene.move_status[work_name] = index
            # Echo the jump target back into the combobox display.
            move_work_name = str(index) + '. ' + game_object.main_scene.get_game_config('schedule_list')[index - 1]
            self.wlist_stringvar_dic[window_name].set(move_work_name)
    def tooltip(self, widget_name, text):
        # Attach a hover tooltip to the widget; wrap its text at 640 px.
        tooltip = ToolTip(widget_name, text)
        tooltip.wraplength = 640
    def callback_telegram(self, event):
        # Link/unlink the user's Telegram chat with this macro account.  The
        # button label doubles as state: '연동하기' ("link") means not yet
        # linked; anything else means currently linked.
        if self.telegram_button_label.get() == '연동하기':
            if len(self.telegram_entry.get()) < 1:
                # Nothing typed: tell the user how the token handshake works.
                self.logging_message("NORMAL", "연동하기 버튼 위 입력란에 아무거나 입력하세요.")
                self.logging_message("NORMAL", "입력한 내용을 텔레그램 도그푸터 봇 대화창에 똑같이 입력하고 연동 버튼을 누르세요.")
                return
            # The backend matches the token typed here against what the user
            # sent to the Telegram bot, and returns the chat id on success.
            rest = self.login()
            chat_id = rest.connect_telegram(self.telegram_entry.get())
            if chat_id != '':
                # Store the chat id on the account (mb_3) and confirm by
                # sending a test message through the bot.
                error_message = rest.login(mb_3=chat_id)
                if error_message == '':
                    self.telegram_button_label.set('연동해제')
                    self.telegram_chatid_label.set(chat_id)
                    rest.send_telegram_message(chat_id,
                                               self.telegram_entry.get() + ' from DogFooter Macro ' + lybconstant.LYB_VERSION)
                    self.logging_message("SUCCESS", "텔레그램 연동에 성공했습니다.")
                    self.logging_message("SUCCESS", "도그푸터 봇이 메세지를 전송했습니다.")
                    self.logging_message("SUCCESS", "텔레그램 알람이 온다면 텔레그램 연동에 성공한 것입니다.")
                    self.logging_message("SUCCESS", "연동해제 버튼을 눌러서 언제든지 해제 할 수 있습니다.")
                    self.telegram_entry.set('')
                    chat_id = rest.get_chatid(refresh=True)
                    self.logger.debug('update chatting id: ' + str(chat_id))
                    return
            self.logging_message("FAIL", "텔레그램 연동에 실패했습니다.")
            self.logging_message("FAIL", "[" + self.telegram_entry.get() + "]를 텔레그램 대화창에 제대로 입력했는지 확인하세요.")
        else:
            # Unlink: store '-1' as the chat id on the server and reset the
            # UI back to the "link" state with a fresh token pre-filled.
            rest = self.login()
            error_message = rest.login(mb_3='-1')
            if error_message == '':
                self.logging_message("SUCCESS", "텔레그램 연동을 해제했습니다.")
                self.telegram_button_label.set('연동하기')
                self.telegram_chatid_label.set('')
                self.telegram_entry.set(self.generate_token())
                chat_id = rest.get_chatid(refresh=True)
def get_mb_point(self):
if self.mb_point != None:
return self.mb_point
rest = self.login()
error_message = rest.login()
self.mb_point = rest.get_point()
return self.mb_point
# def get_mb_ip(self):
# rest = self.login()
# return rest.get_ip()
    def login(self):
        # Return the cached REST session, creating one on first use from the
        # saved account id and the decrypted saved password.
        if self.rest is not None:
            return self.rest
        user_id = self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_SAVE_LOGIN_ACCOUNT + '_id']
        user_password = likeyoubot_license.LYBLicense().get_decrypt(
            self.configure.common_config[lybconstant.LYB_DO_BOOLEAN_SAVE_LOGIN_ACCOUNT + '_passwd'])
        self.rest = likeyoubot_rest.LYBRest(self.configure.root_url, user_id, user_password)
        return self.rest
def generate_token(self):
return ''.join(random.choices(string.ascii_uppercase + string.digits, k=8))
    def send_screenshot_telegram(self):
        # Handle the Telegram /SS command: grab the full screen, save it via
        # save_image, and send the PNG to the linked Telegram chat.
        self.logger.debug('/SS')
        screenShot = ImageGrab.grab()
        png_name = self.save_image(screenShot, 'ss_command')
        rest = self.login()
        chat_id = rest.get_chatid()
        self.rest.send_telegram_image(chat_id, png_name)
def save_image(self, image, png_name):
try:
directory = resource_path('screenshot')
if not os.path.exists(directory):
os.makedirs(directory)
now = datetime.datetime.now()
now_time = now.strftime('%y%m%d_%H%M%S')
png_name = directory + '\\' + png_name + '_' + str(now_time) + '.png'
image.save(png_name)
return png_name
except:
self.logger.error('스크린샷 저장 중 에러 발생')
self.logger.error(traceback.format_exc())
return None
def callback_hide_window(self, e, window_name):
self.configure.keyword = self.keyword_entry.get()
self.master.focus()
try:
with open(self.configure.path, 'wb') as dat_file:
pickle.dump(self.configure, dat_file)
except:
self.logger.error(traceback.format_exc())
worker_thread = self.executeThread()
if worker_thread == None:
return
worker_thread.command_queue.put_nowait(likeyoubot_message.LYBMessage('watchout', [self, 'hide', window_name]))
def callback_show_window(self, e, window_name):
    """Tk event callback: persist the current config, then ask the worker
    thread to show *window_name*.

    Args:
        e: the Tk event object (unused beyond the callback signature).
        window_name: app-player window to show.
    """
    self.configure.keyword = self.keyword_entry.get()
    self.master.focus()
    try:
        with open(self.configure.path, 'wb') as dat_file:
            pickle.dump(self.configure, dat_file)
    # Was a bare "except:"; keep the best-effort save but only swallow
    # real errors, never SystemExit/KeyboardInterrupt.
    except Exception:
        self.logger.error(traceback.format_exc())
    worker_thread = self.executeThread()
    if worker_thread is None:  # no worker running -> nothing to show
        return
    worker_thread.command_queue.put_nowait(
        likeyoubot_message.LYBMessage('watchout', [self, 'show', window_name]))
def callback_show_tumbnail(self, e, window_name):
    """Tk event callback: persist the current config, then ask the worker
    thread to render a thumbnail of *window_name*.

    (Method name typo "tumbnail" is kept: callers bind to it by name.)

    Args:
        e: the Tk event object (unused beyond the callback signature).
        window_name: app-player window to thumbnail.
    """
    self.configure.keyword = self.keyword_entry.get()
    self.master.focus()
    try:
        with open(self.configure.path, 'wb') as dat_file:
            pickle.dump(self.configure, dat_file)
    # Was a bare "except:"; keep the best-effort save but only swallow
    # real errors, never SystemExit/KeyboardInterrupt.
    except Exception:
        self.logger.error(traceback.format_exc())
    worker_thread = self.executeThread()
    if worker_thread is None:  # no worker running -> nothing to do
        return
    worker_thread.command_queue.put_nowait(
        likeyoubot_message.LYBMessage('thumbnail', [self, window_name]))
|
[
"tkinter.ttk.Label",
"tkinter.StringVar",
"tkinter.Text",
"webbrowser.open_new",
"pickle.dump",
"random.choices",
"os.path.isfile",
"tkinter.BooleanVar",
"tkinter.ttk.LabelFrame",
"os.path.abspath",
"likeyoubot_license.LYBLicense",
"os.path.exists",
"tkinter.ttk.Frame",
"traceback.format_exc",
"requests.get",
"likeyoubot_rohan.LYBRohanTab",
"tkinter.ttk.Checkbutton",
"datetime.datetime.now",
"subprocess.Popen",
"tkinter.ttk.Entry",
"copy.deepcopy",
"datetime.datetime.today",
"tkinter.ttk.Scrollbar",
"PIL.ImageGrab.grab",
"belfrywidgets.ToolTip",
"tkinter.ttk.Style",
"tkinter.ttk.Combobox",
"likeyoubot_rest.LYBRest",
"likeyoubot_message.LYBMessage",
"tkinter.ttk.Notebook",
"queue.Queue",
"PIL.ImageTk.PhotoImage",
"os.makedirs",
"likeyoubot_logger.LYBLogger.getLogger",
"time.time"
] |
[((1110, 1149), 'likeyoubot_logger.LYBLogger.getLogger', 'likeyoubot_logger.LYBLogger.getLogger', ([], {}), '()\n', (1147, 1149), False, 'import likeyoubot_logger\n'), ((1367, 1378), 'time.time', 'time.time', ([], {}), '()\n', (1376, 1378), False, 'import time\n'), ((1408, 1419), 'time.time', 'time.time', ([], {}), '()\n', (1417, 1419), False, 'import time\n'), ((1804, 1867), 'tkinter.ttk.Notebook', 'ttk.Notebook', (['self.master'], {'width': 'self.width', 'height': 'self.height'}), '(self.master, width=self.width, height=self.height)\n', (1816, 1867), False, 'from tkinter import ttk\n'), ((2972, 2983), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (2981, 2983), False, 'from tkinter import ttk\n'), ((6747, 6776), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.tab_frame[-1]'], {}), '(self.tab_frame[-1])\n', (6756, 6776), False, 'from tkinter import ttk\n'), ((7525, 7565), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""앱 플레이어: """'}), "(master=frame, text='앱 플레이어: ')\n", (7534, 7565), False, 'from tkinter import ttk\n'), ((7673, 7697), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (7690, 7697), False, 'import tkinter\n'), ((7892, 8037), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': '[]', 'textvariable': 'self.app_player_process', 'state': '"""readonly"""', 'height': '(20)', 'width': '(30)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=[], textvariable=self.app_player_process,\n state='readonly', height=20, width=30, font=lybconstant.LYB_FONT)\n", (7904, 8037), False, 'from tkinter import ttk\n'), ((8237, 8323), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.LEFT', 'font': 'lybconstant.LYB_FONT', 'width': '(20)'}), '(master=frame, justify=tkinter.LEFT, font=lybconstant.LYB_FONT,\n width=20)\n', (8246, 8323), False, 'from tkinter import ttk\n'), ((8588, 8599), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (8597, 8599), False, 'from tkinter 
import ttk\n'), ((12469, 12526), 'tkinter.ttk.Frame', 'ttk.Frame', ([], {'master': 'self.tab_frame[-1]', 'relief': 'frame_relief'}), '(master=self.tab_frame[-1], relief=frame_relief)\n', (12478, 12526), False, 'from tkinter import ttk\n'), ((12579, 12618), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_s'], {'relief': 'frame_relief'}), '(frame_s, relief=frame_relief)\n', (12588, 12618), False, 'from tkinter import ttk\n'), ((15153, 15194), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', (['frame_l'], {'text': '"""앱 플레이어 설정"""'}), "(frame_l, text='앱 플레이어 설정')\n", (15167, 15194), False, 'from tkinter import ttk\n'), ((15217, 15251), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_app_player_config'], {}), '(frame_app_player_config)\n', (15226, 15251), False, 'from tkinter import ttk\n'), ((15273, 15295), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_inner'], {}), '(frame_inner)\n', (15282, 15295), False, 'from tkinter import ttk\n'), ((15309, 15320), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (15318, 15320), False, 'from tkinter import ttk\n'), ((15423, 15500), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame_game', 'text': '"""게임 선택 ☞ """', 'style': '"""fgWhite_bgGreen.TLabel"""'}), "(master=frame_game, text='게임 선택 ☞ ', style='fgWhite_bgGreen.TLabel')\n", (15432, 15500), False, 'from tkinter import ttk\n'), ((15625, 15651), 'tkinter.StringVar', 'tkinter.StringVar', (['frame_l'], {}), '(frame_l)\n', (15642, 15651), False, 'import tkinter\n'), ((16006, 16164), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame_game', 'values': 'self.games', 'textvariable': "self.gui_config_dic['games']", 'state': '"""readonly"""', 'width': '(22)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame_game, values=self.games, textvariable=self.\n gui_config_dic['games'], state='readonly', width=22, font=lybconstant.\n LYB_FONT)\n", (16018, 16164), False, 'from tkinter import ttk\n'), ((17037, 17057), 'tkinter.BooleanVar', 'tkinter.BooleanVar', ([], {}), '()\n', (17055, 17057), 
False, 'import tkinter\n'), ((17681, 17701), 'tkinter.BooleanVar', 'tkinter.BooleanVar', ([], {}), '()\n', (17699, 17701), False, 'import tkinter\n'), ((18194, 18237), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_inner'], {'relief': 'frame_relief'}), '(frame_inner, relief=frame_relief)\n', (18203, 18237), False, 'from tkinter import ttk\n'), ((18503, 18523), 'tkinter.BooleanVar', 'tkinter.BooleanVar', ([], {}), '()\n', (18521, 18523), False, 'import tkinter\n'), ((19212, 19236), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (19229, 19236), False, 'import tkinter\n'), ((20609, 20620), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (20618, 20620), False, 'from tkinter import ttk\n'), ((20798, 20999), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'inactive_mode_flag_list', 'textvariable': 'self.gui_config_dic[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG]', 'state': '"""readonly"""', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=inactive_mode_flag_list, textvariable=\n self.gui_config_dic[lybconstant.LYB_DO_STRING_INACTIVE_MODE_FLAG],\n state='readonly', width=5, font=lybconstant.LYB_FONT)\n", (20810, 20999), False, 'from tkinter import ttk\n'), ((21356, 21399), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_inner'], {'relief': 'frame_relief'}), '(frame_inner, relief=frame_relief)\n', (21365, 21399), False, 'from tkinter import ttk\n'), ((21707, 21727), 'tkinter.BooleanVar', 'tkinter.BooleanVar', ([], {}), '()\n', (21725, 21727), False, 'import tkinter\n'), ((22400, 22572), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""창 고정"""', 'variable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean']", 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='창 고정', variable=self.gui_config_dic[\n lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'boolean'], onvalue=\n True, offvalue=False)\n", (22415, 22572), False, 'from 
tkinter import ttk\n'), ((22797, 22831), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""X:"""'}), "(master=frame, text='X:')\n", (22806, 22831), False, 'from tkinter import ttk\n'), ((22989, 23013), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (23006, 23013), False, 'import tkinter\n'), ((23850, 24060), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x']", 'state': '"""readonly"""', 'height': '(20)', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'x'],\n state='readonly', height=20, width=5, font=lybconstant.LYB_FONT)\n", (23862, 24060), False, 'from tkinter import ttk\n'), ((24324, 24359), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '""" Y:"""'}), "(master=frame, text=' Y:')\n", (24333, 24359), False, 'from tkinter import ttk\n'), ((24517, 24541), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (24534, 24541), False, 'import tkinter\n'), ((25378, 25588), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y']", 'state': '"""readonly"""', 'height': '(20)', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'y'],\n state='readonly', height=20, width=5, font=lybconstant.LYB_FONT)\n", (25390, 25588), False, 'from tkinter import ttk\n'), ((26091, 26134), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_inner'], {'relief': 'frame_relief'}), '(frame_inner, relief=frame_relief)\n', (26100, 26134), False, 'from tkinter import ttk\n'), ((26250, 26298), 'tkinter.ttk.Label', 'ttk.Label', 
([], {'master': 'frame', 'text': '"""멀티 플레이어에 설치된 순서:"""'}), "(master=frame, text='멀티 플레이어에 설치된 순서:')\n", (26259, 26298), False, 'from tkinter import ttk\n'), ((26461, 26485), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (26478, 26485), False, 'import tkinter\n'), ((27367, 27582), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION + 'number']", 'state': '"""readonly"""', 'height': '(20)', 'width': '(1)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n gui_config_dic[lybconstant.LYB_DO_BOOLEAN_FIX_WINDOW_LOCATION +\n 'number'], state='readonly', height=20, width=1, font=lybconstant.LYB_FONT)\n", (27379, 27582), False, 'from tkinter import ttk\n'), ((28700, 28739), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_s'], {'relief': 'frame_relief'}), '(frame_s, relief=frame_relief)\n', (28709, 28739), False, 'from tkinter import ttk\n'), ((28762, 28806), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', ([], {'master': 'frame_r', 'text': '"""공지 사항"""'}), "(master=frame_r, text='공지 사항')\n", (28776, 28806), False, 'from tkinter import ttk\n'), ((34416, 34455), 'tkinter.ttk.Notebook', 'ttk.Notebook', ([], {'master': 'self.tab_frame[-1]'}), '(master=self.tab_frame[-1])\n', (34428, 34455), False, 'from tkinter import ttk\n'), ((34622, 34669), 'tkinter.ttk.Frame', 'ttk.Frame', ([], {'master': "self.option_dic['common_tab']"}), "(master=self.option_dic['common_tab'])\n", (34631, 34669), False, 'from tkinter import ttk\n'), ((34953, 35000), 'tkinter.ttk.Frame', 'ttk.Frame', ([], {'master': "self.option_dic['common_tab']"}), "(master=self.option_dic['common_tab'])\n", (34962, 35000), False, 'from tkinter import ttk\n'), ((35589, 35633), 'tkinter.ttk.Frame', 'ttk.Frame', (["self.option_dic['monitoring_tab']"], {}), "(self.option_dic['monitoring_tab'])\n", (35598, 35633), False, 'from tkinter import 
ttk\n'), ((35818, 35862), 'tkinter.ttk.Frame', 'ttk.Frame', (["self.option_dic['monitoring_tab']"], {}), "(self.option_dic['monitoring_tab'])\n", (35827, 35862), False, 'from tkinter import ttk\n'), ((35883, 35906), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_bottom'], {}), '(frame_bottom)\n', (35892, 35906), False, 'from tkinter import ttk\n'), ((36229, 36249), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_log'], {}), '(frame_log)\n', (36238, 36249), False, 'from tkinter import ttk\n'), ((36332, 36357), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (36350, 36357), False, 'import tkinter\n'), ((37248, 37264), 'tkinter.ttk.Style', 'ttk.Style', (['frame'], {}), '(frame)\n', (37257, 37264), False, 'from tkinter import ttk\n'), ((37361, 37564), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""필수정보"""', 'variable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical']", 'style': '"""green_checkbutton.TCheckbutton"""', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='필수정보', variable=self.gui_config_dic[\n lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'critical'], style=\n 'green_checkbutton.TCheckbutton', onvalue=True, offvalue=False)\n", (37376, 37564), False, 'from tkinter import ttk\n'), ((37761, 37786), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (37779, 37786), False, 'import tkinter\n'), ((38646, 38662), 'tkinter.ttk.Style', 'ttk.Style', (['frame'], {}), '(frame)\n', (38655, 38662), False, 'from tkinter import ttk\n'), ((38755, 38951), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""에러"""', 'variable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error']", 'style': '"""red_checkbutton.TCheckbutton"""', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='에러', variable=self.gui_config_dic[\n lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'error'], style=\n 'red_checkbutton.TCheckbutton', 
onvalue=True, offvalue=False)\n", (38770, 38951), False, 'from tkinter import ttk\n'), ((39147, 39172), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (39165, 39172), False, 'import tkinter\n'), ((40023, 40039), 'tkinter.ttk.Style', 'ttk.Style', (['frame'], {}), '(frame)\n', (40032, 40039), False, 'from tkinter import ttk\n'), ((40139, 40337), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""경고"""', 'variable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn']", 'style': '"""orange_checkbutton.TCheckbutton"""', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='경고', variable=self.gui_config_dic[\n lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'warn'], style=\n 'orange_checkbutton.TCheckbutton', onvalue=True, offvalue=False)\n", (40154, 40337), False, 'from tkinter import ttk\n'), ((40533, 40558), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (40551, 40558), False, 'import tkinter\n'), ((41419, 41578), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""게임정보"""', 'variable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info']", 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='게임정보', variable=self.gui_config_dic[\n lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'info'], onvalue=True, offvalue=\n False)\n", (41434, 41578), False, 'from tkinter import ttk\n'), ((41763, 41788), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (41781, 41788), False, 'import tkinter\n'), ((42660, 42819), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""디버깅"""', 'variable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug']", 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='디버깅', variable=self.gui_config_dic[\n lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'debug'], onvalue=True, offvalue\n =False)\n", (42675, 42819), False, 'from 
tkinter import ttk\n'), ((42942, 42975), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '""" """'}), "(master=frame, text=' ')\n", (42951, 42975), False, 'from tkinter import ttk\n'), ((43065, 43101), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""필터링:"""'}), "(master=frame, text='필터링:')\n", (43074, 43101), False, 'from tkinter import ttk\n'), ((43207, 43231), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (43224, 43231), False, 'import tkinter\n'), ((43433, 43556), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'textvariable': 'self.log_filter_entry', 'justify': 'tkinter.LEFT', 'font': 'lybconstant.LYB_FONT', 'width': '(15)'}), '(master=frame, textvariable=self.log_filter_entry, justify=tkinter\n .LEFT, font=lybconstant.LYB_FONT, width=15)\n', (43442, 43556), False, 'from tkinter import ttk\n'), ((43678, 43711), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '""" """'}), "(master=frame, text=' ')\n", (43687, 43711), False, 'from tkinter import ttk\n'), ((43862, 43887), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (43880, 43887), False, 'import tkinter\n'), ((44657, 44818), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""스크롤 잠금"""', 'variable': "self.gui_config_dic[lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock']", 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='스크롤 잠금', variable=self.gui_config_dic[\n lybconstant.LYB_DO_BOOLEAN_LOG_LEVEL + 'lock'], onvalue=True, offvalue=\n False)\n", (44672, 44818), False, 'from tkinter import ttk\n'), ((45977, 45997), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_log'], {}), '(frame_log)\n', (45986, 45997), False, 'from tkinter import ttk\n'), ((46026, 46117), 'tkinter.Text', 'tkinter.Text', ([], {'master': 'self.information_frame', 'width': '(90)', 'height': '(25)', 'font': "('Consolas', 8)"}), "(master=self.information_frame, width=90, height=25, 
font=(\n 'Consolas', 8))\n", (46038, 46117), False, 'import tkinter\n'), ((47685, 47722), 'tkinter.ttk.Scrollbar', 'ttk.Scrollbar', (['self.information_frame'], {}), '(self.information_frame)\n', (47698, 47722), False, 'from tkinter import ttk\n'), ((48518, 48541), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_bottom'], {}), '(frame_bottom)\n', (48527, 48541), False, 'from tkinter import ttk\n'), ((48554, 48565), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (48563, 48565), False, 'from tkinter import ttk\n'), ((48642, 48653), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (48651, 48653), False, 'from tkinter import ttk\n'), ((48757, 48768), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (48766, 48768), False, 'from tkinter import ttk\n'), ((48875, 48895), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_log'], {}), '(frame_log)\n', (48884, 48895), False, 'from tkinter import ttk\n'), ((48916, 48950), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', (['frame'], {'text': '"""바로가기"""'}), "(frame, text='바로가기')\n", (48930, 48950), False, 'from tkinter import ttk\n'), ((49739, 49774), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', (['frame'], {'text': '"""오픈채팅방"""'}), "(frame, text='오픈채팅방')\n", (49753, 49774), False, 'from tkinter import ttk\n'), ((51751, 51771), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_log'], {}), '(frame_log)\n', (51760, 51771), False, 'from tkinter import ttk\n'), ((51795, 51830), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', (['frame_br'], {'text': '"""계정"""'}), "(frame_br, text='계정')\n", (51809, 51830), False, 'from tkinter import ttk\n'), ((52036, 52084), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'login_frame', 'text': 'user_account'}), '(master=login_frame, text=user_account)\n', (52045, 52084), False, 'from tkinter import ttk\n'), ((52220, 52247), 'tkinter.StringVar', 'tkinter.StringVar', (['frame_br'], {}), '(frame_br)\n', (52237, 52247), False, 'import tkinter\n'), ((52271, 52307), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', 
(['frame_br'], {'text': '"""포인트"""'}), "(frame_br, text='포인트')\n", (52285, 52307), False, 'from tkinter import ttk\n'), ((52324, 52387), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'login_frame', 'textvariable': 'self.mb_point_label'}), '(master=login_frame, textvariable=self.mb_point_label)\n', (52333, 52387), False, 'from tkinter import ttk\n'), ((52902, 52940), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', (['frame_br'], {'text': '"""뒷통수조심"""'}), "(frame_br, text='뒷통수조심')\n", (52916, 52940), False, 'from tkinter import ttk\n'), ((53598, 53635), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', (['frame_br'], {'text': '"""텔레그램"""'}), "(frame_br, text='텔레그램')\n", (53612, 53635), False, 'from tkinter import ttk\n'), ((53815, 53841), 'PIL.ImageTk.PhotoImage', 'ImageTk.PhotoImage', (['image1'], {}), '(image1)\n', (53833, 53841), False, 'from PIL import Image, ImageTk, ImageGrab\n'), ((53859, 53902), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'login_frame', 'image': 'image1'}), '(master=login_frame, image=image1)\n', (53868, 53902), False, 'from tkinter import ttk\n'), ((54110, 54140), 'tkinter.StringVar', 'tkinter.StringVar', (['login_frame'], {}), '(login_frame)\n', (54127, 54140), False, 'import tkinter\n'), ((54157, 54284), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'login_frame', 'textvariable': 'self.telegram_entry', 'justify': 'tkinter.LEFT', 'font': 'lybconstant.LYB_FONT', 'width': '(15)'}), '(master=login_frame, textvariable=self.telegram_entry, justify=\n tkinter.LEFT, font=lybconstant.LYB_FONT, width=15)\n', (54166, 54284), False, 'from tkinter import ttk\n'), ((54477, 54507), 'tkinter.StringVar', 'tkinter.StringVar', (['login_frame'], {}), '(login_frame)\n', (54494, 54507), False, 'import tkinter\n'), ((55083, 55105), 'tkinter.ttk.Style', 'ttk.Style', (['login_frame'], {}), '(login_frame)\n', (55092, 55105), False, 'from tkinter import ttk\n'), ((55205, 55235), 'tkinter.StringVar', 'tkinter.StringVar', (['login_frame'], {}), '(login_frame)\n', 
(55222, 55235), False, 'import tkinter\n'), ((55252, 55376), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'login_frame', 'textvariable': 'self.telegram_chatid_label', 'justify': 'tkinter.LEFT', 'style': '"""green_label.TLabel"""'}), "(master=login_frame, textvariable=self.telegram_chatid_label,\n justify=tkinter.LEFT, style='green_label.TLabel')\n", (55261, 55376), False, 'from tkinter import ttk\n'), ((55666, 55685), 'tkinter.ttk.Frame', 'ttk.Frame', (['frame_br'], {}), '(frame_br)\n', (55675, 55685), False, 'from tkinter import ttk\n'), ((56875, 56922), 'tkinter.ttk.Frame', 'ttk.Frame', (["self.option_dic['common_config_tab']"], {}), "(self.option_dic['common_config_tab'])\n", (56884, 56922), False, 'from tkinter import ttk\n'), ((56954, 57011), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', ([], {'master': 'self.common_top_frame', 'text': '"""봇 설정"""'}), "(master=self.common_top_frame, text='봇 설정')\n", (56968, 57011), False, 'from tkinter import ttk\n'), ((57063, 57115), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (57072, 57115), False, 'from tkinter import ttk\n'), ((57143, 57237), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""이미지를 인식할 때 비교 대상과 """', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='이미지를 인식할 때 비교 대상과 ', anchor=tkinter.W,\n justify=tkinter.LEFT)\n", (57152, 57237), False, 'from tkinter import ttk\n'), ((57657, 57681), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (57674, 57681), False, 'import tkinter\n'), ((57698, 57793), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.RIGHT', 'textvariable': 'self.threshold_entry', 'width': '(3)'}), '(master=frame, justify=tkinter.RIGHT, textvariable=self.\n threshold_entry, width=3)\n', (57707, 57793), False, 'from tkinter import ttk\n'), ((58225, 58300), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 
'frame', 'text': '"""% 이상 동일하면 감지하도록 설정합니다"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='% 이상 동일하면 감지하도록 설정합니다', justify=tkinter.LEFT)\n", (58234, 58300), False, 'from tkinter import ttk\n'), ((58526, 58578), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (58535, 58578), False, 'from tkinter import ttk\n'), ((58595, 58693), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""이미지를 인식할 때 RGB 값의 차이가 """', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='이미지를 인식할 때 RGB 값의 차이가 ', anchor=tkinter.W,\n justify=tkinter.LEFT)\n", (58604, 58693), False, 'from tkinter import ttk\n'), ((58824, 58848), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (58841, 58848), False, 'import tkinter\n'), ((58865, 58966), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.RIGHT', 'textvariable': 'self.pixel_tolerance_entry', 'width': '(3)'}), '(master=frame, justify=tkinter.RIGHT, textvariable=self.\n pixel_tolerance_entry, width=3)\n', (58874, 58966), False, 'from tkinter import ttk\n'), ((59427, 59499), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""이하는 같은 이미지로 간주합니다."""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='이하는 같은 이미지로 간주합니다.', justify=tkinter.LEFT)\n", (59436, 59499), False, 'from tkinter import ttk\n'), ((59725, 59777), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (59734, 59777), False, 'from tkinter import ttk\n'), ((59794, 59898), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""이미지 인식이 안 될 경우 찾을 때까지 지속적으로"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='이미지 인식이 안 될 경우 찾을 때까지 지속적으로', anchor=tkinter.\n W, justify=tkinter.LEFT)\n", (59803, 59898), False, 'from tkinter import ttk\n'), ((60019, 60043), 
'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (60036, 60043), False, 'import tkinter\n'), ((60060, 60152), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.RIGHT', 'textvariable': 'self.adjust_entry', 'width': '(3)'}), '(master=frame, justify=tkinter.RIGHT, textvariable=self.\n adjust_entry, width=3)\n', (60069, 60152), False, 'from tkinter import ttk\n'), ((60558, 60624), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""% 씩 가중치를 줍니다"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='% 씩 가중치를 줍니다', justify=tkinter.LEFT)\n", (60567, 60624), False, 'from tkinter import ttk\n'), ((60851, 60903), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (60860, 60903), False, 'from tkinter import ttk\n'), ((60920, 61007), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""봇의 작업 주기를 """', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='봇의 작업 주기를 ', anchor=tkinter.W, justify=\n tkinter.LEFT)\n", (60929, 61007), False, 'from tkinter import ttk\n'), ((61418, 61442), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (61435, 61442), False, 'import tkinter\n'), ((61459, 61558), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.RIGHT', 'textvariable': 'self.wakeup_period_entry', 'width': '(6)'}), '(master=frame, justify=tkinter.RIGHT, textvariable=self.\n wakeup_period_entry, width=6)\n', (61468, 61558), False, 'from tkinter import ttk\n'), ((61666, 61728), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""초로 설정합니다"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='초로 설정합니다', justify=tkinter.LEFT)\n", (61675, 61728), False, 'from tkinter import ttk\n'), ((62296, 62348), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, 
relief=frame_relief)\n', (62305, 62348), False, 'from tkinter import ttk\n'), ((62365, 62452), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""UI 갱신 주기를 """', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='UI 갱신 주기를 ', anchor=tkinter.W, justify=\n tkinter.LEFT)\n", (62374, 62452), False, 'from tkinter import ttk\n'), ((62584, 62608), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (62601, 62608), False, 'import tkinter\n'), ((62625, 62727), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.RIGHT', 'textvariable': 'self.update_period_ui_entry', 'width': '(6)'}), '(master=frame, justify=tkinter.RIGHT, textvariable=self.\n update_period_ui_entry, width=6)\n', (62634, 62727), False, 'from tkinter import ttk\n'), ((62835, 62897), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""초로 설정합니다"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='초로 설정합니다', justify=tkinter.LEFT)\n", (62844, 62897), False, 'from tkinter import ttk\n'), ((63451, 63503), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (63460, 63503), False, 'from tkinter import ttk\n'), ((63539, 63564), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (63557, 63564), False, 'import tkinter\n'), ((63582, 63704), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""모니터링 기능을 사용합니다"""', 'variable': 'self.use_monitoring_flag', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='모니터링 기능을 사용합니다', variable=self.\n use_monitoring_flag, onvalue=True, offvalue=False)\n", (63597, 63704), False, 'from tkinter import ttk\n'), ((64362, 64414), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (64371, 64414), False, 'from tkinter import ttk\n'), ((64432, 
64519), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""게임 화면 전환 후"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='게임 화면 전환 후', anchor=tkinter.W, justify=\n tkinter.LEFT)\n", (64441, 64519), False, 'from tkinter import ttk\n'), ((64691, 64715), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (64708, 64715), False, 'import tkinter\n'), ((64732, 64876), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.RIGHT', 'textvariable': 'self.gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE]', 'width': '(3)'}), '(master=frame, justify=tkinter.RIGHT, textvariable=self.\n gui_config_dic[lybconstant.LYB_DO_STRING_WAIT_TIME_SCENE_CHANGE], width=3)\n', (64741, 64876), False, 'from tkinter import ttk\n'), ((64984, 65048), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""초 동안 대기합니다"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='초 동안 대기합니다', justify=tkinter.LEFT)\n", (64993, 65048), False, 'from tkinter import ttk\n'), ((65801, 65853), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (65810, 65853), False, 'from tkinter import ttk\n'), ((65870, 65966), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""매크로 실행 중 에러가 발생하면 최대"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='매크로 실행 중 에러가 발생하면 최대', anchor=tkinter.W,\n justify=tkinter.LEFT)\n", (65879, 65966), False, 'from tkinter import ttk\n'), ((66100, 66124), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (66117, 66124), False, 'import tkinter\n'), ((66245, 66423), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.recovery_count_stringvar', 'state': '"""readonly"""', 'justify': 'tkinter.RIGHT', 'width': '(3)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, 
values=combobox_list, textvariable=self.\n recovery_count_stringvar, state='readonly', justify=tkinter.RIGHT,\n width=3, font=lybconstant.LYB_FONT)\n", (66257, 66423), False, 'from tkinter import ttk\n'), ((66567, 66631), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""회 재실행 시킵니다"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='회 재실행 시킵니다', justify=tkinter.LEFT)\n", (66576, 66631), False, 'from tkinter import ttk\n'), ((67259, 67311), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (67268, 67311), False, 'from tkinter import ttk\n'), ((67328, 67416), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""APP 종료 행동을 """', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='APP 종료 행동을 ', anchor=tkinter.W, justify=\n tkinter.LEFT)\n", (67337, 67416), False, 'from tkinter import ttk\n'), ((67544, 67568), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (67561, 67568), False, 'import tkinter\n'), ((67585, 67684), 'tkinter.ttk.Entry', 'ttk.Entry', ([], {'master': 'frame', 'justify': 'tkinter.RIGHT', 'textvariable': 'self.close_app_stringvar', 'width': '(3)'}), '(master=frame, justify=tkinter.RIGHT, textvariable=self.\n close_app_stringvar, width=3)\n', (67594, 67684), False, 'from tkinter import ttk\n'), ((67792, 67855), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""회 실행 시킵니다"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='회 실행 시킵니다', justify=tkinter.LEFT)\n", (67801, 67855), False, 'from tkinter import ttk\n'), ((68469, 68521), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (68478, 68521), False, 'from tkinter import ttk\n'), ((68562, 68587), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (68580, 68587), False, 'import tkinter\n'), ((68610, 
68741), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""마우스 랜덤 좌표 클릭(오차 범위:"""', 'variable': 'self.random_click_booleanvar', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='마우스 랜덤 좌표 클릭(오차 범위:', variable=self.\n random_click_booleanvar, onvalue=True, offvalue=False)\n", (68625, 68741), False, 'from tkinter import ttk\n'), ((68897, 68921), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (68914, 68921), False, 'import tkinter\n'), ((69036, 69196), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.random_click_pixel_stringvar', 'state': '"""readonly"""', 'width': '(2)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n random_click_pixel_stringvar, state='readonly', width=2, font=\n lybconstant.LYB_FONT)\n", (69048, 69196), False, 'from tkinter import ttk\n'), ((69327, 69384), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""픽셀)"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='픽셀)', justify=tkinter.LEFT)\n", (69336, 69384), False, 'from tkinter import ttk\n'), ((70374, 70426), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (70383, 70426), False, 'from tkinter import ttk\n'), ((70472, 70497), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (70490, 70497), False, 'import tkinter\n'), ((70520, 70653), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""썸네일 단축키로 닫기 활성화"""', 'variable': 'self.thumbnail_shortcut_booleanvar', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='썸네일 단축키로 닫기 활성화', variable=self.\n thumbnail_shortcut_booleanvar, onvalue=True, offvalue=False)\n", (70535, 70653), False, 'from tkinter import ttk\n'), ((71289, 71341), 'tkinter.ttk.Frame', 'ttk.Frame', 
(['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (71298, 71341), False, 'from tkinter import ttk\n'), ((71383, 71407), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (71400, 71407), False, 'import tkinter\n'), ((71424, 71490), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""썸네일 크기 - 가로 """', 'justify': 'tkinter.LEFT'}), "(master=frame, text='썸네일 크기 - 가로 ', justify=tkinter.LEFT)\n", (71433, 71490), False, 'from tkinter import ttk\n'), ((71696, 71853), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.thumbnail_width_stringvar', 'state': '"""readonly"""', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n thumbnail_width_stringvar, state='readonly', width=5, font=lybconstant.\n LYB_FONT)\n", (71708, 71853), False, 'from tkinter import ttk\n'), ((72010, 72034), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (72027, 72034), False, 'import tkinter\n'), ((72051, 72108), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '""" 세로"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text=' 세로', justify=tkinter.LEFT)\n", (72060, 72108), False, 'from tkinter import ttk\n'), ((72315, 72473), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.thumbnail_height_stringvar', 'state': '"""readonly"""', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n thumbnail_height_stringvar, state='readonly', width=5, font=lybconstant\n .LYB_FONT)\n", (72327, 72473), False, 'from tkinter import ttk\n'), ((73889, 73941), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (73898, 73941), False, 'from tkinter import ttk\n'), ((73987, 
74012), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (74005, 74012), False, 'import tkinter\n'), ((74035, 74199), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""마우스 드래그 실행할 때 강제 커서 치우기(체크 해제시 오동작할 수 있음)"""', 'variable': 'self.mouse_pointer_away_booleanvar', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text=\n '마우스 드래그 실행할 때 강제 커서 치우기(체크 해제시 오동작할 수 있음)', variable=self.\n mouse_pointer_away_booleanvar, onvalue=True, offvalue=False)\n", (74050, 74199), False, 'from tkinter import ttk\n'), ((74818, 74870), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (74827, 74870), False, 'from tkinter import ttk\n'), ((74915, 74940), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (74933, 74940), False, 'import tkinter\n'), ((74963, 75110), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""녹스 최신 버전 사용 중(앱 종료 기능이 구버전과 다름)"""', 'variable': 'self.close_app_nox_new_booleanvar', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='녹스 최신 버전 사용 중(앱 종료 기능이 구버전과 다름)',\n variable=self.close_app_nox_new_booleanvar, onvalue=True, offvalue=False)\n", (74978, 75110), False, 'from tkinter import ttk\n'), ((75701, 75753), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.configure_frame'], {'relief': 'frame_relief'}), '(self.configure_frame, relief=frame_relief)\n', (75710, 75753), False, 'from tkinter import ttk\n'), ((75794, 75818), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (75811, 75818), False, 'import tkinter\n'), ((75836, 75915), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""화면 프리징 감지 제한 시간(0: 사용 안함)"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='화면 프리징 감지 제한 시간(0: 사용 안함)', justify=tkinter.LEFT)\n", (75845, 75915), False, 'from tkinter import ttk\n'), ((76116, 76272), 'tkinter.ttk.Combobox', 
'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.freezing_limit_stringvar', 'state': '"""readonly"""', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n freezing_limit_stringvar, state='readonly', width=5, font=lybconstant.\n LYB_FONT)\n", (76128, 76272), False, 'from tkinter import ttk\n'), ((76403, 76458), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""초"""', 'justify': 'tkinter.LEFT'}), "(master=frame, text='초', justify=tkinter.LEFT)\n", (76412, 76458), False, 'from tkinter import ttk\n'), ((77164, 77229), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', ([], {'master': 'self.common_top_frame', 'text': '"""앱플레이어 재시작 설정"""'}), "(master=self.common_top_frame, text='앱플레이어 재시작 설정')\n", (77178, 77229), False, 'from tkinter import ttk\n'), ((77247, 77269), 'tkinter.ttk.Frame', 'ttk.Frame', (['label_frame'], {}), '(label_frame)\n', (77256, 77269), False, 'from tkinter import ttk\n'), ((77308, 77333), 'tkinter.BooleanVar', 'tkinter.BooleanVar', (['frame'], {}), '(frame)\n', (77326, 77333), False, 'import tkinter\n'), ((77848, 77987), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""앱 플레이어(녹스, 모모) 재시작 기능을 사용합니다"""', 'variable': 'self.use_restart_app_player', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='앱 플레이어(녹스, 모모) 재시작 기능을 사용합니다', variable\n =self.use_restart_app_player, onvalue=True, offvalue=False)\n", (77863, 77987), False, 'from tkinter import ttk\n'), ((78152, 78174), 'tkinter.ttk.Frame', 'ttk.Frame', (['label_frame'], {}), '(label_frame)\n', (78161, 78174), False, 'from tkinter import ttk\n'), ((78191, 78282), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""앱 플레이어 재시작 주기:"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='앱 플레이어 재시작 주기:', anchor=tkinter.W, justify=\n tkinter.LEFT)\n", (78200, 78282), False, 'from tkinter import ttk\n'), 
((78421, 78445), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (78438, 78445), False, 'import tkinter\n'), ((79176, 79337), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.use_restart_app_player_period', 'state': '"""readonly"""', 'width': '(6)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n use_restart_app_player_period, state='readonly', width=6, font=\n lybconstant.LYB_FONT)\n", (79188, 79337), False, 'from tkinter import ttk\n'), ((79467, 79540), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""초"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='초', anchor=tkinter.W, justify=tkinter.LEFT)\n", (79476, 79540), False, 'from tkinter import ttk\n'), ((79692, 79714), 'tkinter.ttk.Frame', 'ttk.Frame', (['label_frame'], {}), '(label_frame)\n', (79701, 79714), False, 'from tkinter import ttk\n'), ((79731, 79829), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""앱 플레이어 종료 후 재시작 대기 시간:"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='앱 플레이어 종료 후 재시작 대기 시간:', anchor=tkinter.W,\n justify=tkinter.LEFT)\n", (79740, 79829), False, 'from tkinter import ttk\n'), ((79964, 79988), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (79981, 79988), False, 'import tkinter\n'), ((80596, 80752), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.restart_app_player_delay', 'state': '"""readonly"""', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n restart_app_player_delay, state='readonly', width=5, font=lybconstant.\n LYB_FONT)\n", (80608, 80752), False, 'from tkinter import ttk\n'), ((80882, 80955), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""초"""', 'anchor': 'tkinter.W', 
'justify': 'tkinter.LEFT'}), "(master=frame, text='초', anchor=tkinter.W, justify=tkinter.LEFT)\n", (80891, 80955), False, 'from tkinter import ttk\n'), ((81107, 81129), 'tkinter.ttk.Frame', 'ttk.Frame', (['label_frame'], {}), '(label_frame)\n', (81116, 81129), False, 'from tkinter import ttk\n'), ((81146, 81244), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""앱 플레이어 종료 후 재시작 시도 횟수:"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='앱 플레이어 종료 후 재시작 시도 횟수:', anchor=tkinter.W,\n justify=tkinter.LEFT)\n", (81155, 81244), False, 'from tkinter import ttk\n'), ((81379, 81403), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (81396, 81403), False, 'import tkinter\n'), ((82005, 82161), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': 'combobox_list', 'textvariable': 'self.restart_app_player_retry', 'state': '"""readonly"""', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=combobox_list, textvariable=self.\n restart_app_player_retry, state='readonly', width=5, font=lybconstant.\n LYB_FONT)\n", (82017, 82161), False, 'from tkinter import ttk\n'), ((82291, 82364), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""회"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='회', anchor=tkinter.W, justify=tkinter.LEFT)\n", (82300, 82364), False, 'from tkinter import ttk\n'), ((82645, 82717), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', ([], {'master': "self.option_dic['common_config_tab']", 'text': '"""텔레그램"""'}), "(master=self.option_dic['common_config_tab'], text='텔레그램')\n", (82659, 82717), False, 'from tkinter import ttk\n'), ((82769, 82799), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.telegram_frame'], {}), '(self.telegram_frame)\n', (82778, 82799), False, 'from tkinter import ttk\n'), ((83067, 83087), 'tkinter.BooleanVar', 'tkinter.BooleanVar', ([], {}), '()\n', (83085, 83087), False, 'import tkinter\n'), 
((83424, 83552), 'tkinter.ttk.Checkbutton', 'ttk.Checkbutton', ([], {'master': 'frame', 'text': '"""프로그램 오류 발생 알림"""', 'variable': 'self.recovery_telegram_checkbox', 'onvalue': '(True)', 'offvalue': '(False)'}), "(master=frame, text='프로그램 오류 발생 알림', variable=self.\n recovery_telegram_checkbox, onvalue=True, offvalue=False)\n", (83439, 83552), False, 'from tkinter import ttk\n'), ((83717, 83747), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.telegram_frame'], {}), '(self.telegram_frame)\n', (83726, 83747), False, 'from tkinter import ttk\n'), ((83764, 83858), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""텔레그램 메세지 수신 확인 주기:"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='텔레그램 메세지 수신 확인 주기:', anchor=tkinter.W,\n justify=tkinter.LEFT)\n", (83773, 83858), False, 'from tkinter import ttk\n'), ((83990, 84014), 'tkinter.StringVar', 'tkinter.StringVar', (['frame'], {}), '(frame)\n', (84007, 84014), False, 'import tkinter\n'), ((84034, 84192), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame', 'values': '[5, 10, 30, 99999]', 'textvariable': 'self.period_telegram_entry', 'state': '"""readonly"""', 'width': '(5)', 'font': 'lybconstant.LYB_FONT'}), "(master=frame, values=[5, 10, 30, 99999], textvariable=self.\n period_telegram_entry, state='readonly', width=5, font=lybconstant.LYB_FONT\n )\n", (84046, 84192), False, 'from tkinter import ttk\n'), ((84322, 84395), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame', 'text': '"""초"""', 'anchor': 'tkinter.W', 'justify': 'tkinter.LEFT'}), "(master=frame, text='초', anchor=tkinter.W, justify=tkinter.LEFT)\n", (84331, 84395), False, 'from tkinter import ttk\n'), ((85029, 85103), 'tkinter.ttk.LabelFrame', 'ttk.LabelFrame', ([], {'master': "self.option_dic['common_config_tab']", 'text': '"""비정상 복구"""'}), "(master=self.option_dic['common_config_tab'], text='비정상 복구')\n", (85043, 85103), False, 'from tkinter import ttk\n'), ((86244, 86431), 
'likeyoubot_rohan.LYBRohanTab', 'LYBROHAN.LYBRohanTab', (['self.tab_frame[game_index + 1]', 'self.configure', 'self.game_options[self.games[game_index]]', 'self.game_frame[self.games[game_index]]', 'self.width', 'self.height'], {}), '(self.tab_frame[game_index + 1], self.configure, self.\n game_options[self.games[game_index]], self.game_frame[self.games[\n game_index]], self.width, self.height)\n', (86264, 86431), True, 'import likeyoubot_rohan as LYBROHAN\n'), ((106525, 106536), 'time.time', 'time.time', ([], {}), '()\n', (106534, 106536), False, 'import time\n'), ((107322, 107333), 'time.time', 'time.time', ([], {}), '()\n', (107331, 107333), False, 'import time\n'), ((107997, 108008), 'time.time', 'time.time', ([], {}), '()\n', (108006, 108008), False, 'import time\n'), ((108524, 108535), 'time.time', 'time.time', ([], {}), '()\n', (108533, 108535), False, 'import time\n'), ((133194, 133204), 'subprocess.Popen', 'Popen', (['cmd'], {}), '(cmd)\n', (133199, 133204), False, 'from subprocess import Popen, PIPE\n'), ((149397, 149447), 'webbrowser.open_new', 'webbrowser.open_new', (['"""https://numaking.cafe24.com"""'], {}), "('https://numaking.cafe24.com')\n", (149416, 149447), False, 'import webbrowser\n'), ((149567, 149596), 'webbrowser.open_new', 'webbrowser.open_new', (['docs_url'], {}), '(docs_url)\n', (149586, 149596), False, 'import webbrowser\n'), ((149732, 149762), 'webbrowser.open_new', 'webbrowser.open_new', (['kakao_url'], {}), '(kakao_url)\n', (149751, 149762), False, 'import webbrowser\n'), ((149912, 149942), 'webbrowser.open_new', 'webbrowser.open_new', (['kakao_url'], {}), '(kakao_url)\n', (149931, 149942), False, 'import webbrowser\n'), ((150082, 150112), 'webbrowser.open_new', 'webbrowser.open_new', (['kakao_url'], {}), '(kakao_url)\n', (150101, 150112), False, 'import webbrowser\n'), ((150252, 150282), 'webbrowser.open_new', 'webbrowser.open_new', (['kakao_url'], {}), '(kakao_url)\n', (150271, 150282), False, 'import webbrowser\n'), ((150422, 
150452), 'webbrowser.open_new', 'webbrowser.open_new', (['kakao_url'], {}), '(kakao_url)\n', (150441, 150452), False, 'import webbrowser\n'), ((150592, 150622), 'webbrowser.open_new', 'webbrowser.open_new', (['kakao_url'], {}), '(kakao_url)\n', (150611, 150622), False, 'import webbrowser\n'), ((150673, 150741), 'webbrowser.open_new', 'webbrowser.open_new', (['"""https://bitbucket.org/dogfooter/dogfooter/src"""'], {}), "('https://bitbucket.org/dogfooter/dogfooter/src')\n", (150692, 150741), False, 'import webbrowser\n'), ((174922, 174933), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (174931, 174933), False, 'from tkinter import ttk\n'), ((175013, 175064), 'tkinter.ttk.Frame', 'ttk.Frame', ([], {'master': "self.option_dic['monitor_master']"}), "(master=self.option_dic['monitor_master'])\n", (175022, 175064), False, 'from tkinter import ttk\n'), ((175841, 175871), 'tkinter.StringVar', 'tkinter.StringVar', (['frame_label'], {}), '(frame_label)\n', (175858, 175871), False, 'import tkinter\n'), ((176244, 176431), 'tkinter.ttk.Combobox', 'ttk.Combobox', ([], {'master': 'frame_label', 'values': 'combo_list', 'textvariable': 'self.wlist_stringvar_dic[title]', 'state': '"""readonly"""', 'height': '(10)', 'width': '(24)', 'font': 'monitor_font', 'justify': 'tkinter.LEFT'}), "(master=frame_label, values=combo_list, textvariable=self.\n wlist_stringvar_dic[title], state='readonly', height=10, width=24, font\n =monitor_font, justify=tkinter.LEFT)\n", (176256, 176431), False, 'from tkinter import ttk\n'), ((176757, 176808), 'tkinter.ttk.Label', 'ttk.Label', ([], {'master': 'frame_label', 'text': 'status', 'width': '(4)'}), '(master=frame_label, text=status, width=4)\n', (176766, 176808), False, 'from tkinter import ttk\n'), ((189806, 189832), 'belfrywidgets.ToolTip', 'ToolTip', (['widget_name', 'text'], {}), '(widget_name, text)\n', (189813, 189832), False, 'from belfrywidgets import ToolTip\n'), ((192579, 192651), 'likeyoubot_rest.LYBRest', 'likeyoubot_rest.LYBRest', 
(['self.configure.root_url', 'user_id', 'user_password'], {}), '(self.configure.root_url, user_id, user_password)\n', (192602, 192651), False, 'import likeyoubot_rest\n'), ((192888, 192904), 'PIL.ImageGrab.grab', 'ImageGrab.grab', ([], {}), '()\n', (192902, 192904), False, 'from PIL import Image, ImageTk, ImageGrab\n'), ((894, 914), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (909, 914), False, 'import os\n'), ((3886, 4045), 'tkinter.ttk.Frame', 'ttk.Frame', ([], {'master': 'self.note', 'width': '(self.width * 0.2 + 10 * lybconstant.LYB_PADDING)', 'height': '(self.height - lybconstant.LYB_PADDING)', 'relief': 'frame_relief'}), '(master=self.note, width=self.width * 0.2 + 10 * lybconstant.\n LYB_PADDING, height=self.height - lybconstant.LYB_PADDING, relief=\n frame_relief)\n', (3895, 4045), False, 'from tkinter import ttk\n'), ((101451, 101462), 'time.time', 'time.time', ([], {}), '()\n', (101460, 101462), False, 'import time\n'), ((102295, 102306), 'time.time', 'time.time', ([], {}), '()\n', (102304, 102306), False, 'import time\n'), ((103024, 103062), 'copy.deepcopy', 'copy.deepcopy', (['game_object.player_type'], {}), '(game_object.player_type)\n', (103037, 103062), False, 'import copy\n'), ((103108, 103149), 'copy.deepcopy', 'copy.deepcopy', (['game_object.multi_hwnd_dic'], {}), '(game_object.multi_hwnd_dic)\n', (103121, 103149), False, 'import copy\n'), ((103193, 103232), 'copy.deepcopy', 'copy.deepcopy', (['game_object.window_title'], {}), '(game_object.window_title)\n', (103206, 103232), False, 'import copy\n'), ((120997, 121010), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (121008, 121010), False, 'import queue\n'), ((121064, 121077), 'queue.Queue', 'queue.Queue', ([], {}), '()\n', (121075, 121077), False, 'import queue\n'), ((126398, 126672), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""start"""', '[self.start_flag, each_hwnd, started_window_name, started_game_name,\n started_option, started_config, 
started_window_config,\n side_window_handle, parent_window_handle, self.multi_hwnds, self.\n game_tab_dic[started_game_name]]'], {}), "('start', [self.start_flag, each_hwnd,\n started_window_name, started_game_name, started_option, started_config,\n started_window_config, side_window_handle, parent_window_handle, self.\n multi_hwnds, self.game_tab_dic[started_game_name]])\n", (126427, 126672), False, 'import likeyoubot_message\n'), ((130078, 130128), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""longPolling"""', 'self'], {}), "('longPolling', self)\n", (130107, 130128), False, 'import likeyoubot_message\n'), ((130313, 130369), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""GetWindowLocation"""', 'self'], {}), "('GetWindowLocation', self)\n", (130342, 130369), False, 'import likeyoubot_message\n'), ((130836, 130905), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""search"""', 'self.configure.window_config'], {}), "('search', self.configure.window_config)\n", (130865, 130905), False, 'import likeyoubot_message\n'), ((131767, 131821), 'requests.get', 'requests.get', (['lybcfg_information'], {'allow_redirects': '(True)'}), '(lybcfg_information, allow_redirects=True)\n', (131779, 131821), False, 'import requests\n'), ((132696, 132721), 'os.path.isfile', 'os.path.isfile', (['file_path'], {}), '(file_path)\n', (132710, 132721), False, 'import os\n'), ((132953, 132963), 'subprocess.Popen', 'Popen', (['cmd'], {}), '(cmd)\n', (132958, 132963), False, 'from subprocess import Popen, PIPE\n'), ((192732, 192791), 'random.choices', 'random.choices', (['(string.ascii_uppercase + string.digits)'], {'k': '(8)'}), '(string.ascii_uppercase + string.digits, k=8)\n', (192746, 192791), False, 'import random\n'), ((193301, 193324), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (193322, 193324), False, 'import datetime\n'), ((194152, 194222), 'likeyoubot_message.LYBMessage', 
'likeyoubot_message.LYBMessage', (['"""watchout"""', "[self, 'hide', window_name]"], {}), "('watchout', [self, 'hide', window_name])\n", (194181, 194222), False, 'import likeyoubot_message\n'), ((194710, 194780), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""watchout"""', "[self, 'show', window_name]"], {}), "('watchout', [self, 'show', window_name])\n", (194739, 194780), False, 'import likeyoubot_message\n'), ((195270, 195333), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""thumbnail"""', '[self, window_name]'], {}), "('thumbnail', [self, window_name])\n", (195299, 195333), False, 'import likeyoubot_message\n'), ((85379, 85511), 'tkinter.ttk.Frame', 'ttk.Frame', (['self.note'], {'width': '(self.width - lybconstant.LYB_PADDING)', 'height': '(self.height - lybconstant.LYB_PADDING)', 'relief': '"""groove"""'}), "(self.note, width=self.width - lybconstant.LYB_PADDING, height=\n self.height - lybconstant.LYB_PADDING, relief='groove')\n", (85388, 85511), False, 'from tkinter import ttk\n'), ((100300, 100325), 'datetime.datetime.today', 'datetime.datetime.today', ([], {}), '()\n', (100323, 100325), False, 'import datetime\n'), ((101611, 101640), 'os.path.isfile', 'os.path.isfile', (['ads_file_path'], {}), '(ads_file_path)\n', (101625, 101640), False, 'import os\n'), ((102112, 102122), 'subprocess.Popen', 'Popen', (['cmd'], {}), '(cmd)\n', (102117, 102122), False, 'from subprocess import Popen, PIPE\n'), ((102219, 102230), 'time.time', 'time.time', ([], {}), '()\n', (102228, 102230), False, 'import time\n'), ((103984, 103995), 'time.time', 'time.time', ([], {}), '()\n', (103993, 103995), False, 'import time\n'), ((105613, 105624), 'time.time', 'time.time', ([], {}), '()\n', (105622, 105624), False, 'import time\n'), ((106332, 106390), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""start_app_player"""', 'message'], {}), "('start_app_player', message)\n", (106361, 106390), False, 'import 
likeyoubot_message\n'), ((106746, 106757), 'time.time', 'time.time', ([], {}), '()\n', (106755, 106757), False, 'import time\n'), ((107746, 107757), 'time.time', 'time.time', ([], {}), '()\n', (107755, 107757), False, 'import time\n'), ((108428, 108439), 'time.time', 'time.time', ([], {}), '()\n', (108437, 108439), False, 'import time\n'), ((115340, 115370), 'copy.deepcopy', 'copy.deepcopy', (['message.message'], {}), '(message.message)\n', (115353, 115370), False, 'import copy\n'), ((122201, 122244), 'copy.deepcopy', 'copy.deepcopy', (['self.configure.common_config'], {}), '(self.configure.common_config)\n', (122214, 122244), False, 'import copy\n'), ((124474, 124511), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (124485, 124511), False, 'import pickle\n'), ((128089, 128126), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (128100, 128126), False, 'import pickle\n'), ((128525, 128569), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""pause"""', 'None'], {}), "('pause', None)\n", (128554, 128569), False, 'import likeyoubot_message\n'), ((128920, 128962), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""end"""', 'None'], {}), "('end', None)\n", (128949, 128962), False, 'import likeyoubot_message\n'), ((129298, 129342), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""pause"""', 'None'], {}), "('pause', None)\n", (129327, 129342), False, 'import likeyoubot_message\n'), ((129459, 129501), 'likeyoubot_message.LYBMessage', 'likeyoubot_message.LYBMessage', (['"""end"""', 'None'], {}), "('end', None)\n", (129488, 129501), False, 'import likeyoubot_message\n'), ((130581, 130618), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (130592, 130618), False, 'import pickle\n'), ((134347, 134384), 'pickle.dump', 'pickle.dump', (['self.configure', 
'dat_file'], {}), '(self.configure, dat_file)\n', (134358, 134384), False, 'import pickle\n'), ((139032, 139069), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (139043, 139069), False, 'import pickle\n'), ((142426, 142480), 'copy.deepcopy', 'copy.deepcopy', (['self.configure.common_config[game_name]'], {}), '(self.configure.common_config[game_name])\n', (142439, 142480), False, 'import copy\n'), ((152768, 152805), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (152779, 152805), False, 'import pickle\n'), ((153182, 153219), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (153193, 153219), False, 'import pickle\n'), ((153607, 153644), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (153618, 153644), False, 'import pickle\n'), ((154008, 154045), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (154019, 154045), False, 'import pickle\n'), ((155434, 155471), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (155445, 155471), False, 'import pickle\n'), ((160552, 160589), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (160563, 160589), False, 'import pickle\n'), ((192412, 192443), 'likeyoubot_license.LYBLicense', 'likeyoubot_license.LYBLicense', ([], {}), '()\n', (192441, 192443), False, 'import likeyoubot_license\n'), ((193216, 193241), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (193230, 193241), False, 'import os\n'), ((193259, 193281), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (193270, 193281), False, 'import os\n'), ((193897, 193934), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (193908, 193934), 
False, 'import pickle\n'), ((194455, 194492), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (194466, 194492), False, 'import pickle\n'), ((195015, 195052), 'pickle.dump', 'pickle.dump', (['self.configure', 'dat_file'], {}), '(self.configure, dat_file)\n', (195026, 195052), False, 'import pickle\n'), ((1277, 1299), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1297, 1299), False, 'import traceback\n'), ((100207, 100229), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (100227, 100229), False, 'import traceback\n'), ((101754, 101783), 'os.path.isfile', 'os.path.isfile', (['ads_file_path'], {}), '(ads_file_path)\n', (101768, 101783), False, 'import os\n'), ((115457, 115487), 'copy.deepcopy', 'copy.deepcopy', (['message.message'], {}), '(message.message)\n', (115470, 115487), False, 'import copy\n'), ((124558, 124580), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (124578, 124580), False, 'import traceback\n'), ((128173, 128195), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (128193, 128195), False, 'import traceback\n'), ((130665, 130687), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (130685, 130687), False, 'import traceback\n'), ((131230, 131252), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (131250, 131252), False, 'import traceback\n'), ((131450, 131472), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (131470, 131472), False, 'import traceback\n'), ((132309, 132331), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (132329, 132331), False, 'import traceback\n'), ((134431, 134453), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (134451, 134453), False, 'import traceback\n'), ((139116, 139138), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (139136, 139138), False, 'import traceback\n'), ((152852, 152874), 
'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (152872, 152874), False, 'import traceback\n'), ((153266, 153288), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (153286, 153288), False, 'import traceback\n'), ((153691, 153713), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (153711, 153713), False, 'import traceback\n'), ((154092, 154114), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (154112, 154114), False, 'import traceback\n'), ((155518, 155540), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (155538, 155540), False, 'import traceback\n'), ((160636, 160658), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (160656, 160658), False, 'import traceback\n'), ((193617, 193639), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (193637, 193639), False, 'import traceback\n'), ((193981, 194003), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (194001, 194003), False, 'import traceback\n'), ((194539, 194561), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (194559, 194561), False, 'import traceback\n'), ((195099, 195121), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (195119, 195121), False, 'import traceback\n'), ((101907, 101938), 'likeyoubot_license.LYBLicense', 'likeyoubot_license.LYBLicense', ([], {}), '()\n', (101936, 101938), False, 'import likeyoubot_license\n'), ((109311, 109333), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (109331, 109333), False, 'import traceback\n'), ((115578, 115608), 'copy.deepcopy', 'copy.deepcopy', (['message.message'], {}), '(message.message)\n', (115591, 115608), False, 'import copy\n'), ((164558, 164569), 'time.time', 'time.time', ([], {}), '()\n', (164567, 164569), False, 'import time\n'), ((168829, 168851), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (168849, 168851), False, 'import traceback\n'), ((168928, 
168950), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (168948, 168950), False, 'import traceback\n'), ((100004, 100026), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (100024, 100026), False, 'import traceback\n'), ((115697, 115727), 'copy.deepcopy', 'copy.deepcopy', (['message.message'], {}), '(message.message)\n', (115710, 115727), False, 'import copy\n'), ((164359, 164381), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (164379, 164381), False, 'import traceback\n'), ((172305, 172316), 'tkinter.ttk.Style', 'ttk.Style', ([], {}), '()\n', (172314, 172316), False, 'from tkinter import ttk\n'), ((165163, 165174), 'time.time', 'time.time', ([], {}), '()\n', (165172, 165174), False, 'import time\n'), ((105060, 105071), 'time.time', 'time.time', ([], {}), '()\n', (105069, 105071), False, 'import time\n'), ((173329, 173340), 'time.time', 'time.time', ([], {}), '()\n', (173338, 173340), False, 'import time\n'), ((173503, 173514), 'time.time', 'time.time', ([], {}), '()\n', (173512, 173514), False, 'import time\n')]
|
from django.db import models
from django.contrib.auth.models import User
from students.models import Class
class BaseAbstractPost(models.Model):
posted_on = models.DateTimeField(auto_now_add=True)
edited = models.BooleanField(default=False)
last_edited_on = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class AbstractPost(BaseAbstractPost):
author = models.ForeignKey(User, on_delete=models.CASCADE)
class Meta:
abstract = True
class News(AbstractPost):
title = models.CharField(max_length=100, blank=False)
content = models.TextField(max_length=10000, blank=False)
class_number = models.IntegerField(
default=8,
validators=[Class.CLASS_NUMBER_VALIDATORS],
choices=Class.CLASS_NUMBERS
)
class_letter = models.CharField(
max_length=1,
blank=True,
choices=Class.CLASS_LETTERS
)
def __str__(self):
return '{} ({})'.format(self.title, self.posted_on.date())
class Meta:
ordering = ['-last_edited_on']
verbose_name_plural = 'news'
class Comment(AbstractPost):
news = models.ForeignKey(News, related_name='comments', on_delete=models.CASCADE)
author_image = models.URLField(blank=True)
content = models.TextField(max_length=2048)
def __str__(self):
return '{} - {}'.format(self.author, self.news)
class Meta:
ordering = ['-posted_on']
|
[
"django.db.models.TextField",
"django.db.models.URLField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.BooleanField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((164, 203), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (184, 203), False, 'from django.db import models\n'), ((217, 251), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (236, 251), False, 'from django.db import models\n'), ((273, 308), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (293, 308), False, 'from django.db import models\n'), ((403, 452), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (420, 452), False, 'from django.db import models\n'), ((534, 579), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)', 'blank': '(False)'}), '(max_length=100, blank=False)\n', (550, 579), False, 'from django.db import models\n'), ((594, 641), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(10000)', 'blank': '(False)'}), '(max_length=10000, blank=False)\n', (610, 641), False, 'from django.db import models\n'), ((661, 768), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(8)', 'validators': '[Class.CLASS_NUMBER_VALIDATORS]', 'choices': 'Class.CLASS_NUMBERS'}), '(default=8, validators=[Class.CLASS_NUMBER_VALIDATORS],\n choices=Class.CLASS_NUMBERS)\n', (680, 768), False, 'from django.db import models\n'), ((814, 885), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(1)', 'blank': '(True)', 'choices': 'Class.CLASS_LETTERS'}), '(max_length=1, blank=True, choices=Class.CLASS_LETTERS)\n', (830, 885), False, 'from django.db import models\n'), ((1142, 1216), 'django.db.models.ForeignKey', 'models.ForeignKey', (['News'], {'related_name': '"""comments"""', 'on_delete': 'models.CASCADE'}), "(News, related_name='comments', on_delete=models.CASCADE)\n", (1159, 1216), False, 'from django.db import models\n'), 
((1236, 1263), 'django.db.models.URLField', 'models.URLField', ([], {'blank': '(True)'}), '(blank=True)\n', (1251, 1263), False, 'from django.db import models\n'), ((1278, 1311), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(2048)'}), '(max_length=2048)\n', (1294, 1311), False, 'from django.db import models\n')]
|
"""@file numpy_float_array_as_tfrecord_writer.py
contains the NumpyFloatArrayAsTfrecordWriter class"""
import numpy as np
import tensorflow as tf
import tfwriter
class NumpyFloatArrayAsTfrecordWriter(tfwriter.TfWriter):
    """a TfWriter to write numpy float arrays"""
    def _get_example(self, data):
        """Build the tf.train.Example proto for one numpy array.

        Args:
            data: the numpy array to be serialized

        Returns:
            a tf.train.Example with a 'shape' feature (bytes of the shape
            array) and a 'data' feature (bytes of the flattened float32 data)
        """
        # ndarray.tostring() was deprecated and removed in NumPy 2.0;
        # tobytes() produces byte-identical output.
        # NOTE(review): astype(np.int32) is applied to `data`, not to the
        # shape array, so the shape bytes use the platform default int
        # dtype — confirm the paired reader expects that.
        shape_feature = tf.train.Feature(bytes_list=tf.train.BytesList(
            value=[np.array(data.astype(np.int32).shape).tobytes()]))
        data_feature = tf.train.Feature(bytes_list=tf.train.BytesList(
            value=[data.reshape([-1]).astype(np.float32).tobytes()]))
        # create the example proto
        example = tf.train.Example(features=tf.train.Features(feature={
            'shape': shape_feature,
            'data': data_feature}))
        return example
|
[
"tensorflow.train.Features"
] |
[((764, 837), 'tensorflow.train.Features', 'tf.train.Features', ([], {'feature': "{'shape': shape_feature, 'data': data_feature}"}), "(feature={'shape': shape_feature, 'data': data_feature})\n", (781, 837), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2015 moco_beta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys, os
PY3 = sys.version_info[0] == 3
class NodeType:
    """Dictionary source a lattice node may originate from."""
    SYS_DICT = "SYS_DICT"    # entry from the bundled system dictionary
    USER_DICT = "USER_DICT"  # entry from a user-supplied dictionary
    UNKNOWN = "UNKNOWN"      # surface not found in any dictionary
class Node(object):
    """
    Lattice node carrying a full dictionary entry (10-tuple).
    """
    __slots__ = [
        'pos', 'index', 'surface', 'left_id', 'right_id', 'cost',
        'part_of_speech', 'infl_type', 'infl_form',
        'base_form', 'reading', 'phonetic', 'node_type',
        'min_cost', 'back_pos', 'back_index'
    ]

    def __init__(self, dict_entry, node_type=NodeType.SYS_DICT):
        # Lattice bookkeeping; filled in properly by Lattice.add().
        self.pos = 0
        self.index = 0
        self.min_cost = 2147483647  # int(pow(2,31)-1)
        self.back_pos = -1
        self.back_index = -1
        # Dictionary payload, unpacked from the entry tuple.
        (self.surface, self.left_id, self.right_id, self.cost,
         self.part_of_speech, self.infl_type, self.infl_form,
         self.base_form, self.reading, self.phonetic) = dict_entry
        self.node_type = node_type

    def __str__(self):
        fields = (self.surface, self.left_id, self.right_id, self.cost,
                  self.part_of_speech, self.infl_type, self.infl_form,
                  self.base_form, self.reading, self.phonetic,
                  self.back_pos, self.back_index)
        return "(%s,%s,%s,%d,%s,%s,%s,%s,%s,%s) [back_pos=%d,back_index=%d]" % fields

    def node_label(self):
        """Text shown for this node in lattice visualisations."""
        return self.surface
class SurfaceNode(object):
    """
    Lightweight lattice node carrying the surface form only.
    """
    __slots__ = ['pos', 'index', 'num', 'surface', 'left_id', 'right_id',
                 'cost', 'node_type', 'min_cost', 'back_pos', 'back_index']

    def __init__(self, dict_entry, node_type=NodeType.SYS_DICT):
        # Lattice bookkeeping; filled in properly by Lattice.add().
        self.pos = 0
        self.index = 0
        self.min_cost = 2147483647  # int(pow(2,31)-1)
        self.back_pos = -1
        self.back_index = -1
        # Compact payload: (num, surface, left_id, right_id, cost).
        (self.num, self.surface, self.left_id,
         self.right_id, self.cost) = dict_entry
        self.node_type = node_type

    def node_label(self):
        """Text shown for this node in lattice visualisations."""
        return self.surface
class BOS(object):
    """
    Beginning-of-sentence sentinel node.
    """
    def __init__(self):
        # Zero cost at position 0; back pointers are deliberately invalid
        # because nothing precedes BOS.
        self.pos = self.index = self.right_id = self.cost = self.min_cost = 0
        self.back_pos = self.back_index = -1

    def __str__(self):
        return '__BOS__'

    def node_label(self):
        return 'BOS'
class EOS(object):
    """
    End-of-sentence sentinel node.
    """
    def __init__(self, pos):
        # Sentinel cost before relaxation by Lattice.add().
        self.min_cost = 2147483647  # int(pow(2,31)-1)
        self.pos = pos
        self.cost = 0
        self.left_id = 0

    def __str__(self):
        # NOTE: back_pos is assigned externally by Lattice.add().
        return '__EOS__ [back_pos=%d]' % self.back_pos

    def node_label(self):
        return 'EOS'
class Lattice:
    """Viterbi lattice over an input string.

    ``snodes[pos]`` holds the nodes that start at position ``pos`` while
    ``enodes[pos]`` holds the nodes that end just before ``pos``.  ``add``
    relaxes each node's minimum path cost against every node ending at the
    current position; after ``end`` appends the EOS node, ``backward``
    follows the recorded back pointers to recover the best path.
    """
    def __init__(self, size, dic):
        # size: length of the input string; dic supplies transition costs.
        self.snodes = [[BOS()]] + [[] for i in range(0, size + 1)]
        self.enodes = [[], [BOS()]] + [[] for i in range(0, size + 1)]
        self.conn_costs = [[]]
        self.p = 1  # current position in the input
        self.dic = dic

    def add(self, node):
        """Insert *node* at the current position, relaxing its path cost."""
        min_cost, best_node, node_left_id = node.min_cost - node.cost, None, node.left_id
        dic = self.dic
        for enode in self.enodes[self.p]:
            cost = enode.min_cost + dic.get_trans_cost(enode.right_id, node_left_id)
            if cost < min_cost:
                min_cost, best_node = cost, enode
        node.min_cost = min_cost + node.cost
        node.back_index = best_node.index
        node.back_pos = best_node.pos
        node.pos = self.p
        node.index = len(self.snodes[self.p])
        self.snodes[self.p].append(node)
        # nodes without a surface (BOS/EOS) are treated as spanning one position
        node_len = len(node.surface) if hasattr(node, 'surface') else 1
        self.enodes[self.p + node_len].append(node)

    def forward(self):
        """Advance to the next position that has at least one ending node.

        Returns the number of positions skipped.
        """
        old_p = self.p
        self.p += 1
        while not self.enodes[self.p]:
            self.p += 1
        return self.p - old_p

    def end(self):
        """Close the lattice by adding the EOS node."""
        eos = EOS(self.p)
        self.add(eos)
        # truncate snodes
        self.snodes = self.snodes[:self.p+1]

    def backward(self):
        """Follow back pointers from EOS to BOS; return the best path."""
        assert isinstance(self.snodes[len(self.snodes)-1][0], EOS)
        path = []
        pos = len(self.snodes) - 1
        index = 0
        while pos >= 0:
            node = self.snodes[pos][index]
            path.append(node)
            index = node.back_index
            pos = node.back_pos
        path.reverse()
        return path

    # generate Graphviz dot file
    def generate_dotfile(self, filename='lattice.gv'):
        """Dump the lattice as a Graphviz dot file, highlighting the best path."""
        def is_unknown(node):
            return hasattr(node, 'node_type') and node.node_type == NodeType.UNKNOWN
        # traverse lattice and make nodes and edges
        node_ids = []
        edges = []
        path = self.backward()
        for pos in range(0, len(self.snodes) - 1):
            for i in range(0, len(self.snodes[pos])):
                node1 = self.snodes[pos][i]
                if is_unknown(node1) and node1 not in path:
                    continue
                node1_id = (pos, i)
                if node1_id not in node_ids:
                    node_ids.append(node1_id)
                node_len = len(node1.surface) if hasattr(node1, 'surface') else 1
                for j in range(0, len(self.snodes[pos + node_len])):
                    node2 = self.snodes[pos + node_len][j]
                    if is_unknown(node2) and node2 not in path:
                        continue
                    node2_id = (pos+node_len, j)
                    if node2_id not in node_ids:
                        # bug fix: this previously appended node1_id, so nodes
                        # only ever reached as edge targets (e.g. EOS) were
                        # never declared in the dot output.
                        node_ids.append(node2_id)
                    edges.append((node1_id, node2_id))
        # output dot file
        with self.__open_file(filename, mode='w', encoding='utf-8') as f:
            f.write(u'digraph G {\n')
            f.write(u'  rankdir=LR;\n')
            f.write(u'  ranksep=2.0;\n')
            for node_id in node_ids:
                (pos, idx) = node_id
                node = self.snodes[pos][idx]
                id_str = '%d.%d' % (pos, idx)
                label = '%s\\n%s' % (node.node_label(), str(node.cost))
                shape = 'ellipse' if isinstance(node, BOS) or isinstance(node, EOS) else 'box'
                color = 'lightblue' if isinstance(node, BOS) or isinstance(node, EOS) or node in path else 'lightgray'
                font = 'MS UI Gothic' if os.name == 'nt' else ''
                f.write(u'  %s [label="%s",shape=%s,style=filled,fillcolor=%s,fontname="%s"];\n' % (id_str, label, shape, color, font))
            for edge in edges:
                ((pos1, idx1), (pos2, idx2)) = edge
                node1 = self.snodes[pos1][idx1]
                node2 = self.snodes[pos2][idx2]
                id_str1 = '%d.%d' % (pos1, idx1)
                id_str2 = '%d.%d' % (pos2, idx2)
                label = str(self.dic.get_trans_cost(node1.right_id, node2.left_id))
                (color, style) = ('blue', 'bold') if node1 in path and node2 in path else ('black', 'solid')
                f.write(u'  %s -> %s [label="%s",color=%s,style=%s,fontcolor=red];\n' % (id_str1, id_str2, label, color, style))
            f.write('}\n')

    def __open_file(self, filename, mode, encoding):
        # Python 2 open() has no encoding parameter; fall back to codecs.
        if PY3:
            return open(filename, mode=mode, encoding=encoding)
        else:
            import codecs
            return codecs.open(filename, mode, encoding)

    def __str__(self):
        return '\n'.join(','.join(str(node) for node in nodes) for nodes in self.snodes)
|
[
"codecs.open"
] |
[((7687, 7724), 'codecs.open', 'codecs.open', (['filename', 'mode', 'encoding'], {}), '(filename, mode, encoding)\n', (7698, 7724), False, 'import codecs\n')]
|
# Test the MQTT module
# (c) Copyright <NAME>, Jul 2020
#
# VERSION 1.1
import sys,time
from miot import mqtt
def on_error(broker, error):
    """Print a formatted broker error; never let the callback raise."""
    try:
        err = str(error['id'])
        print(f"# ERROR ({err}): {error['message']}, {broker.error(err)}")
    except Exception as e:
        print(e)
def on_connect(broker, msg):
    """On connect: subscribe to every topic and announce our presence."""
    print("# Connect", str(msg))
    print("- Broker=", broker.hostname)
    # Subscribe to topic
    rc, message_id = broker.subscribe('#')
    print("- Subscribed with mid=" + str(message_id))
    # Send message
    broker.publish("miot/event", "test-mqtt.py connected")
def on_disconnect(broker, msg):
    """Log broker disconnects."""
    print("# Disconnect", broker.hostname, str(msg))
def on_publish(broker, msg):
    """Log publish acknowledgements."""
    print("# Publish", broker.hostname, str(msg))
def on_subscribe(broker, msg):
    """Log subscribe acknowledgements."""
    print("# Subscribe", broker.hostname, str(msg))
def on_unsubscribe(broker, msg):
    """Log unsubscribe acknowledgements."""
    print("# Unsubscribe", broker.hostname, str(msg))
def on_message(broker, msg):
    """Log every message received from the broker."""
    print("# Message", broker.hostname, str(msg))
def main():
    """Create a broker client, wire up every callback and listen forever."""
    print("* Create broker")
    broker = mqtt.broker()
    print("* Setup authentication")
    broker.authenticate('RunForTheHills', 'Another<PASSWORD>')
    # Register one handler per broker event.
    print("* Setup event handlers")
    for event, handler in (
            ('error', on_error),
            ('connect', on_connect),
            ('disconnect', on_disconnect),
            ('publish', on_publish),
            ('subscribe', on_subscribe),
            ('unsubscribe', on_unsubscribe),
            ('message', on_message)):
        broker.on(event, handler)
    print("* Connecting to broker")
    broker.connect('192.168.1.228')
    # Listen synchronously (blocking).  Alternatives:
    #   broker.start() ... broker.stop()   # async listener
    #   broker.dispatch() in a loop        # manual dispatch
    # Never busy-wait with time.sleep(); see
    # https://blog.miguelgrinberg.com/post/how-to-make-python-wait
    broker.wait()
# Run the demo only when executed as a script; Ctrl-C (KeyboardInterrupt)
# terminates cleanly via sys.exit().
if __name__=='__main__':
    try:
        main()
    except KeyboardInterrupt:
        sys.exit()
|
[
"miot.mqtt.broker",
"sys.exit"
] |
[((1138, 1151), 'miot.mqtt.broker', 'mqtt.broker', ([], {}), '()\n', (1149, 1151), False, 'from miot import mqtt\n'), ((2255, 2265), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2263, 2265), False, 'import sys, time\n')]
|
import numpy as np
import os
import pandas as pd
from ..utils import (drop_tseconds_volume, read_ndata,
write_ndata,compute_FD,generate_mask,interpolate_masked_data)
from nipype.interfaces.base import (traits, TraitedSpec, BaseInterfaceInputSpec, File,
SimpleInterface )
from nipype import logging
from nipype.utils.filemanip import fname_presuffix
class _removeTRInputSpec(BaseInterfaceInputSpec):
    """Input specification for :class:`removeTR`."""
    # image whose leading volumes will be dropped (bold or cifti)
    bold_file = File(exists=True,mandatory=True, desc=" either bold or nifti ")
    # brain mask; only needed when bold_file is a nifti
    mask_file = File(exists=False,mandatory=False, desc ="required for nifti")
    # dummy-scan period to remove, in seconds
    time_todrop = traits.Float(exists=True,mandatory=True, desc="time in seconds to drop")
    # repetition time, used to convert seconds to volume count
    TR = traits.Float(exists=True,mandatory=True, desc="repetition time in TR")
    # fmriprep confound file whose rows are trimmed alongside the volumes
    fmriprep_conf = File(exists=True,mandatory=False,desc="confound selected from fmriprep confound matrix")
class _removeTROutputSpec(TraitedSpec):
    """Output specification for :class:`removeTR`."""
    # NOTE(review): 'manadatory' is a misspelling of 'mandatory'; traits
    # silently stores unknown keywords as metadata, so it has no effect.
    fmrip_confdropTR = File(exists=True, manadatory=True,
                                desc="fmriprep confound after removing TRs,")
    # the bold/cifti image with the leading volumes removed
    bold_file_TR = File(exists=True,mandatory=True, desc=" either bold or nifti modified")
class removeTR(SimpleInterface):
    r"""
    Drop the first ``time_todrop`` seconds of volumes from a bold/cifti
    image together with the matching rows of the selected fmriprep
    confounds.  The number of volumes removed is derived from
    ``time_todrop`` and the repetition time ``TR``.
    """
    input_spec = _removeTRInputSpec
    output_spec = _removeTROutputSpec

    def _run_interface(self, runtime):
        # Load the image as a 2D matrix (nifti additionally needs the mask).
        bold_arr = read_ndata(datafile=self.inputs.bold_file,
                              maskfile=self.inputs.mask_file)
        confound_df = pd.read_csv(self.inputs.fmriprep_conf, header=None)
        # Trim the dummy-scan period from both data and confounds.
        trimmed_bold, trimmed_conf = drop_tseconds_volume(
            data_matrix=bold_arr,
            confound=confound_df,
            delets=self.inputs.time_todrop,
            TR=self.inputs.TR)
        # Derive output filenames in the working directory.
        self._results['bold_file_TR'] = fname_presuffix(
            self.inputs.bold_file, newpath=os.getcwd(), use_ext=True)
        self._results['fmrip_confdropTR'] = fname_presuffix(
            self.inputs.bold_file, suffix='fmriprep_dropTR.txt',
            newpath=os.getcwd(), use_ext=False)
        write_ndata(data_matrix=trimmed_bold, template=self.inputs.bold_file,
                    mask=self.inputs.mask_file,
                    filename=self._results['bold_file_TR'],
                    tr=self.inputs.TR)
        trimmed_conf.to_csv(self._results['fmrip_confdropTR'],
                            index=False, header=False)
        return runtime
class _censorscrubInputSpec(BaseInterfaceInputSpec):
    """Input specification for :class:`censorscrub`."""
    # raw bold run, used to recompute framewise displacement
    bold_file = File(exists=True, mandatory=True, desc=" raw bold or nifti real")
    # preprocessed bold/cifti whose volumes are actually censored
    in_file = File(exists=True, mandatory=True, desc=" bold or nifti")
    fd_thresh = traits.Float(exists=True, mandatory=True, desc="fd_threshold")
    mask_file = File(exists=False, mandatory=False, desc="required for nifti")
    TR = traits.Float(exists=True, mandatory=True, desc="repetition time in TR")
    custom_conf = traits.Either(
        traits.Undefined, File,
        desc="name of output file with field or true", exists=False, mandatory=False)
    #custom_conf = File(exists=False,mandatory=False,desc=" custom confound")
    fmriprep_conf = File(exists=True, mandatory=True,
                         desc=" confound selected from fmriprep confound matrix ")
    head_radius = traits.Float(exists=False, mandatory=False, default_value=50,
                               desc="head radius in mm ")
    filtertype = traits.Float(exists=False, mandatory=False)
    time_todrop = traits.Float(exists=False, mandatory=False, default_value=0,
                               desc="time in seconds to drop")
    # bug fix: these two traits used the misspelled keyword `exit=` instead of
    # `exists=` (traits stores unknown keywords silently as metadata); now
    # consistent with the sibling traits above.
    low_freq = traits.Float(exists=False, mandatory=False,
                            desc=' low frequency band for nortch filterin breathe per min (bpm)')
    high_freq = traits.Float(exists=False, mandatory=False,
                             desc=' high frequency for nortch filter in breathe per min (bpm)')
class _censorscrubOutputSpec(TraitedSpec):
    """Output specification for :class:`censorscrub`."""
    # NOTE(review): 'manadatory' is a misspelling of 'mandatory'; traits
    # silently stores unknown keywords as metadata, so it has no effect.
    bold_censored = File(exists=True, manadatory=True,
                                desc=" fmriprep censored")
    fmriprepconf_censored = File(exists=True,mandatory=True,
                                desc=" fmriprep_conf censored")
    # only produced when a custom confound file was supplied
    customconf_censored = File(exists=False,mandatory=False, desc="custom conf censored")
    # binary mask (1 = volume flagged for removal)
    tmask = File(exists=True,mandatory=True,desc="temporal mask")
    # framewise-displacement trace used to build the mask
    fd_timeseries = File(exists=True,mandatory=True,desc="fd timeseries")
class censorscrub(SimpleInterface):
    r"""
    generate temporal masking with volumes above fd threshold
    .. testsetup::
    >>> from tempfile import TemporaryDirectory
    >>> tmpdir = TemporaryDirectory()
    >>> os.chdir(tmpdir.name)
    .. doctest::
    >>> cscrub = censorscrub()
    >>> cscrub.inputs.bold_file = cleanbold
    >>> cscrub.inputs.in_file = datafile
    >>> cscrub.inputs.TR = TR
    >>> cscrub.inputs.fd_thresh = fd_thresh
    >>> cscrub.inputs.fmriprep_conf = fmriprepconf
    >>> cscrub.inputs.mask_file = mask
    >>> cscrub.inputs.time_todrop = dummytime
    >>> cscrub.run()
    .. testcleanup::
    >>> tmpdir.cleanup()
    """
    input_spec = _censorscrubInputSpec
    output_spec = _censorscrubOutputSpec

    def _run_interface(self, runtime):
        # Framewise displacement is recomputed from the raw bold run's
        # motion parameters (optionally notch-filtered).
        from ..utils.confounds import (load_confound, load_motion)
        conf_matrix = load_confound(datafile=self.inputs.bold_file)[0]
        motion_conf = load_motion(
            conf_matrix.copy(), TR=self.inputs.TR,
            filtertype=self.inputs.filtertype,
            freqband=[self.inputs.low_freq, self.inputs.high_freq])
        motion_df = pd.DataFrame(
            data=motion_conf.values,
            columns=["rot_x", "rot_y", "rot_z", "trans_x", "trans_y", "trans_z"])
        fd_timeseries = compute_FD(confound=motion_df,
                                   head_radius=self.inputs.head_radius)
        # Read the data and confounds that will actually be censored.
        dataxx = read_ndata(datafile=self.inputs.in_file,
                            maskfile=self.inputs.mask_file)
        fmriprepx_conf = pd.read_csv(self.inputs.fmriprep_conf, header=None)
        if self.inputs.custom_conf:
            customx_conf = pd.read_csv(self.inputs.custom_conf, header=None)
        if self.inputs.time_todrop == 0:
            # No dummy scans were removed: censor directly.
            tmask = generate_mask(fd_res=fd_timeseries,
                                  fd_thresh=self.inputs.fd_thresh)
            if np.sum(tmask) > 0:
                datax_censored = dataxx[:, tmask == 0]
                fmriprepx_censored = fmriprepx_conf.drop(
                    fmriprepx_conf.index[np.where(tmask == 1)])
                if self.inputs.custom_conf:
                    customx_censored = customx_conf.drop(
                        customx_conf.index[np.where(tmask == 1)])
            else:
                # Nothing exceeds the threshold: keep everything.
                datax_censored = dataxx
                fmriprepx_censored = fmriprepx_conf
                if self.inputs.custom_conf:
                    customx_censored = customx_conf
            fd_timeseries2 = fd_timeseries
        else:
            # Dummy volumes were already dropped from the data, so drop the
            # matching leading samples from the FD trace before masking.
            # bug fix: the np.int alias was removed from NumPy (>=1.24);
            # the builtin int() truncates identically here.
            num_vol = int(np.divide(self.inputs.time_todrop, self.inputs.TR))
            fd_timeseries2 = fd_timeseries[num_vol:]
            tmask = generate_mask(fd_res=fd_timeseries2,
                                  fd_thresh=self.inputs.fd_thresh)
            if np.sum(tmask) > 0:
                datax_censored = dataxx[:, tmask == 0]
                fmriprepx_censored = fmriprepx_conf.drop(
                    fmriprepx_conf.index[np.where(tmask == 1)])
                if self.inputs.custom_conf:
                    customx_censored = customx_conf.drop(
                        customx_conf.index[np.where(tmask == 1)])
            else:
                datax_censored = dataxx
                fmriprepx_censored = fmriprepx_conf
                if self.inputs.custom_conf:
                    customx_censored = customx_conf
        # Derive the output filenames in the working directory.
        self._results['bold_censored'] = fname_presuffix(
            self.inputs.in_file,
            newpath=os.getcwd(),
            use_ext=True)
        self._results['fmriprepconf_censored'] = fname_presuffix(
            self.inputs.in_file,
            suffix='fmriprepconf_censored.csv', newpath=os.getcwd(),
            use_ext=False)
        self._results['customconf_censored'] = fname_presuffix(
            self.inputs.in_file,
            suffix='customconf_censored.txt', newpath=os.getcwd(),
            use_ext=False)
        self._results['tmask'] = fname_presuffix(
            self.inputs.in_file,
            suffix='temporalmask.tsv', newpath=os.getcwd(),
            use_ext=False)
        self._results['fd_timeseries'] = fname_presuffix(
            self.inputs.in_file,
            suffix='fd_timeseries.tsv', newpath=os.getcwd(),
            use_ext=False)
        write_ndata(data_matrix=datax_censored, template=self.inputs.in_file,
                    mask=self.inputs.mask_file,
                    filename=self._results['bold_censored'],
                    tr=self.inputs.TR)
        fmriprepx_censored.to_csv(self._results['fmriprepconf_censored'],
                                  index=False, header=False)
        np.savetxt(self._results['tmask'], tmask, fmt="%d", delimiter=',')
        np.savetxt(self._results['fd_timeseries'], fd_timeseries2,
                   fmt="%1.4f", delimiter=',')
        if self.inputs.custom_conf:
            customx_censored.to_csv(self._results['customconf_censored'],
                                    index=False, header=False)
        return runtime
## interpolation
class _interpolateInputSpec(BaseInterfaceInputSpec):
    """Input specification for :class:`interpolate`."""
    # censored data to interpolate over
    in_file = File(exists=True,mandatory=True, desc=" censored or clean bold")
    # full-length image used as the writing template
    bold_file = File(exists=True,mandatory=True, desc=" censored or clean bold")
    # binary mask (1 = volume that was censored)
    tmask = File(exists=True,mandatory=True,desc="temporal mask")
    mask_file = File(exists=False,mandatory=False, desc ="required for nifti")
    TR = traits.Float(exists=True,mandatory=True, desc="repetition time in TR")
class _interpolateOutputSpec(TraitedSpec):
    """Output specification for :class:`interpolate`."""
    # NOTE(review): 'manadatory' is a misspelling of 'mandatory'; traits
    # silently stores unknown keywords as metadata, so it has no effect.
    bold_interpolated = File(exists=True, manadatory=True,
                    desc=" fmriprep censored")
class interpolate(SimpleInterface):
    r"""
    interpolate data over the clean bold
    .. testsetup::
    >>> from tempfile import TemporaryDirectory
    >>> tmpdir = TemporaryDirectory()
    >>> os.chdir(tmpdir.name)
    .. doctest::
    >>> interpolatewf = interpolate()
    >>> interpolatewf.inputs.in_file = datafile
    >>> interpolatewf.inputs.bold_file = rawbold
    >>> interpolatewf.inputs.TR = TR
    >>> interpolatewf.inputs.tmask = temporalmask
    >>> interpolatewf.inputs.mask_file = mask
    >>> interpolatewf.run()
    .. testcleanup::
    >>> tmpdir.cleanup()
    """
    input_spec = _interpolateInputSpec
    output_spec = _interpolateOutputSpec

    def _run_interface(self, runtime):
        censored = read_ndata(datafile=self.inputs.in_file,
                              maskfile=self.inputs.mask_file)
        tmask = np.loadtxt(self.inputs.tmask)
        if censored.shape[1] != len(tmask):
            # Re-expand censored data to the full run length, leaving
            # zeros at the flagged (tmask == 1) positions.
            fulldata = np.zeros([censored.shape[0], len(tmask)])
            fulldata[:, tmask == 0] = censored
        else:
            fulldata = censored
        recon = interpolate_masked_data(img_datax=fulldata, tmask=tmask,
                                        TR=self.inputs.TR)
        self._results['bold_interpolated'] = fname_presuffix(
            self.inputs.in_file, newpath=os.getcwd(), use_ext=True)
        write_ndata(data_matrix=recon, template=self.inputs.bold_file,
                    mask=self.inputs.mask_file, tr=self.inputs.TR,
                    filename=self._results['bold_interpolated'])
        return runtime
|
[
"pandas.DataFrame",
"numpy.divide",
"numpy.sum",
"pandas.read_csv",
"nipype.interfaces.base.traits.Float",
"os.getcwd",
"numpy.savetxt",
"nipype.interfaces.base.File",
"numpy.where",
"numpy.loadtxt",
"nipype.interfaces.base.traits.Either"
] |
[((441, 505), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" either bold or nifti """'}), "(exists=True, mandatory=True, desc=' either bold or nifti ')\n", (445, 505), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((521, 583), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(False)', 'mandatory': '(False)', 'desc': '"""required for nifti"""'}), "(exists=False, mandatory=False, desc='required for nifti')\n", (525, 583), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((602, 675), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""time in seconds to drop"""'}), "(exists=True, mandatory=True, desc='time in seconds to drop')\n", (614, 675), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((684, 755), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""repetition time in TR"""'}), "(exists=True, mandatory=True, desc='repetition time in TR')\n", (696, 755), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((775, 870), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(False)', 'desc': '"""confound selected from fmriprep confound matrix"""'}), "(exists=True, mandatory=False, desc=\n 'confound selected from fmriprep confound matrix')\n", (779, 870), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((929, 1014), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'manadatory': '(True)', 'desc': '"""fmriprep confound after removing TRs,"""'}), "(exists=True, manadatory=True, desc='fmriprep confound after removing 
TRs,'\n )\n", (933, 1014), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((1068, 1140), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" either bold or nifti modified"""'}), "(exists=True, mandatory=True, desc=' either bold or nifti modified')\n", (1072, 1140), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((2652, 2717), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" raw bold or nifti real"""'}), "(exists=True, mandatory=True, desc=' raw bold or nifti real')\n", (2656, 2717), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((2730, 2786), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" bold or nifti"""'}), "(exists=True, mandatory=True, desc=' bold or nifti')\n", (2734, 2786), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((2802, 2864), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""fd_threshold"""'}), "(exists=True, mandatory=True, desc='fd_threshold')\n", (2814, 2864), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((2881, 2943), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(False)', 'mandatory': '(False)', 'desc': '"""required for nifti"""'}), "(exists=False, mandatory=False, desc='required for nifti')\n", (2885, 2943), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((2953, 3024), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""repetition time in TR"""'}), 
"(exists=True, mandatory=True, desc='repetition time in TR')\n", (2965, 3024), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((3042, 3162), 'nipype.interfaces.base.traits.Either', 'traits.Either', (['traits.Undefined', 'File'], {'desc': '"""name of output file with field or true"""', 'exists': '(False)', 'mandatory': '(False)'}), "(traits.Undefined, File, desc=\n 'name of output file with field or true', exists=False, mandatory=False)\n", (3055, 3162), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((3270, 3366), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" confound selected from fmriprep confound matrix """'}), "(exists=True, mandatory=True, desc=\n ' confound selected from fmriprep confound matrix ')\n", (3274, 3366), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((3406, 3500), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(False)', 'mandatory': '(False)', 'default_value': '(50)', 'desc': '"""head radius in mm """'}), "(exists=False, mandatory=False, default_value=50, desc=\n 'head radius in mm ')\n", (3418, 3500), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((3539, 3582), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(False)', 'mandatory': '(False)'}), '(exists=False, mandatory=False)\n', (3551, 3582), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((3600, 3697), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(False)', 'mandatory': '(False)', 'default_value': '(0)', 'desc': '"""time in seconds to drop"""'}), "(exists=False, mandatory=False, default_value=0, desc=\n 'time in seconds to 
drop')\n", (3612, 3697), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((3705, 3821), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exit': '(False)', 'mandatory': '(False)', 'desc': '""" low frequency band for nortch filterin breathe per min (bpm)"""'}), "(exit=False, mandatory=False, desc=\n ' low frequency band for nortch filterin breathe per min (bpm)')\n", (3717, 3821), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((3831, 3944), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exit': '(False)', 'mandatory': '(False)', 'desc': '""" high frequency for nortch filter in breathe per min (bpm)"""'}), "(exit=False, mandatory=False, desc=\n ' high frequency for nortch filter in breathe per min (bpm)')\n", (3843, 3944), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((4005, 4066), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'manadatory': '(True)', 'desc': '""" fmriprep censored"""'}), "(exists=True, manadatory=True, desc=' fmriprep censored')\n", (4009, 4066), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((4133, 4198), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" fmriprep_conf censored"""'}), "(exists=True, mandatory=True, desc=' fmriprep_conf censored')\n", (4137, 4198), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((4261, 4325), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(False)', 'mandatory': '(False)', 'desc': '"""custom conf censored"""'}), "(exists=False, mandatory=False, desc='custom conf censored')\n", (4265, 4325), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, 
File, SimpleInterface\n'), ((4337, 4392), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""temporal mask"""'}), "(exists=True, mandatory=True, desc='temporal mask')\n", (4341, 4392), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((4411, 4466), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""fd timeseries"""'}), "(exists=True, mandatory=True, desc='fd timeseries')\n", (4415, 4466), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((9657, 9722), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" censored or clean bold"""'}), "(exists=True, mandatory=True, desc=' censored or clean bold')\n", (9661, 9722), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((9738, 9803), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '""" censored or clean bold"""'}), "(exists=True, mandatory=True, desc=' censored or clean bold')\n", (9742, 9803), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((9815, 9870), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""temporal mask"""'}), "(exists=True, mandatory=True, desc='temporal mask')\n", (9819, 9870), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((9885, 9947), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(False)', 'mandatory': '(False)', 'desc': '"""required for nifti"""'}), "(exists=False, mandatory=False, desc='required for nifti')\n", (9889, 9947), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, 
SimpleInterface\n'), ((9957, 10028), 'nipype.interfaces.base.traits.Float', 'traits.Float', ([], {'exists': '(True)', 'mandatory': '(True)', 'desc': '"""repetition time in TR"""'}), "(exists=True, mandatory=True, desc='repetition time in TR')\n", (9969, 10028), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((10098, 10159), 'nipype.interfaces.base.File', 'File', ([], {'exists': '(True)', 'manadatory': '(True)', 'desc': '""" fmriprep censored"""'}), "(exists=True, manadatory=True, desc=' fmriprep censored')\n", (10102, 10159), False, 'from nipype.interfaces.base import traits, TraitedSpec, BaseInterfaceInputSpec, File, SimpleInterface\n'), ((1554, 1605), 'pandas.read_csv', 'pd.read_csv', (['self.inputs.fmriprep_conf'], {'header': 'None'}), '(self.inputs.fmriprep_conf, header=None)\n', (1565, 1605), True, 'import pandas as pd\n'), ((5838, 5949), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'motion_conf.values', 'columns': "['rot_x', 'rot_y', 'rot_z', 'trans_x', 'trans_y', 'trans_z']"}), "(data=motion_conf.values, columns=['rot_x', 'rot_y', 'rot_z',\n 'trans_x', 'trans_y', 'trans_z'])\n", (5850, 5949), True, 'import pandas as pd\n'), ((6193, 6244), 'pandas.read_csv', 'pd.read_csv', (['self.inputs.fmriprep_conf'], {'header': 'None'}), '(self.inputs.fmriprep_conf, header=None)\n', (6204, 6244), True, 'import pandas as pd\n'), ((9252, 9318), 'numpy.savetxt', 'np.savetxt', (["self._results['tmask']", 'tmask'], {'fmt': '"""%d"""', 'delimiter': '""","""'}), "(self._results['tmask'], tmask, fmt='%d', delimiter=',')\n", (9262, 9318), True, 'import numpy as np\n'), ((9324, 9414), 'numpy.savetxt', 'np.savetxt', (["self._results['fd_timeseries']", 'fd_timeseries2'], {'fmt': '"""%1.4f"""', 'delimiter': '""","""'}), "(self._results['fd_timeseries'], fd_timeseries2, fmt='%1.4f',\n delimiter=',')\n", (9334, 9414), True, 'import numpy as np\n'), ((11042, 11071), 'numpy.loadtxt', 'np.loadtxt', 
(['self.inputs.tmask'], {}), '(self.inputs.tmask)\n', (11052, 11071), True, 'import numpy as np\n'), ((6324, 6373), 'pandas.read_csv', 'pd.read_csv', (['self.inputs.custom_conf'], {'header': 'None'}), '(self.inputs.custom_conf, header=None)\n', (6335, 6373), True, 'import pandas as pd\n'), ((2008, 2019), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2017, 2019), False, 'import os\n'), ((2206, 2217), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2215, 2217), False, 'import os\n'), ((6566, 6579), 'numpy.sum', 'np.sum', (['tmask'], {}), '(tmask)\n', (6572, 6579), True, 'import numpy as np\n'), ((7171, 7221), 'numpy.divide', 'np.divide', (['self.inputs.time_todrop', 'self.inputs.TR'], {}), '(self.inputs.time_todrop, self.inputs.TR)\n', (7180, 7221), True, 'import numpy as np\n'), ((7428, 7441), 'numpy.sum', 'np.sum', (['tmask'], {}), '(tmask)\n', (7434, 7441), True, 'import numpy as np\n'), ((8103, 8114), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8112, 8114), False, 'import os\n'), ((8309, 8320), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8318, 8320), False, 'import os\n'), ((8512, 8523), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8521, 8523), False, 'import os\n'), ((8694, 8705), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8703, 8705), False, 'import os\n'), ((8885, 8896), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (8894, 8896), False, 'import os\n'), ((11500, 11511), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (11509, 11511), False, 'import os\n'), ((6716, 6736), 'numpy.where', 'np.where', (['(tmask == 1)'], {}), '(tmask == 1)\n', (6724, 6736), True, 'import numpy as np\n'), ((7577, 7597), 'numpy.where', 'np.where', (['(tmask == 1)'], {}), '(tmask == 1)\n', (7585, 7597), True, 'import numpy as np\n'), ((6857, 6877), 'numpy.where', 'np.where', (['(tmask == 1)'], {}), '(tmask == 1)\n', (6865, 6877), True, 'import numpy as np\n'), ((7718, 7738), 'numpy.where', 'np.where', (['(tmask == 1)'], {}), '(tmask == 1)\n', (7726, 7738), True, 'import numpy as np\n')]
|
import ast
import datetime
import hashlib
import json
import os
import secrets
from urllib.parse import urlencode
from django.conf import settings
from django.core import exceptions
from django.core.handlers.wsgi import WSGIRequest
from django.http import QueryDict
try:
    import stripe
except ImportError:
    print('You should install Stripe in order to use the payment logic.')
    raise
else:
    try:
        KEYS = settings.STRIPE_API_KEYS
    except AttributeError:
        # Narrowed from a bare ``except:`` so that unrelated failures
        # (e.g. Django raising ImproperlyConfigured because settings are
        # not configured at all) are not masked by this message.
        raise exceptions.ImproperlyConfigured(
            'You should provide live and secret keys in STRIPE_API_KEYS to use the payment logic.')
def initialize_stripe():
    """Point ``stripe.api_key`` at the secret key matching the environment."""
    # domain = settings.APPLE_PAY_DOMAIN
    environment = 'test' if settings.STRIPE_DEBUG else 'live'
    stripe.api_key = KEYS[environment]['secret']
    # stripe.ApplePayDomain.create(domain_name='nawoka.fr')
    return True
def stripe_context_processor(request):
    """Expose the environment-appropriate Stripe publishable key
    directly in the template context of the application.
    """
    environment = 'test' if settings.STRIPE_DEBUG else 'live'
    return {'publishable_key': KEYS[environment]['publishable']}
# Module import side effect: configure stripe.api_key as soon as this
# module is loaded. NOTE(review): initialize_stripe() always returns True,
# so is_valid carries no real signal -- confirm whether it is read anywhere.
is_valid = initialize_stripe()
def create_payment_reference(n=5):
    """Create a basic order reference such as ``NAW201906126011b0e0b8``.

    The reference is ``NAW`` + the current date zero-padded as ``YYYYMMDD``
    + ``2 * n`` random hexadecimal characters.

    Args:
        n: number of random bytes fed to ``secrets.token_hex`` (each byte
            produces two hex characters).
    """
    current_date = datetime.datetime.now().date()
    # %Y%m%d zero-pads the month and day so every reference has a
    # fixed-width date part (the previous f-string produced e.g.
    # 'NAW201961' for June 1st instead of 'NAW20190601').
    prefix = f'NAW{current_date:%Y%m%d}'
    return prefix + secrets.token_hex(n)
def create_transaction_token(n=1, salt='<PASSWORD>'):
    """Create a payment token for Google enhanced ecommerce"""
    random_parts = [secrets.token_hex(2) for _ in range(n)]
    # The hashed salt appended at the end lets us recognise later on
    # that a token was issued by a valid payment method of ours.
    fingerprint = hashlib.sha256(salt.encode('utf-8')).hexdigest()
    random_parts.append(fingerprint)
    return '-'.join(random_parts)
class PreprocessPayment:
    """A helper class used just before the user accesses the
    payment page. It gathers and checks all the important informations
    such as the firstname, the lastname or address and then
    returns them or sets them in the session.

    Parameters
    ----------
    request: the http request (must be a WSGIRequest)
    set_in_session: set the gathered values in the session
    **kwargs: extra key/values merged into the validated user infos

    Raises
    ------
    ValueError: when ``request`` is not a WSGIRequest or a required
        field is missing from the POST data.
    """
    def __init__(self, request, set_in_session=False, **kwargs):
        if not isinstance(request, WSGIRequest):
            raise ValueError(
                "'request' should be an instance of WSGIRequest got: %s" % request)
        # We receive a QueryDict here. Transform it to a regular dict,
        # then drop the CSRF token which is not user data. pop() with a
        # default avoids a KeyError when the token is absent (e.g. API calls).
        data = request.POST.dict().copy()
        data.pop('csrfmiddlewaretoken', None)
        self.user_infos = {**self._check_final_dict(data), **kwargs}
        self.fitted_user_infos = self._fit_dictionnary(self.user_infos)
        if set_in_session:
            request.session.update({'user_infos': self.fitted_user_infos})

    def _fit_dictionnary(self, data):
        """Serialize ``data`` to a JSON string for Javascript or other
        template requirements."""
        # json.dumps guarantees double-quoted keys/values, which is what
        # Javascript expects. The previous implementation ignored ``data``
        # and round-tripped through str(dict), keeping Python single quotes.
        return json.dumps(data)

    @staticmethod
    def _check_final_dict(values: dict):
        """Check that the incoming dictionnary has all the required
        fields in order to create the payment in stripe.

        Returns ``values`` unchanged when every field is present.
        """
        required_keys = ['firstname', 'lastname', 'email', 'telephone',
                         'address', 'country', 'city', 'zip_code']
        key_errors = [key for key in required_keys if key not in values]
        if key_errors:
            raise ValueError(
                'In order to create a payment, you need to '
                'provide these required keys: {}'.format(', '.join(key_errors)))
        return values
class PostProcessPayment:
    """Call this class once the payment process has been
    completed. It cleans the sessions from the `user infos`
    and adds a `transaction token` that can be used later on.

    Parameters
    ----------
    request: the http request
    enforce_comparision: when True, the ``transaction_token`` from the URL
        must match the one stored in the session before authorizing
    token_name: unused, kept for backward compatibility
    """
    def __init__(self, request, enforce_comparision=False, token_name=None):
        self.is_authorized = False
        # Guard against a missing 'conversion' session entry: the previous
        # code did session.get('conversion')['reference'], which raised a
        # TypeError (None subscript) whenever the key was absent.
        conversion = request.session.get('conversion') or {}
        order_reference = conversion.get('reference')
        # This compares that the transaction token in the url corresponds
        # to the one in the session -; determines for example whether
        # the user can be on this page or not.
        if enforce_comparision:
            result = self.compare(
                request.GET.get('transaction_token'),
                request.session.get('transaction_token')
            )
            if result:
                self.is_authorized = True
            if order_reference:
                self.is_authorized = True

    @staticmethod
    def compare(url_token, session_token):
        """Return True when a url provided token equals the session one.

        This can be helpful for authorizing a user on a specific page
        in the cart e.g. success page.
        """
        return url_token == session_token
class PaymentMixin:
    """Shared helpers used by the payment backends."""

    @staticmethod
    def price_to_stripe(price):
        """Convert a price in euros into the integer amount of cents
        that Stripe expects.

        Example
        -------
        12.95 (euros) -> 1295 (cents)

        Accepts a plain number or a dict exposing the amount under the
        'cart_total' key.
        """
        if isinstance(price, dict):
            try:
                price = price['cart_total']
            except KeyError:
                raise KeyError('Could not get "cart_total" from dict')
        # Multiply *before* truncating: the previous int(price) * 100
        # dropped the cents entirely (12.95 -> 1200 instead of 1295).
        # round() also guards against float artifacts (12.95 * 100 ==
        # 1294.999...).
        return int(round(float(price) * 100))

    @staticmethod
    def _get_full_name(name, surname):
        """Join a first and a last name with a single space."""
        return f'{name} {surname}'

    @staticmethod
    def extract_from_string(user_infos: str):
        """If it happens that the dictionnary from the session
        is a string object, this defintion extracts it"""
        return ast.literal_eval(user_infos)
class SessionPaymentBackend(PaymentMixin):
    """This class is the main entrypoint for creating a session
    based payment with Stripe.

    Description
    -----------
    The flow of a stripe payment is the following:
        1. Stripe JS
        2. SessionBasedBackend
        3. Stripe JS

    Parameters
    ----------
    token_name: the stripe token to retrieve based on the name provided in
        the POST request from your payment page e.g. {token: tok_ABC}

    Result
    ------
    ``process_payment`` returns a tuple of values such as::

        ( state,
            {
                order_reference: ABC,
                transaction: ch_ABC,
                total: 0.00,
                redirect_url: /
            }
        )

    NOTE: The transaction is the one created and returned by Stripe in the
    payment response when the payment is completed.
    Finally, the state is either True or False. The way the payment state
    becomes True is if the process was completed without any errors
    appended in the error array.
    """
    # Must be set by integrators to the Django model holding the carts.
    cart_model = None
    success_url = '/shop/cart/success'
    fail_url = '/shop/cart/payment'
    def __init__(self, request, token_name='token'):
        initialize_stripe()
        self.stripe_token = request.POST.get(token_name)
        if not self.stripe_token:
            raise ValueError('You should provide a token from StripeJS')
        self.request = request
        # Create an order reference
        self.order_reference = create_payment_reference()
        # NOTE(review): never updated afterwards, so
        # set_session_for_post_process always reports a payment of 0 --
        # confirm whether this is intended.
        self.total_of_products_to_buy = 0
        # This parameter returns a charge
        # object that can be used for other
        # kinds of processing afterwards
        self.completed_charge = dict()
        # Access the user once the payment
        # process has been done
        self.anonymous_user = None
        self.errors = []
        # The ID of the charge as returned
        # by the charge response
        self.charge_id = None
        # Create an internal transaction token for tracking
        # purposes for example
        self.transaction_token = create_transaction_token()
        self.cart_id = request.session.get('cart_id')
        if not self.cart_id:
            raise ValueError(
                "You should provide a cart ID number to identify the user's cart")
        user_infos = request.session.get('user_infos')
        if not user_infos:
            raise ValueError(
                'In order to create a payment, you need to provide user informations such as firstname, lastname...')
        else:
            # The session may hold either a string repr of the infos or
            # the plain dict -- normalise to a dict.
            if isinstance(user_infos, str):
                self.user_infos = self.extract_from_string(user_infos)
            elif isinstance(user_infos, dict):
                self.user_infos = user_infos
    def process_payment(self, customer_id=None, payment_debug=False, **kwargs):
        """Charge the cart total to Stripe and return ``(state, data)``.

        Parameters
        ----------
        customer_id: an existing Stripe customer id; when given, the charge
            is billed to that customer instead of the raw card source
        payment_debug: when True, skip the real Stripe call and simulate a
            successful charge
        **kwargs: optional shipping extras ('tracking_number', 'carrier')
        """
        final_dictionnary = dict()
        if self.cart_model is None:
            raise exceptions.ImproperlyConfigured(('You should provide a model from which '
                                            'we can extract the cart total.'))
        try:
            self.cart_queryset = self.cart_model.objects.filter(
                cart_id__iexact=self.cart_id)
            # The model tries to get a get_cart_total() on the model
            # from a cart_manager() model manager. It should return the total
            # sum of items that should be charged to Stripe
            total_of_products_to_buy = self.cart_model.cart_manager.cart_total(
                self.cart_id)
        except:
            raise exceptions.ObjectDoesNotExist(('Could not find a manager of type '
                                            '.cart_manager.cart_total() from which to get the cart total'))
        else:
            if total_of_products_to_buy != 0:
                # NOTE(review): cart_total() appears to return a dict (see
                # total_of_products_to_buy['cart_total'] further below), so
                # both this comparison and price_to_stripe's dict branch
                # rely on that shape -- confirm against the cart manager.
                amount = self.price_to_stripe(total_of_products_to_buy)
                # Now we can create the dict that will be used
                # to process the payment -- In this case, we do
                # need the customer_id for now since we are not
                # registering customers to charge.
                # We just need their card.
                params = {
                    'amount': amount,
                    'currency': 'eur',
                    'source': self.stripe_token,
                    'description': f'Order for cart {self.cart_id}',
                    'receipt_email': self.user_infos['email'],
                    'shipping': {
                        'address': {
                            'line1': self.user_infos['address'],
                            'city': self.user_infos['city'],
                            'postal_code': self.user_infos['zip_code']
                        },
                        'name': 'Clothes',
                        'phone': self.user_infos['telephone']
                    },
                    'metadata': {
                        'name': self._get_full_name(self.user_infos['firstname'], self.user_infos['lastname']),
                        'order_reference': self.order_reference,
                        'shipping': self.user_infos['shipping']
                    }
                }
                if 'tracking_number' in kwargs and 'carrier' in kwargs:
                    # NOTE(review): the condition checks top-level kwargs but
                    # the values are read from kwargs['shipping'][...] -- one
                    # of the two looks wrong; confirm the expected layout.
                    params.update(
                        {
                            'tracking_number': kwargs['shipping']['tracking_number'],
                            'carrier': kwargs['shipping']['carrier']
                        }
                    )
                if customer_id:
                    # To create a customer and charge the card,
                    # we need to create the customer first and then
                    # charge that customer with his card
                    params.pop('source')
                    params.update({'customer': customer_id})
            else:
                self.errors.append('There was no total to charge to Stripe')
            charge = None
            # NOTE(review): when the total was 0, ``params`` is undefined
            # here and Charge.create(**params) below raises NameError
            # instead of failing gracefully -- needs a guard.
            if not payment_debug:
                try:
                    charge = stripe.Charge.create(**params)
                except stripe.error.CardError as e:
                    # Error if card was
                    # declined -; mainly
                    errors = {
                        'status': e.http_status,
                        'type': e.error.type,
                        'code': e.error.code,
                        'param': e.error.param,
                        'message': e.error.message
                    }
                    self.errors.append(errors)
                except stripe.error.RateLimitError as e:
                    self.errors.append('Rate limit exceeded')
                except stripe.error.InvalidRequestError as e:
                    self.errors.append('Invalid request')
                except stripe.error.AuthenticationError as e:
                    self.errors.append('Authentication error')
                except stripe.error.APIConnectionError as e:
                    self.errors.append('API connection error')
                except stripe.error.StripeError as e:
                    self.errors.append('Stripe error')
                except Exception as e:
                    self.errors.append('Unknown error')
            else:
                # A simple dict that passes the payment
                # as successful in order order to debug
                # the rest of the payment process
                charge = {'status': 'succeeded', 'id': 'FAKE ID'}
            if charge:
                if charge['status'] == 'succeeded':
                    self.charge_id = charge['id']
                    parameters = urlencode(
                        {
                            'order_reference': self.order_reference,
                            'transaction': charge['id'],
                            'transaction_token': self.transaction_token
                        }
                    )
                    # If the payment was successful,
                    # then we can redirect the user
                    # to the success page
                    self.final_url = f'{self.success_url}?{parameters}'
                    final_dictionnary.update({
                        'order_reference': self.order_reference,
                        'transaction': charge['id'],
                        'total': total_of_products_to_buy['cart_total'],
                        'redirect_url': self.final_url
                    })
                else:
                    final_dictionnary.update({
                        'reference': self.order_reference,
                        'redirect_url': self.fail_url,
                        'errors': self.errors
                    })
                    self.errors.append('The charge was not successful')
            else:
                final_dictionnary.update({
                    'reference': self.order_reference,
                    'redirect_url': self.fail_url,
                    'errors': self.errors
                })
                self.errors.append('There is no charge')
        if not self.errors:
            if not payment_debug:
                self.request.session.pop('cart_id')
            return (True, final_dictionnary)
        return (False, final_dictionnary)
    def create_customer_and_process_payment(self, payment_debug=False):
        """Create a Stripe customer from the session user infos, then
        charge that customer through ``process_payment``."""
        # TODO: Make final dict a global element
        final_dictionnary = {}
        customer_name = self._get_full_name(self.user_infos['firstname'], self.user_infos['lastname'])
        customer = {
            'source': self.stripe_token,
            'name': customer_name,
            'email': self.user_infos['email'],
            'phone': '',
            'address': {
                'line1': self.user_infos['address'],
                'city': self.user_infos['city'],
                'postal_code': self.user_infos['zip_code']
            },
            'shipping': {
                'name': customer_name,
                'phone': '',
                'address': {
                    'line1': self.user_infos['address'],
                    'city': self.user_infos['city'],
                    'postal_code': self.user_infos['zip_code']
                }
            }
        }
        try:
            customer = stripe.Customer.create(**customer)
        except (stripe.error.StripeError, Exception) as e:
            # NOTE(review): non-Stripe exceptions are caught here too, but
            # e.http_status / e.error only exist on Stripe errors -- this
            # handler can itself raise AttributeError; confirm and narrow.
            errors = {
                'status': e.http_status,
                'type': e.error.type,
                'code': e.error.code,
                'param': e.error.param,
                'message': e.error.message
            }
            self.errors.append(errors)
            final_dictionnary.update({
                'reference': self.order_reference,
                'redirect_url': self.fail_url,
                'errors': self.errors
            })
        else:
            return self.process_payment(customer['id'], payment_debug=payment_debug)
    def set_session_for_post_process(self, **kwargs):
        """Often times, on the success page, you might have a script
        tag for Google Ads conversion. This helper definition sets the
        data for that tag directly in the session.
        """
        conversion = {
            'conversion': {
                'reference': self.order_reference,
                'transaction': self.charge_id or self.transaction_token,
                'payment': str(self.total_of_products_to_buy)
            }
        }
        # conversion['conversion'] = {**conversion['conversion'], **kwargs}
        self.request.session.update(conversion)
        return conversion
class ApplePlay(SessionPaymentBackend):
    """Apple Pay flavoured backend that charges through a Stripe
    PaymentIntent instead of a direct Charge."""

    def process_payment(self, customer_id=None, payment_debug=False, **kwargs):
        """Create and return a Stripe PaymentIntent for the current cart.

        The signature mirrors ``SessionPaymentBackend.process_payment`` so
        both classes remain interchangeable for callers (the previous
        override accepted no arguments and broke substitutability);
        the extra parameters are currently ignored.

        NOTE(review): ``self.total_of_products_to_buy`` is initialised to 0
        and expressed in euros, while PaymentIntent expects an amount in
        cents -- confirm it is set (e.g. via ``price_to_stripe``) before
        calling this method.
        """
        intent = stripe.PaymentIntent.create(
            amount=self.total_of_products_to_buy,
            currency='eur',
            # Verify your integration in this guide by including this parameter
            metadata={'integration_check': 'accept_a_payment'},
        )
        return intent
|
[
"django.core.exceptions.ImproperlyConfigured",
"json.loads",
"urllib.parse.urlencode",
"secrets.token_hex",
"stripe.Customer.create",
"stripe.PaymentIntent.create",
"ast.literal_eval",
"stripe.Charge.create",
"datetime.datetime.now",
"django.core.exceptions.ObjectDoesNotExist"
] |
[((1532, 1552), 'secrets.token_hex', 'secrets.token_hex', (['n'], {}), '(n)\n', (1549, 1552), False, 'import secrets\n'), ((1686, 1706), 'secrets.token_hex', 'secrets.token_hex', (['(2)'], {}), '(2)\n', (1703, 1706), False, 'import secrets\n'), ((6180, 6208), 'ast.literal_eval', 'ast.literal_eval', (['user_infos'], {}), '(user_infos)\n', (6196, 6208), False, 'import ast\n'), ((17856, 17994), 'stripe.PaymentIntent.create', 'stripe.PaymentIntent.create', ([], {'amount': 'self.total_of_products_to_buy', 'currency': '"""eur"""', 'metadata': "{'integration_check': 'accept_a_payment'}"}), "(amount=self.total_of_products_to_buy, currency=\n 'eur', metadata={'integration_check': 'accept_a_payment'})\n", (17883, 17994), False, 'import stripe\n'), ((476, 605), 'django.core.exceptions.ImproperlyConfigured', 'exceptions.ImproperlyConfigured', (['"""You should provide live and secret keys in STRIPE_API_KEYS to use the payment logic."""'], {}), "(\n 'You should provide live and secret keys in STRIPE_API_KEYS to use the payment logic.'\n )\n", (507, 605), False, 'from django.core import exceptions\n'), ((1404, 1427), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1425, 1427), False, 'import datetime\n'), ((9215, 9323), 'django.core.exceptions.ImproperlyConfigured', 'exceptions.ImproperlyConfigured', (['"""You should provide a model from which we can extract the cart total."""'], {}), "(\n 'You should provide a model from which we can extract the cart total.')\n", (9246, 9323), False, 'from django.core import exceptions\n'), ((16437, 16471), 'stripe.Customer.create', 'stripe.Customer.create', ([], {}), '(**customer)\n', (16459, 16471), False, 'import stripe\n'), ((3421, 3438), 'json.loads', 'json.loads', (['items'], {}), '(items)\n', (3431, 3438), False, 'import json\n'), ((9851, 9986), 'django.core.exceptions.ObjectDoesNotExist', 'exceptions.ObjectDoesNotExist', (['"""Could not find a manager of type .cart_manager.cart_total() from which to get the cart 
total"""'], {}), "(\n 'Could not find a manager of type .cart_manager.cart_total() from which to get the cart total'\n )\n", (9880, 9986), False, 'from django.core import exceptions\n'), ((13924, 14055), 'urllib.parse.urlencode', 'urlencode', (["{'order_reference': self.order_reference, 'transaction': charge['id'],\n 'transaction_token': self.transaction_token}"], {}), "({'order_reference': self.order_reference, 'transaction': charge[\n 'id'], 'transaction_token': self.transaction_token})\n", (13933, 14055), False, 'from urllib.parse import urlencode\n'), ((12338, 12368), 'stripe.Charge.create', 'stripe.Charge.create', ([], {}), '(**params)\n', (12358, 12368), False, 'import stripe\n')]
|
# coding=utf-8
# Copyright 2018 The Dopamine Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup script for Dopamine.
This script will install Dopamine as a Python module.
See: https://github.com/google/dopamine
"""
from os import path
from setuptools import find_packages
from setuptools import setup
# Absolute directory containing this setup.py.
# NOTE(review): `here` is computed but never used below -- such a variable
# is usually kept to read a README into long_description; confirm before
# removing it.
here = path.abspath(path.dirname(__file__))
# Runtime dependencies, as pip requirement specifiers.
install_requires = ['gin-config >= 0.1.1', 'absl-py >= 0.2.2',
                    'opencv-python >= 3.4.1.15',
                    'gym >= 0.10.5']
# Test-only dependencies: the runtime set plus mock.
tests_require = ['gin-config >= 0.1.1', 'absl-py >= 0.2.2',
                 'opencv-python >= 3.4.1.15',
                 'gym >= 0.10.5', 'mock >= 1.0.0']
# One-line summary reused for both description fields below.
dopamine_description = (
    'Dopamine: A framework for flexible Reinforcement Learning research')
setup(
    name='dopamine_rl',
    version='2.0.5',
    include_package_data=True,
    packages=find_packages(exclude=['docs']),  # Required
    package_data={'testdata': ['testdata/*.gin']},
    install_requires=install_requires,
    tests_require=tests_require,
    description=dopamine_description,
    long_description=dopamine_description,
    url='https://github.com/google/dopamine',  # Optional
    author='The Dopamine Team',  # Optional
    classifiers=[  # Optional
        'Development Status :: 4 - Beta',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        # Pick your license as you wish
        'License :: OSI Approved :: Apache Software License',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Mathematics',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'Topic :: Software Development',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
    project_urls={  # Optional
        'Documentation': 'https://github.com/google/dopamine',
        'Bug Reports': 'https://github.com/google/dopamine/issues',
        'Source': 'https://github.com/google/dopamine',
    },
    license='Apache 2.0',
    keywords='dopamine reinforcement-learning python machine learning'
)
|
[
"os.path.dirname",
"setuptools.find_packages"
] |
[((840, 862), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (852, 862), False, 'from os import path\n'), ((1368, 1399), 'setuptools.find_packages', 'find_packages', ([], {'exclude': "['docs']"}), "(exclude=['docs'])\n", (1381, 1399), False, 'from setuptools import find_packages\n')]
|
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import collections.abc
import math
from typing import List, Sequence, Tuple
import cv2
import numpy as np
from megengine.data.transform import Transform
from megengine.data.transform.vision import functional as F
# Public API of this module: the transform base class, composition helpers
# and the concrete vision transforms defined below.
__all__ = [
    "VisionTransform",
    "ToMode",
    "Compose",
    "TorchTransformCompose",
    "Pad",
    "Resize",
    "ShortestEdgeResize",
    "RandomResize",
    "RandomCrop",
    "RandomResizedCrop",
    "CenterCrop",
    "RandomHorizontalFlip",
    "RandomVerticalFlip",
    "Normalize",
    "GaussianNoise",
    "BrightnessTransform",
    "SaturationTransform",
    "ContrastTransform",
    "HueTransform",
    "ColorJitter",
    "Lighting",
]
class VisionTransform(Transform):
    r"""Base class of all transforms used in computer vision.

    Calling logic: apply_batch() -> apply() -> _apply_image() and other _apply_*()
    method. If you want to implement a self-defined transform method for image,
    rewrite _apply_image method in subclass.

    Args:
        order: input type order. Input is a tuple containing different structures,
            order is used to specify the order of structures. For example, if your input
            is (image, boxes) type, then the ``order`` should be ("image", "boxes").
            Current available strings and data type are describe below:

            * "image": input image, with shape of `(H, W, C)`.
            * "coords": coordinates, with shape of `(N, 2)`.
            * "boxes": bounding boxes, with shape of `(N, 4)`, "xyxy" format,
              the 1st "xy" represents top left point of a box,
              the 2nd "xy" represents right bottom point.
            * "mask": map used for segmentation, with shape of `(H, W, 1)`.
            * "keypoints": keypoints with shape of `(N, K, 3)`, N for number of instances,
              and K for number of keypoints in one instance. The first two dimensions
              of last axis is coordinate of keypoints and the the 3rd dimension is
              the label of keypoints.
            * "polygons": a sequence containing numpy arrays, its length is the number of instances.
              Each numpy array represents polygon coordinate of one instance.
            * "category": categories for some data type. For example, "image_category"
              means category of the input image and "boxes_category" means categories of
              bounding boxes.
            * "info": information for images such as image shapes and image path.

        You can also customize your data types only if you implement the corresponding
        _apply_*() methods, otherwise ``NotImplementedError`` will be raised.
    """
    def __init__(self, order=None):
        super().__init__()
        if order is None:
            order = ("image",)
        elif not isinstance(order, collections.abc.Sequence):
            raise ValueError(
                "order should be a sequence, but got order={}".format(order)
            )
        for k in order:
            if k in ("batch",):
                raise ValueError("{} is invalid data type".format(k))
            elif k.endswith("category") or k.endswith("info"):
                # when the key is *category or info, we should do nothing
                # if the corresponding apply methods are not implemented.
                continue
            elif self._get_apply(k) is None:
                raise NotImplementedError("{} is unsupported data type".format(k))
        self.order = order
    def apply_batch(self, inputs: Sequence[Tuple]):
        r"""Apply transform on batch input data."""
        return tuple(self.apply(input) for input in inputs)
    def apply(self, input: Tuple):
        r"""Apply transform on single input data."""
        if not isinstance(input, tuple):
            input = (input,)
        output = []
        # Dispatch each positional element to its _apply_* handler; elements
        # without a handler pass through unchanged.
        for i in range(min(len(input), len(self.order))):
            apply_func = self._get_apply(self.order[i])
            if apply_func is None:
                output.append(input[i])
            else:
                output.append(apply_func(input[i]))
        # Extra trailing elements beyond the declared order are untouched.
        if len(input) > len(self.order):
            output.extend(input[len(self.order) :])
        # Single-element results are unwrapped for convenience.
        if len(output) == 1:
            output = output[0]
        else:
            output = tuple(output)
        return output
    def _get_apply(self, key):
        # Resolve e.g. "image" -> self._apply_image; None when not implemented.
        return getattr(self, "_apply_{}".format(key), None)
    def _get_image(self, input: Tuple):
        # Fetch the element at the position declared as "image" in the order.
        if not isinstance(input, tuple):
            input = (input,)
        return input[self.order.index("image")]
    def _apply_image(self, image):
        raise NotImplementedError
    def _apply_coords(self, coords):
        raise NotImplementedError
    def _apply_boxes(self, boxes):
        # Expand every xyxy box into its 4 corner points, transform the
        # corners as coordinates, then take the axis-aligned bounding box
        # of the transformed corners.
        idxs = np.array([(0, 1), (2, 1), (0, 3), (2, 3)]).flatten()
        coords = np.asarray(boxes).reshape(-1, 4)[:, idxs].reshape(-1, 2)
        coords = self._apply_coords(coords).reshape((-1, 4, 2))
        minxy = coords.min(axis=1)
        maxxy = coords.max(axis=1)
        trans_boxes = np.concatenate((minxy, maxxy), axis=1)
        return trans_boxes
    def _apply_mask(self, mask):
        raise NotImplementedError
    def _apply_keypoints(self, keypoints):
        # Transform only the xy part; the visibility/label channel is kept.
        coords, visibility = keypoints[..., :2], keypoints[..., 2:]
        trans_coords = [self._apply_coords(p) for p in coords]
        return np.concatenate((trans_coords, visibility), axis=-1)
    def _apply_polygons(self, polygons):
        # One coordinate transform per polygon, per instance.
        return [[self._apply_coords(p) for p in instance] for instance in polygons]
class ToMode(VisionTransform):
    r"""Change input data to a target mode.

    For example, most transforms use HWC mode image, while the neural
    network might use CHW mode input tensor.

    Args:
        mode: output mode of input. Default: "CHW"
        order: the same with :class:`VisionTransform`
    """
    def __init__(self, mode="CHW", *, order=None):
        super().__init__(order)
        assert mode in ["CHW"], "unsupported mode: {}".format(mode)
        self.mode = mode

    def _apply_image(self, image):
        if self.mode != "CHW":
            return image
        # HWC -> CHW, materialised contiguously for downstream tensor ops.
        return np.ascontiguousarray(np.rollaxis(image, 2))

    def _apply_coords(self, coords):
        # Coordinates are independent of the channel layout.
        return coords

    def _apply_mask(self, mask):
        if self.mode != "CHW":
            return mask
        return np.ascontiguousarray(np.rollaxis(mask, 2))
class Compose(VisionTransform):
    r"""Composes several transfomations together.

    Args:
        transforms: list of :class:`VisionTransform` to compose.
        batch_compose: whether keep the same transform order in batch data when shuffle.
        shuffle_indices: indices used for random shuffle, start at 1.
        order: the same with :class:`VisionTransform`

    .. seealso:: Refer to :mod:`~.data.transform` module for vision transform APIs.

    Examples:
        >>> import megengine.data.transform as T
        >>> T.Compose([  # doctest: +SKIP
        ...     T.RandomHorizontalFlip(),  # 1st
        ...     T.RandomVerticalFlip(),    # 2nd
        ...     T.CenterCrop(100),         # 3rd
        ...     T.ToMode("CHW"),           # 4th
        ...     ],
        ...     shuffle_indices=[(1, 2, 3)]
        ... )

    In this case, ``shuffle_indices=[(1, 2, 3)]`` means the first three
    transforms are applied in a random order for each sample while the 4th
    keeps its position. Several tuples may be given, e.g. ``[(1, 3), (2, 4)]``:
    each tuple forms an independent group that is shuffled internally.

    .. warning::
        Different samples within each batch will also use random transfomation orders,
        unless ``batch_compose`` is set to ``True``.
    """
    def __init__(
        self,
        transforms: List[VisionTransform] = [],
        batch_compose: bool = False,
        shuffle_indices: List[Tuple] = None,
        *,
        order=None
    ):
        # NOTE(review): mutable default ``transforms=[]`` is shared across
        # calls; harmless while it is never mutated here, but fragile.
        super().__init__(order)
        self.transforms = transforms
        self._set_order()
        if batch_compose and shuffle_indices is not None:
            raise ValueError(
                "Do not support shuffle when apply transforms along the whole batch"
            )
        self.batch_compose = batch_compose
        if shuffle_indices is not None:
            # Convert the public 1-based indices to 0-based list positions.
            shuffle_indices = [tuple(x - 1 for x in idx) for idx in shuffle_indices]
        self.shuffle_indices = shuffle_indices
    def _set_order(self):
        # Propagate this Compose's data order to every child (recursively
        # for nested Compose instances).
        for t in self.transforms:
            t.order = self.order
            if isinstance(t, Compose):
                t._set_order()
    def apply_batch(self, inputs: Sequence[Tuple]):
        if self.batch_compose:
            # Apply each transform to the whole batch so every sample sees
            # the exact same transform order.
            for t in self.transforms:
                inputs = t.apply_batch(inputs)
            return inputs
        else:
            # Per-sample application (each sample may get its own shuffle).
            return super().apply_batch(inputs)
    def apply(self, input: Tuple):
        for t in self._shuffle():
            input = t.apply(input)
        return input
    def _shuffle(self):
        # Build a per-call transform order: within each index group, the
        # members are randomly permuted; everything else stays in place.
        if self.shuffle_indices is not None:
            source_idx = list(range(len(self.transforms)))
            for idx in self.shuffle_indices:
                shuffled = np.random.permutation(idx).tolist()
                for src, dst in zip(idx, shuffled):
                    source_idx[src] = dst
            return [self.transforms[i] for i in source_idx]
        else:
            return self.transforms
class TorchTransformCompose(VisionTransform):
    r"""Compose class used for transforms in torchvision, only support PIL image,
    some transforms with tensor in torchvision are not supported,
    such as Normalize and ToTensor in torchvision.

    Args:
        transforms: the same with ``Compose``.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, transforms, *, order=None):
        super().__init__(order)
        self.transforms = transforms
    def _apply_image(self, image):
        from PIL import Image

        try:
            import accimage
        except ImportError:
            accimage = None
        # Convert the incoming BGR numpy image (either CHW or HWC) to the
        # RGB layout PIL expects before running the torchvision transforms.
        if image.shape[0] == 3:  # CHW
            image = np.ascontiguousarray(image[[2, 1, 0]])
        elif image.shape[2] == 3:  # HWC
            image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        image = Image.fromarray(image.astype(np.uint8))
        for t in self.transforms:
            image = t(image)
        # Back to numpy if the pipeline left us with a PIL/accimage image.
        if isinstance(image, Image.Image) or (
            accimage is not None and isinstance(image, accimage.Image)
        ):
            image = np.array(image, dtype=np.uint8)
        # And back from RGB to the module's BGR convention.
        if image.shape[0] == 3:  # CHW
            image = np.ascontiguousarray(image[[2, 1, 0]])
        elif image.shape[2] == 3:  # HWC
            image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
        return image
class Pad(VisionTransform):
    r"""Pad the input data on its four sides.

    Args:
        size: padding size of input image, it could be integer or sequence.
            An integer pads all four directions equally; a 2-sequence pads
            the bottom and right side; a 4-sequence gives the
            (top, down, left, right) pad sizes explicitly.
        value: padding value of image, could be a sequence of int or float.
            if it is float value, the dtype of image will be casted to float32 also.
        mask_value: padding value of segmentation map.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, size=0, value=0, mask_value=0, *, order=None):
        super().__init__(order)
        self.size = self._normalize_size(size)
        self.value = value
        if not isinstance(mask_value, int):
            raise ValueError(
                "mask_value should be a positive integer, "
                "but got mask_value={}".format(mask_value)
            )
        self.mask_value = mask_value

    @staticmethod
    def _normalize_size(size):
        # Expand the three accepted spellings into (top, down, left, right).
        if isinstance(size, int):
            return (size, size, size, size)
        if isinstance(size, collections.abc.Sequence):
            if len(size) == 2:
                return (0, size[0], 0, size[1])
            if len(size) == 4:
                return size
        raise ValueError(
            "size should be a list/tuple which contains "
            "(top, down, left, right) four pad sizes."
        )

    def _apply_image(self, image):
        return F.pad(image, self.size, self.value)

    def _apply_coords(self, coords):
        # Shift x by the left pad and y by the top pad.
        coords[:, 0] += self.size[2]
        coords[:, 1] += self.size[0]
        return coords

    def _apply_mask(self, mask):
        return F.pad(mask, self.size, self.mask_value)
class Resize(VisionTransform):
    r"""Resize the input data.

    Args:
        output_size: target size of image, with (height, width) shape.
            An integer resizes the shorter edge to that value while keeping
            the aspect ratio.
        interpolation: interpolation method. All methods are listed below:

            * cv2.INTER_NEAREST – a nearest-neighbor interpolation.
            * cv2.INTER_LINEAR – a bilinear interpolation (used by default).
            * cv2.INTER_AREA – resampling using pixel area relation.
            * cv2.INTER_CUBIC – a bicubic interpolation over 4×4 pixel neighborhood.
            * cv2.INTER_LANCZOS4 – a Lanczos interpolation over 8×8 pixel neighborhood.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, output_size, interpolation=cv2.INTER_LINEAR, *, order=None):
        super().__init__(order)
        self.output_size = output_size
        self.interpolation = interpolation

    def apply(self, input: Tuple):
        # Compute the (h, w, th, tw) plan once so every _apply_* handler
        # uses consistent source/target shapes.
        self._shape_info = self._get_shape(self._get_image(input))
        return super().apply(input)

    def _apply_image(self, image):
        h, w, th, tw = self._shape_info
        if (h, w) == (th, tw):
            return image  # already at the requested size
        return F.resize(image, (th, tw), self.interpolation)

    def _apply_coords(self, coords):
        h, w, th, tw = self._shape_info
        if (h, w) == (th, tw):
            return coords
        coords[:, 0] = coords[:, 0] * (tw / w)
        coords[:, 1] = coords[:, 1] * (th / h)
        return coords

    def _apply_mask(self, mask):
        h, w, th, tw = self._shape_info
        if (h, w) == (th, tw):
            return mask
        # Nearest neighbour keeps label values intact for segmentation maps.
        return F.resize(mask, (th, tw), cv2.INTER_NEAREST)

    def _get_shape(self, image):
        h, w, _ = image.shape
        if not isinstance(self.output_size, int):
            return (h, w, *self.output_size)
        size = self.output_size
        if min(h, w) == size:
            return h, w, h, w
        # Scale the shorter edge to `size`, preserving the aspect ratio.
        if h < w:
            return h, w, size, int(size * w / h)
        return h, w, int(size * h / w), size
class ShortestEdgeResize(VisionTransform):
    r"""Resize the input data with specified shortest edge.

    The shortest edge of the image is resized to a sampled size — a random
    integer in ``[min_size[0], min_size[1]]`` for the "range" style, or a
    random choice from ``min_size`` for the "choice" style — keeping the
    aspect ratio. If the resulting longest edge would exceed ``max_size``,
    the whole target is scaled down so that it fits.
    """
    def __init__(
        self,
        min_size,
        max_size,
        sample_style="range",
        interpolation=cv2.INTER_LINEAR,
        *,
        order=None
    ):
        super().__init__(order)
        if sample_style not in ("range", "choice"):
            raise NotImplementedError(
                "{} is unsupported sample style".format(sample_style)
            )
        self.sample_style = sample_style
        # A single int is treated as the degenerate range [min_size, min_size].
        if isinstance(min_size, int):
            min_size = (min_size, min_size)
        self.min_size = min_size
        self.max_size = max_size
        self.interpolation = interpolation
    def apply(self, input: Tuple):
        # Sample the target shape once per sample so image, coords and mask
        # are all resized consistently.
        self._shape_info = self._get_shape(self._get_image(input))
        return super().apply(input)
    def _apply_image(self, image):
        h, w, th, tw = self._shape_info
        if h == th and w == tw:
            return image
        return F.resize(image, (th, tw), self.interpolation)
    def _apply_coords(self, coords):
        h, w, th, tw = self._shape_info
        if h == th and w == tw:
            return coords
        # Rescale x by the width ratio and y by the height ratio.
        coords[:, 0] = coords[:, 0] * (tw / w)
        coords[:, 1] = coords[:, 1] * (th / h)
        return coords
    def _apply_mask(self, mask):
        h, w, th, tw = self._shape_info
        if h == th and w == tw:
            return mask
        # Nearest-neighbor keeps discrete label values intact.
        return F.resize(mask, (th, tw), cv2.INTER_NEAREST)
    def _get_shape(self, image):
        # Returns (h, w, th, tw): source and target height/width.
        h, w, _ = image.shape
        if self.sample_style == "range":
            size = np.random.randint(self.min_size[0], self.min_size[1] + 1)
        else:
            size = np.random.choice(self.min_size)
        scale = size / min(h, w)
        if h < w:
            th, tw = size, scale * w
        else:
            th, tw = scale * h, size
        # Shrink uniformly if the longer edge overshoots max_size.
        if max(th, tw) > self.max_size:
            scale = self.max_size / max(th, tw)
            th = th * scale
            tw = tw * scale
        th = int(round(th))
        tw = int(round(tw))
        return h, w, th, tw
class RandomResize(VisionTransform):
    r"""Resize the input data by a random scale factor.

    Args:
        scale_range: range of scaling.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, scale_range, interpolation=cv2.INTER_LINEAR, *, order=None):
        super().__init__(order)
        self.scale_range = scale_range
        self.interpolation = interpolation
    def apply(self, input: Tuple):
        # Draw one scale factor per sample and reuse it for every target.
        self._shape_info = self._get_shape(self._get_image(input))
        return super().apply(input)
    def _apply_image(self, image):
        src_h, src_w, dst_h, dst_w = self._shape_info
        if (src_h, src_w) == (dst_h, dst_w):
            return image
        return F.resize(image, (dst_h, dst_w), self.interpolation)
    def _apply_coords(self, coords):
        src_h, src_w, dst_h, dst_w = self._shape_info
        if (src_h, src_w) == (dst_h, dst_w):
            return coords
        coords[:, 0] = coords[:, 0] * (dst_w / src_w)
        coords[:, 1] = coords[:, 1] * (dst_h / src_h)
        return coords
    def _apply_mask(self, mask):
        src_h, src_w, dst_h, dst_w = self._shape_info
        if (src_h, src_w) == (dst_h, dst_w):
            return mask
        # Nearest-neighbor keeps discrete label values intact.
        return F.resize(mask, (dst_h, dst_w), cv2.INTER_NEAREST)
    def _get_shape(self, image):
        # Returns (src_h, src_w, dst_h, dst_w) using a uniform random scale.
        src_h, src_w, _ = image.shape
        factor = np.random.uniform(*self.scale_range)
        return src_h, src_w, int(round(src_h * factor)), int(round(src_w * factor))
class RandomCrop(VisionTransform):
    r"""Crop the input data randomly. Before applying the crop transform,
    pad the image first. If target size is still bigger than the size of
    padded image, pad the image size to target size.

    Args:
        output_size: target size of output image, with (height, width) shape.
        padding_size: the same with `size` in ``Pad``.
        padding_value: the same with `value` in ``Pad``.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(
        self,
        output_size,
        padding_size=0,
        padding_value=[0, 0, 0],
        padding_maskvalue=0,
        *,
        order=None
    ):
        super().__init__(order)
        if isinstance(output_size, int):
            self.output_size = (output_size, output_size)
        else:
            self.output_size = output_size
        # NOTE(review): padding_value has a mutable default shared across
        # instances; it appears to only be read here — confirm upstream.
        self.pad = Pad(padding_size, padding_value, order=self.order)
        self.padding_value = padding_value
        self.padding_maskvalue = padding_maskvalue
    def apply(self, input):
        # Pad first, then sample a single crop origin for the whole sample.
        input = self.pad.apply(input)
        self._h, self._w, _ = self._get_image(input).shape
        self._th, self._tw = self.output_size
        # Origin is uniform over all positions keeping the crop in-bounds;
        # max(0, ...) guards the case where the target exceeds the image.
        self._x = np.random.randint(0, max(0, self._w - self._tw) + 1)
        self._y = np.random.randint(0, max(0, self._h - self._th) + 1)
        return super().apply(input)
    def _apply_image(self, image):
        # If the target is still larger than the padded image, pad the
        # remaining difference (presumably bottom/right — see F.pad).
        if self._th > self._h:
            image = F.pad(image, (self._th - self._h, 0), self.padding_value)
        if self._tw > self._w:
            image = F.pad(image, (0, self._tw - self._w), self.padding_value)
        return image[self._y : self._y + self._th, self._x : self._x + self._tw]
    def _apply_coords(self, coords):
        # Shift coordinates into the crop's frame of reference.
        coords[:, 0] -= self._x
        coords[:, 1] -= self._y
        return coords
    def _apply_mask(self, mask):
        # Same extra padding as the image, but with the mask fill value.
        if self._th > self._h:
            mask = F.pad(mask, (self._th - self._h, 0), self.padding_maskvalue)
        if self._tw > self._w:
            mask = F.pad(mask, (0, self._tw - self._w), self.padding_maskvalue)
        return mask[self._y : self._y + self._th, self._x : self._x + self._tw]
class RandomResizedCrop(VisionTransform):
    r"""Crop the input data to random size and aspect ratio.
    A crop of random size (default: of 0.08 to 1.0) of the original size and a random
    aspect ratio (default: of 3/4 to 1.33) of the original aspect ratio is made.
    After applying crop transfrom, the input data will be resized to given size.

    Args:
        output_size: target size of output image, with (height, width) shape.
        scale_range: range of size of the origin size cropped. Default: (0.08, 1.0)
        ratio_range: range of aspect ratio of the origin aspect ratio cropped. Default: (0.75, 1.33)
        order: the same with :class:`VisionTransform`.
    """
    def __init__(
        self,
        output_size,
        scale_range=(0.08, 1.0),
        ratio_range=(3.0 / 4, 4.0 / 3),
        interpolation=cv2.INTER_LINEAR,
        *,
        order=None
    ):
        super().__init__(order)
        if isinstance(output_size, int):
            self.output_size = (output_size, output_size)
        else:
            self.output_size = output_size
        assert (
            scale_range[0] <= scale_range[1]
        ), "scale_range should be of kind (min, max)"
        assert (
            ratio_range[0] <= ratio_range[1]
        ), "ratio_range should be of kind (min, max)"
        self.scale_range = scale_range
        self.ratio_range = ratio_range
        self.interpolation = interpolation
    def apply(self, input: Tuple):
        # Sample the crop rectangle once so all targets share it.
        self._coord_info = self._get_coord(self._get_image(input))
        return super().apply(input)
    def _apply_image(self, image):
        x, y, w, h = self._coord_info
        cropped_img = image[y : y + h, x : x + w]
        return F.resize(cropped_img, self.output_size, self.interpolation)
    def _apply_coords(self, coords):
        x, y, w, h = self._coord_info
        # Translate into the crop frame, then rescale to the output size.
        coords[:, 0] = (coords[:, 0] - x) * self.output_size[1] / w
        coords[:, 1] = (coords[:, 1] - y) * self.output_size[0] / h
        return coords
    def _apply_mask(self, mask):
        x, y, w, h = self._coord_info
        cropped_mask = mask[y : y + h, x : x + w]
        # Nearest-neighbor keeps discrete label values intact.
        return F.resize(cropped_mask, self.output_size, cv2.INTER_NEAREST)
    def _get_coord(self, image, attempts=10):
        # Rejection-sample a crop whose area and aspect ratio fall in the
        # configured ranges and which fits inside the image.
        height, width, _ = image.shape
        area = height * width
        for _ in range(attempts):
            target_area = np.random.uniform(*self.scale_range) * area
            # Sampling the ratio in log space makes reciprocal ratios
            # (e.g. 3/4 and 4/3) equally likely.
            log_ratio = tuple(math.log(x) for x in self.ratio_range)
            aspect_ratio = math.exp(np.random.uniform(*log_ratio))
            w = int(round(math.sqrt(target_area * aspect_ratio)))
            h = int(round(math.sqrt(target_area / aspect_ratio)))
            if 0 < w <= width and 0 < h <= height:
                x = np.random.randint(0, width - w + 1)
                y = np.random.randint(0, height - h + 1)
                return x, y, w, h
        # Fallback to central crop
        in_ratio = float(width) / float(height)
        if in_ratio < min(self.ratio_range):
            w = width
            h = int(round(w / min(self.ratio_range)))
        elif in_ratio > max(self.ratio_range):
            h = height
            w = int(round(h * max(self.ratio_range)))
        else: # whole image
            w = width
            h = height
        x = (width - w) // 2
        y = (height - h) // 2
        return x, y, w, h
class CenterCrop(VisionTransform):
    r"""Crop the given input data at the center.

    Args:
        output_size: target size of output image, with (height, width) shape.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, output_size, *, order=None):
        super().__init__(order)
        # Normalize an int target into a square (height, width) pair.
        if isinstance(output_size, int):
            output_size = (output_size, output_size)
        self.output_size = output_size
    def apply(self, input: Tuple):
        # Compute the crop origin once per sample so all targets agree.
        self._coord_info = self._get_coord(self._get_image(input))
        return super().apply(input)
    def _apply_image(self, image):
        left, top = self._coord_info
        crop_h, crop_w = self.output_size
        return image[top : top + crop_h, left : left + crop_w]
    def _apply_coords(self, coords):
        left, top = self._coord_info
        # Shift coordinates into the crop's frame of reference.
        coords[:, 0] -= left
        coords[:, 1] -= top
        return coords
    def _apply_mask(self, mask):
        left, top = self._coord_info
        crop_h, crop_w = self.output_size
        return mask[top : top + crop_h, left : left + crop_w]
    def _get_coord(self, image):
        # Returns the (x, y) origin of the centered crop window.
        crop_h, crop_w = self.output_size
        img_h, img_w, _ = image.shape
        assert crop_h <= img_h and crop_w <= img_w, "output size is bigger than image size"
        return int(round((img_w - crop_w) / 2.0)), int(round((img_h - crop_h) / 2.0))
class RandomHorizontalFlip(VisionTransform):
    r"""Horizontally flip the input data randomly with a given probability.

    Args:
        prob: probability of the input data being flipped. Default: 0.5
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, prob: float = 0.5, *, order=None):
        super().__init__(order)
        self.prob = prob
    def apply(self, input: Tuple):
        # One coin flip per sample keeps image, coords and mask consistent.
        self._flipped = np.random.random() < self.prob
        self._w = self._get_image(input).shape[1]
        return super().apply(input)
    def _apply_image(self, image):
        return F.flip(image, flipCode=1) if self._flipped else image
    def _apply_coords(self, coords):
        if self._flipped:
            # Mirror x coordinates around the vertical image axis.
            coords[:, 0] = self._w - coords[:, 0]
        return coords
    def _apply_mask(self, mask):
        return F.flip(mask, flipCode=1) if self._flipped else mask
class RandomVerticalFlip(VisionTransform):
    r"""Vertically flip the input data randomly with a given probability.

    Args:
        prob: probability of the input data being flipped. Default: 0.5
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, prob: float = 0.5, *, order=None):
        super().__init__(order)
        self.prob = prob
    def apply(self, input: Tuple):
        # One coin flip per sample keeps image, coords and mask consistent.
        self._flipped = np.random.random() < self.prob
        self._h = self._get_image(input).shape[0]
        return super().apply(input)
    def _apply_image(self, image):
        return F.flip(image, flipCode=0) if self._flipped else image
    def _apply_coords(self, coords):
        if self._flipped:
            # Mirror y coordinates around the horizontal image axis.
            coords[:, 1] = self._h - coords[:, 1]
        return coords
    def _apply_mask(self, mask):
        return F.flip(mask, flipCode=0) if self._flipped else mask
class Normalize(VisionTransform):
    r"""Normalize the input data with mean and standard deviation.

    Given mean: ``(M1,...,Mn)`` and std: ``(S1,..,Sn)`` for ``n`` channels,
    this transform will normalize each channel of the input data.
    ``output[channel] = (input[channel] - mean[channel]) / std[channel]``

    Args:
        mean: sequence of means for each channel.
        std: sequence of standard deviations for each channel.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, mean=0.0, std=1.0, *, order=None):
        super().__init__(order)
        # Stored as float32 arrays so they broadcast over image channels.
        self.mean = np.array(mean, dtype=np.float32)
        self.std = np.array(std, dtype=np.float32)
    def _apply_image(self, image):
        return (image - self.mean) / self.std
    def _apply_coords(self, coords):
        # Per-pixel normalization does not move geometry.
        return coords
    def _apply_mask(self, mask):
        return mask
class GaussianNoise(VisionTransform):
    r"""Add random gaussian noise to the input data.

    Gaussian noise is generated with given mean and std.

    Args:
        mean: Gaussian mean used to generate noise.
        std: Gaussian standard deviation used to generate noise.
        order: the same with :class:`VisionTransform`
    """
    def __init__(self, mean=0.0, std=1.0, *, order=None):
        super().__init__(order)
        self.mean = np.array(mean, dtype=np.float32)
        self.std = np.array(std, dtype=np.float32)
    def _apply_image(self, image):
        original_dtype = image.dtype
        # Noise is scaled by 255 to operate in 8-bit pixel-value units.
        noise = np.random.normal(self.mean, self.std, image.shape) * 255
        noisy = image + noise.astype(np.float32)
        return np.clip(noisy, 0, 255).astype(original_dtype)
    def _apply_coords(self, coords):
        # Additive pixel noise does not move geometry.
        return coords
    def _apply_mask(self, mask):
        return mask
class BrightnessTransform(VisionTransform):
    r"""Adjust brightness of the input data.

    Args:
        value: how much to adjust the brightness. Can be any
            non negative number. 0 gives the original image.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, value, *, order=None):
        super().__init__(order)
        if value < 0:
            raise ValueError("brightness value should be non-negative")
        self.value = value
    def _apply_image(self, image):
        if self.value == 0:
            # Zero means "no jitter": return the input untouched.
            return image
        original_dtype = image.dtype
        # Random factor around 1.0, clamped below so it never goes negative.
        factor = np.random.uniform(max(0, 1 - self.value), 1 + self.value)
        scaled = image.astype(np.float32) * factor
        return scaled.clip(0, 255).astype(original_dtype)
    def _apply_coords(self, coords):
        return coords
    def _apply_mask(self, mask):
        return mask
class ContrastTransform(VisionTransform):
    r"""Adjust contrast of the input data.

    Args:
        value: how much to adjust the contrast. Can be any
            non negative number. 0 gives the original image.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, value, *, order=None):
        super().__init__(order)
        if value < 0:
            raise ValueError("contrast value should be non-negative")
        self.value = value
    def _apply_image(self, image):
        if self.value == 0:
            # Zero means "no jitter": return the input untouched.
            return image
        original_dtype = image.dtype
        img_f = image.astype(np.float32)
        factor = np.random.uniform(max(0, 1 - self.value), 1 + self.value)
        # Blend between the image and its mean grayscale intensity.
        blended = img_f * factor + F.to_gray(img_f).mean() * (1 - factor)
        return blended.clip(0, 255).astype(original_dtype)
    def _apply_coords(self, coords):
        return coords
    def _apply_mask(self, mask):
        return mask
class SaturationTransform(VisionTransform):
    r"""Adjust saturation of the input data.

    Args:
        value: how much to adjust the saturation. Can be any
            non negative number. 0 gives the original image.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, value, *, order=None):
        super().__init__(order)
        if value < 0:
            raise ValueError("saturation value should be non-negative")
        self.value = value
    def _apply_image(self, image):
        if self.value == 0:
            # Zero means "no jitter": return the input untouched.
            return image
        original_dtype = image.dtype
        img_f = image.astype(np.float32)
        factor = np.random.uniform(max(0, 1 - self.value), 1 + self.value)
        # Blend between the image and its grayscale version pixel-wise.
        blended = img_f * factor + F.to_gray(img_f) * (1 - factor)
        return blended.clip(0, 255).astype(original_dtype)
    def _apply_coords(self, coords):
        return coords
    def _apply_mask(self, mask):
        return mask
class HueTransform(VisionTransform):
    r"""Adjust hue of the input data.

    Args:
        value: how much to adjust the hue. Can be any number
            between 0 and 0.5, 0 gives the original image.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, value, *, order=None):
        super().__init__(order)
        if value < 0 or value > 0.5:
            raise ValueError("hue value should be in [0.0, 0.5]")
        self.value = value
    def _apply_image(self, image):
        if self.value == 0:
            # Zero means "no jitter": return the input untouched.
            return image
        dtype = image.dtype
        image = image.astype(np.uint8)
        # Full-range HSV so hue occupies the whole uint8 span [0, 255].
        hsv_image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV_FULL)
        h, s, v = cv2.split(hsv_image)
        # Random hue rotation drawn from [-value, value].
        alpha = np.random.uniform(-self.value, self.value)
        h = h.astype(np.uint8)
        # uint8 addition take cares of rotation across boundaries
        with np.errstate(over="ignore"):
            h += np.uint8(alpha * 255)
        hsv_image = cv2.merge([h, s, v])
        return cv2.cvtColor(hsv_image, cv2.COLOR_HSV2BGR_FULL).astype(dtype)
    def _apply_coords(self, coords):
        return coords
    def _apply_mask(self, mask):
        return mask
class ColorJitter(VisionTransform):
    r"""Randomly change the brightness, contrast, saturation and hue of an image.

    Args:
        brightness: how much to jitter brightness.
            Chosen uniformly from [max(0, 1 - brightness), 1 + brightness]
            or the given [min, max]. Should be non negative numbers.
        contrast: how much to jitter contrast.
            Chosen uniformly from [max(0, 1 - contrast), 1 + contrast]
            or the given [min, max]. Should be non negative numbers.
        saturation: how much to jitter saturation.
            Chosen uniformly from [max(0, 1 - saturation), 1 + saturation]
            or the given [min, max]. Should be non negative numbers.
        hue: how much to jitter hue.
            Chosen uniformly from [-hue, hue] or the given [min, max].
            Should have 0<= hue <= 0.5 or -0.5 <= min <= max <= 0.5.
        order: the same with :class:`VisionTransform`.
    """
    def __init__(self, brightness=0, contrast=0, saturation=0, hue=0, *, order=None):
        super().__init__(order)
        # Only jitters with a non-zero strength participate.
        enabled = []
        if brightness != 0:
            enabled.append(BrightnessTransform(brightness))
        if contrast != 0:
            enabled.append(ContrastTransform(contrast))
        if saturation != 0:
            enabled.append(SaturationTransform(saturation))
        if hue != 0:
            enabled.append(HueTransform(hue))
        # NOTE(review): shuffle_indices presumably randomizes the order in
        # which the enabled jitters are applied — confirm against Compose.
        self.transforms = Compose(
            enabled,
            shuffle_indices=[tuple(range(1, len(enabled) + 1))],
            order=order,
        )
    def apply(self, input):
        # Delegate to the composed jitter pipeline.
        return self.transforms.apply(input)
class Lighting(VisionTransform):
    r"""Apply AlexNet-Style "lighting" augmentation to input data.

    Input images are assumed to have 'RGB' channel order.

    The degree of color jittering is randomly sampled via a normal
    distribution, with standard deviation given by the scale parameter.
    """
    def __init__(self, scale, *, order=None):
        super().__init__(order)
        if scale < 0:
            raise ValueError("lighting scale should be non-negative")
        self.scale = scale
        # Eigenvectors of the pixel covariance; per the original note the
        # first dimension is reversed for BGR.
        self.eigvec = np.array(
            [
                [-0.5836, -0.6948, 0.4203],
                [-0.5808, -0.0045, -0.8140],
                [-0.5675, 0.7192, 0.4009],
            ]
        )  # reverse the first dimension for BGR
        self.eigval = np.array([0.2175, 0.0188, 0.0045])
    def _apply_image(self, image):
        if self.scale == 0:
            # Zero scale means "no jitter": return the input untouched.
            return image
        original_dtype = image.dtype
        # Per-channel jitter magnitudes in 8-bit units.
        alpha = np.random.normal(scale=self.scale * 255, size=3)
        shift = self.eigvec.dot(alpha * self.eigval)
        jittered = image.astype(np.float32) + shift
        return jittered.clip(0, 255).astype(original_dtype)
    def _apply_coords(self, coords):
        return coords
    def _apply_mask(self, mask):
        return mask
|
[
"megengine.data.transform.vision.functional.pad",
"megengine.data.transform.vision.functional.resize",
"numpy.clip",
"numpy.random.randint",
"numpy.random.normal",
"cv2.cvtColor",
"cv2.split",
"numpy.rollaxis",
"numpy.random.choice",
"math.log",
"numpy.uint8",
"math.sqrt",
"numpy.asarray",
"numpy.random.permutation",
"cv2.merge",
"numpy.concatenate",
"numpy.random.uniform",
"megengine.data.transform.vision.functional.flip",
"numpy.errstate",
"numpy.random.random",
"numpy.array",
"numpy.ascontiguousarray",
"megengine.data.transform.vision.functional.to_gray"
] |
[((5400, 5438), 'numpy.concatenate', 'np.concatenate', (['(minxy, maxxy)'], {'axis': '(1)'}), '((minxy, maxxy), axis=1)\n', (5414, 5438), True, 'import numpy as np\n'), ((5724, 5775), 'numpy.concatenate', 'np.concatenate', (['(trans_coords, visibility)'], {'axis': '(-1)'}), '((trans_coords, visibility), axis=-1)\n', (5738, 5775), True, 'import numpy as np\n'), ((13693, 13728), 'megengine.data.transform.vision.functional.pad', 'F.pad', (['image', 'self.size', 'self.value'], {}), '(image, self.size, self.value)\n', (13698, 13728), True, 'from megengine.data.transform.vision import functional as F\n'), ((13912, 13951), 'megengine.data.transform.vision.functional.pad', 'F.pad', (['mask', 'self.size', 'self.mask_value'], {}), '(mask, self.size, self.mask_value)\n', (13917, 13951), True, 'from megengine.data.transform.vision import functional as F\n'), ((15110, 15155), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['image', '(th, tw)', 'self.interpolation'], {}), '(image, (th, tw), self.interpolation)\n', (15118, 15155), True, 'from megengine.data.transform.vision import functional as F\n'), ((15553, 15596), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['mask', '(th, tw)', 'cv2.INTER_NEAREST'], {}), '(mask, (th, tw), cv2.INTER_NEAREST)\n', (15561, 15596), True, 'from megengine.data.transform.vision import functional as F\n'), ((17106, 17151), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['image', '(th, tw)', 'self.interpolation'], {}), '(image, (th, tw), self.interpolation)\n', (17114, 17151), True, 'from megengine.data.transform.vision import functional as F\n'), ((17549, 17592), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['mask', '(th, tw)', 'cv2.INTER_NEAREST'], {}), '(mask, (th, tw), cv2.INTER_NEAREST)\n', (17557, 17592), True, 'from megengine.data.transform.vision import functional as F\n'), ((18886, 18931), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['image', 
'(th, tw)', 'self.interpolation'], {}), '(image, (th, tw), self.interpolation)\n', (18894, 18931), True, 'from megengine.data.transform.vision import functional as F\n'), ((19329, 19372), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['mask', '(th, tw)', 'cv2.INTER_NEAREST'], {}), '(mask, (th, tw), cv2.INTER_NEAREST)\n', (19337, 19372), True, 'from megengine.data.transform.vision import functional as F\n'), ((19453, 19489), 'numpy.random.uniform', 'np.random.uniform', (['*self.scale_range'], {}), '(*self.scale_range)\n', (19470, 19489), True, 'import numpy as np\n'), ((23476, 23535), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['cropped_img', 'self.output_size', 'self.interpolation'], {}), '(cropped_img, self.output_size, self.interpolation)\n', (23484, 23535), True, 'from megengine.data.transform.vision import functional as F\n'), ((23907, 23966), 'megengine.data.transform.vision.functional.resize', 'F.resize', (['cropped_mask', 'self.output_size', 'cv2.INTER_NEAREST'], {}), '(cropped_mask, self.output_size, cv2.INTER_NEAREST)\n', (23915, 23966), True, 'from megengine.data.transform.vision import functional as F\n'), ((28981, 29013), 'numpy.array', 'np.array', (['mean'], {'dtype': 'np.float32'}), '(mean, dtype=np.float32)\n', (28989, 29013), True, 'import numpy as np\n'), ((29033, 29064), 'numpy.array', 'np.array', (['std'], {'dtype': 'np.float32'}), '(std, dtype=np.float32)\n', (29041, 29064), True, 'import numpy as np\n'), ((29712, 29744), 'numpy.array', 'np.array', (['mean'], {'dtype': 'np.float32'}), '(mean, dtype=np.float32)\n', (29720, 29744), True, 'import numpy as np\n'), ((29764, 29795), 'numpy.array', 'np.array', (['std'], {'dtype': 'np.float32'}), '(std, dtype=np.float32)\n', (29772, 29795), True, 'import numpy as np\n'), ((33606, 33649), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2HSV_FULL'], {}), '(image, cv2.COLOR_BGR2HSV_FULL)\n', (33618, 33649), False, 'import cv2\n'), ((33668, 33688), 
'cv2.split', 'cv2.split', (['hsv_image'], {}), '(hsv_image)\n', (33677, 33688), False, 'import cv2\n'), ((33706, 33748), 'numpy.random.uniform', 'np.random.uniform', (['(-self.value)', 'self.value'], {}), '(-self.value, self.value)\n', (33723, 33748), True, 'import numpy as np\n'), ((33946, 33966), 'cv2.merge', 'cv2.merge', (['[h, s, v]'], {}), '([h, s, v])\n', (33955, 33966), False, 'import cv2\n'), ((36350, 36447), 'numpy.array', 'np.array', (['[[-0.5836, -0.6948, 0.4203], [-0.5808, -0.0045, -0.814], [-0.5675, 0.7192, \n 0.4009]]'], {}), '([[-0.5836, -0.6948, 0.4203], [-0.5808, -0.0045, -0.814], [-0.5675,\n 0.7192, 0.4009]])\n', (36358, 36447), True, 'import numpy as np\n'), ((36591, 36625), 'numpy.array', 'np.array', (['[0.2175, 0.0188, 0.0045]'], {}), '([0.2175, 0.0188, 0.0045])\n', (36599, 36625), True, 'import numpy as np\n'), ((36801, 36849), 'numpy.random.normal', 'np.random.normal', ([], {'scale': '(self.scale * 255)', 'size': '(3)'}), '(scale=self.scale * 255, size=3)\n', (36817, 36849), True, 'import numpy as np\n'), ((11365, 11403), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['image[[2, 1, 0]]'], {}), '(image[[2, 1, 0]])\n', (11385, 11403), True, 'import numpy as np\n'), ((11774, 11805), 'numpy.array', 'np.array', (['image'], {'dtype': 'np.uint8'}), '(image, dtype=np.uint8)\n', (11782, 11805), True, 'import numpy as np\n'), ((11865, 11903), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['image[[2, 1, 0]]'], {}), '(image[[2, 1, 0]])\n', (11885, 11903), True, 'import numpy as np\n'), ((17717, 17774), 'numpy.random.randint', 'np.random.randint', (['self.min_size[0]', '(self.min_size[1] + 1)'], {}), '(self.min_size[0], self.min_size[1] + 1)\n', (17734, 17774), True, 'import numpy as np\n'), ((17808, 17839), 'numpy.random.choice', 'np.random.choice', (['self.min_size'], {}), '(self.min_size)\n', (17824, 17839), True, 'import numpy as np\n'), ((21055, 21112), 'megengine.data.transform.vision.functional.pad', 'F.pad', (['image', '(self._th - 
self._h, 0)', 'self.padding_value'], {}), '(image, (self._th - self._h, 0), self.padding_value)\n', (21060, 21112), True, 'from megengine.data.transform.vision import functional as F\n'), ((21164, 21221), 'megengine.data.transform.vision.functional.pad', 'F.pad', (['image', '(0, self._tw - self._w)', 'self.padding_value'], {}), '(image, (0, self._tw - self._w), self.padding_value)\n', (21169, 21221), True, 'from megengine.data.transform.vision import functional as F\n'), ((21511, 21571), 'megengine.data.transform.vision.functional.pad', 'F.pad', (['mask', '(self._th - self._h, 0)', 'self.padding_maskvalue'], {}), '(mask, (self._th - self._h, 0), self.padding_maskvalue)\n', (21516, 21571), True, 'from megengine.data.transform.vision import functional as F\n'), ((21622, 21682), 'megengine.data.transform.vision.functional.pad', 'F.pad', (['mask', '(0, self._tw - self._w)', 'self.padding_maskvalue'], {}), '(mask, (0, self._tw - self._w), self.padding_maskvalue)\n', (21627, 21682), True, 'from megengine.data.transform.vision import functional as F\n'), ((26916, 26934), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (26932, 26934), True, 'import numpy as np\n'), ((27114, 27139), 'megengine.data.transform.vision.functional.flip', 'F.flip', (['image'], {'flipCode': '(1)'}), '(image, flipCode=1)\n', (27120, 27139), True, 'from megengine.data.transform.vision import functional as F\n'), ((27376, 27400), 'megengine.data.transform.vision.functional.flip', 'F.flip', (['mask'], {'flipCode': '(1)'}), '(mask, flipCode=1)\n', (27382, 27400), True, 'from megengine.data.transform.vision import functional as F\n'), ((27859, 27877), 'numpy.random.random', 'np.random.random', ([], {}), '()\n', (27875, 27877), True, 'import numpy as np\n'), ((28057, 28082), 'megengine.data.transform.vision.functional.flip', 'F.flip', (['image'], {'flipCode': '(0)'}), '(image, flipCode=0)\n', (28063, 28082), True, 'from megengine.data.transform.vision import functional as F\n'), ((28319, 
28343), 'megengine.data.transform.vision.functional.flip', 'F.flip', (['mask'], {'flipCode': '(0)'}), '(mask, flipCode=0)\n', (28325, 28343), True, 'from megengine.data.transform.vision import functional as F\n'), ((29876, 29926), 'numpy.random.normal', 'np.random.normal', (['self.mean', 'self.std', 'image.shape'], {}), '(self.mean, self.std, image.shape)\n', (29892, 29926), True, 'import numpy as np\n'), ((33859, 33885), 'numpy.errstate', 'np.errstate', ([], {'over': '"""ignore"""'}), "(over='ignore')\n", (33870, 33885), True, 'import numpy as np\n'), ((33904, 33925), 'numpy.uint8', 'np.uint8', (['(alpha * 255)'], {}), '(alpha * 255)\n', (33912, 33925), True, 'import numpy as np\n'), ((5117, 5159), 'numpy.array', 'np.array', (['[(0, 1), (2, 1), (0, 3), (2, 3)]'], {}), '([(0, 1), (2, 1), (0, 3), (2, 3)])\n', (5125, 5159), True, 'import numpy as np\n'), ((6502, 6523), 'numpy.rollaxis', 'np.rollaxis', (['image', '(2)'], {}), '(image, 2)\n', (6513, 6523), True, 'import numpy as np\n'), ((6711, 6731), 'numpy.rollaxis', 'np.rollaxis', (['mask', '(2)'], {}), '(mask, 2)\n', (6722, 6731), True, 'import numpy as np\n'), ((11465, 11503), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_BGR2RGB'], {}), '(image, cv2.COLOR_BGR2RGB)\n', (11477, 11503), False, 'import cv2\n'), ((11965, 12003), 'cv2.cvtColor', 'cv2.cvtColor', (['image', 'cv2.COLOR_RGB2BGR'], {}), '(image, cv2.COLOR_RGB2BGR)\n', (11977, 12003), False, 'import cv2\n'), ((24144, 24180), 'numpy.random.uniform', 'np.random.uniform', (['*self.scale_range'], {}), '(*self.scale_range)\n', (24161, 24180), True, 'import numpy as np\n'), ((24293, 24322), 'numpy.random.uniform', 'np.random.uniform', (['*log_ratio'], {}), '(*log_ratio)\n', (24310, 24322), True, 'import numpy as np\n'), ((24529, 24564), 'numpy.random.randint', 'np.random.randint', (['(0)', '(width - w + 1)'], {}), '(0, width - w + 1)\n', (24546, 24564), True, 'import numpy as np\n'), ((24585, 24621), 'numpy.random.randint', 'np.random.randint', (['(0)', 
'(height - h + 1)'], {}), '(0, height - h + 1)\n', (24602, 24621), True, 'import numpy as np\n'), ((29997, 30019), 'numpy.clip', 'np.clip', (['image', '(0)', '(255)'], {}), '(image, 0, 255)\n', (30004, 30019), True, 'import numpy as np\n'), ((32756, 32772), 'megengine.data.transform.vision.functional.to_gray', 'F.to_gray', (['image'], {}), '(image)\n', (32765, 32772), True, 'from megengine.data.transform.vision import functional as F\n'), ((33982, 34029), 'cv2.cvtColor', 'cv2.cvtColor', (['hsv_image', 'cv2.COLOR_HSV2BGR_FULL'], {}), '(hsv_image, cv2.COLOR_HSV2BGR_FULL)\n', (33994, 34029), False, 'import cv2\n'), ((24218, 24229), 'math.log', 'math.log', (['x'], {}), '(x)\n', (24226, 24229), False, 'import math\n'), ((24351, 24388), 'math.sqrt', 'math.sqrt', (['(target_area * aspect_ratio)'], {}), '(target_area * aspect_ratio)\n', (24360, 24388), False, 'import math\n'), ((24417, 24454), 'math.sqrt', 'math.sqrt', (['(target_area / aspect_ratio)'], {}), '(target_area / aspect_ratio)\n', (24426, 24454), False, 'import math\n'), ((10413, 10439), 'numpy.random.permutation', 'np.random.permutation', (['idx'], {}), '(idx)\n', (10434, 10439), True, 'import numpy as np\n'), ((31804, 31820), 'megengine.data.transform.vision.functional.to_gray', 'F.to_gray', (['image'], {}), '(image)\n', (31813, 31820), True, 'from megengine.data.transform.vision import functional as F\n'), ((5187, 5204), 'numpy.asarray', 'np.asarray', (['boxes'], {}), '(boxes)\n', (5197, 5204), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 1 12:27:23 2020
@author: robi
"""
# call new main
from pathlib import Path, PurePath
import image_bbox_slicer.image_bbox_slicer
# Capture the current working directory as a pure (OS-independent) path.
local = PurePath(Path.cwd())
#print(local)
# Resolve and display the user's home directory.
home_path = Path.home()
print(repr(home_path))
|
[
"pathlib.Path.cwd",
"pathlib.Path.home"
] |
[((236, 247), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (245, 247), False, 'from pathlib import Path, PurePath\n'), ((196, 206), 'pathlib.Path.cwd', 'Path.cwd', ([], {}), '()\n', (204, 206), False, 'from pathlib import Path, PurePath\n')]
|
# -*- coding: utf-8 -*-
"""Utility functions for grouping sub-graphs by citation."""
from collections import defaultdict
from .utils import cleanup
from ...constants import CITATION, CITATION_REFERENCE, CITATION_TYPE
__all__ = [
'get_subgraphs_by_citation',
]
def get_subgraphs_by_citation(graph):
    """Stratify the graph based on citations.

    Edges without citation data are skipped; the remaining edges are
    grouped into fresh sub-graphs keyed by (citation type, reference).

    :type graph: pybel.BELGraph
    :rtype: dict[tuple[str,str],pybel.BELGraph]
    """
    subgraphs = defaultdict(graph.fresh_copy)
    for source, target, key, data in graph.edges(keys=True, data=True):
        if CITATION not in data:
            continue
        citation = data[CITATION]
        group_key = citation[CITATION_TYPE], citation[CITATION_REFERENCE]
        subgraphs[group_key].add_edge(source, target, key=key, **data)
    cleanup(graph, subgraphs)
    return dict(subgraphs)
|
[
"collections.defaultdict"
] |
[((452, 481), 'collections.defaultdict', 'defaultdict', (['graph.fresh_copy'], {}), '(graph.fresh_copy)\n', (463, 481), False, 'from collections import defaultdict\n')]
|
#==============================================================================
#
# This code was developed as part of the Astronomy Data and Computing Services
# (ADACS; https:#adacs.org.au) 2017B Software Support program.
#
# Written by: <NAME>, <NAME>, <NAME>
# Date: December 2017
#
# It is distributed under the MIT (Expat) License (see https:#opensource.org/):
#
# Copyright (c) 2017 Astronomy Data and Computing Services (ADACS)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#==============================================================================
from django import forms
from gbkfit_web.models import PSF, Job
from django.utils.translation import ugettext_lazy as _
# Model fields exposed on the PSF forms, in display order.
FIELDS = ['psf_type', 'fwhm_x', 'fwhm_y', 'pa', 'beta']
# Per-field form widgets. The `has-popover` CSS class marks inputs whose
# help text is rendered as a Bootstrap popover instead of inline text.
WIDGETS = {
    'psf_type': forms.Select(
        attrs={'class': 'form-control has-popover'},
    ),
    'fwhm_x': forms.TextInput(
        attrs={'class': "form-control has-popover"},
    ),
    'fwhm_y': forms.TextInput(
        attrs={'class': "form-control has-popover"},
    ),
    'pa': forms.TextInput(
        attrs={'class': "form-control has-popover"},
    ),
    'beta': forms.TextInput(
        attrs={'class': "form-control has-popover"},
    ),
}
# Human-readable (translatable) labels for each field.
LABELS = {
    'psf_type': _('Type'),
    'fwhm_x': _('FWHM X'),
    'fwhm_y': _('FWHM Y'),
    'pa': _('Position Angle'),
    'beta': _('Beta'),
}
# Translatable help strings; only the FWHM fields carry extra explanation.
HELP_TEXTS = {
    'fwhm_x': _('Full width at half maximum (X)'),
    'fwhm_y': _('Full width at half maximum (Y)'),
}
class PSFForm(forms.ModelForm):
    """Form used to create the PSF record attached to a job.

    Expects two extra keyword arguments at construction time:
    ``request`` (the current HTTP request, used to cache the PSF in the
    session) and ``id`` (the id of the Job the PSF belongs to).
    """

    def __init__(self, *args, **kwargs):
        # Pull our custom keyword arguments out before delegating to
        # ModelForm, which would reject them.
        self.request = kwargs.pop('request', None)
        self.id = kwargs.pop('id', None)
        super(PSFForm, self).__init__(*args, **kwargs)

    class Meta:
        model = PSF
        fields = FIELDS
        widgets = WIDGETS
        labels = LABELS

    def save(self):
        """Validate the form, persist a PSF for the job, and cache it in the session."""
        self.full_clean()
        cleaned = self.cleaned_data
        parent_job = Job.objects.get(id=self.id)
        PSF.objects.create(
            job=parent_job,
            psf_type=cleaned.get('psf_type'),
            fwhm_x=cleaned.get('fwhm_x'),
            fwhm_y=cleaned.get('fwhm_y'),
            pa=cleaned.get('pa'),
            beta=cleaned.get('beta'),
        )
        self.request.session['psf'] = self.as_json(cleaned)

    def as_json(self, data):
        """Return the PSF parameters as a plain dict.

        ``beta`` is only meaningful for a Moffat profile, so it is
        included only when that type is selected.
        """
        result = dict(
            type=data.get('psf_type'),
            fwhm_x=data.get('fwhm_x'),
            fwhm_y=data.get('fwhm_y'),
            pa=data.get('pa'),
        )
        if data.get('psf_type') in [PSF.MOFFAT]:
            result['beta'] = data.get('beta')
        return result
class EditPSFForm(forms.ModelForm):
    """Form used to edit an existing PSF.

    Expects two extra keyword arguments at construction time:
    ``request`` (the current HTTP request) and ``job_id`` (the id of the
    Job whose PSF is being edited). When ``job_id`` is given, the stored
    PSF is loaded into the session so the form renders current values.
    """

    def __init__(self, *args, **kwargs):
        self.request = kwargs.pop('request', None)
        self.job_id = kwargs.pop('job_id', None)
        if self.job_id:
            # Best effort: seed the session with the stored PSF. The
            # previous bare `except:` swallowed *everything*, including
            # SystemExit/KeyboardInterrupt; narrow it to Exception so
            # genuine interrupts still propagate while lookup/serialization
            # failures remain non-fatal.
            try:
                self.request.session['psf'] = PSF.objects.get(job_id=self.job_id).as_json()
            except Exception:
                pass
        super(EditPSFForm, self).__init__(*args, **kwargs)
        for field in self.fields:
            # Move each field's help text into Bootstrap popover data
            # attributes so it is shown as a popover rather than inline.
            help_text = self.fields[field].help_text
            self.fields[field].help_text = None
            if help_text != '':
                self.fields[field].widget.attrs.update(
                    {'data-content': help_text, 'data-placement': 'top',
                     'data-container': 'body'})

    class Meta:
        model = PSF
        fields = FIELDS
        widgets = WIDGETS
        labels = LABELS
        help_texts = HELP_TEXTS
|
[
"django.forms.Select",
"django.forms.TextInput",
"gbkfit_web.models.PSF.objects.get",
"django.utils.translation.ugettext_lazy",
"gbkfit_web.models.Job.objects.get"
] |
[((1788, 1845), 'django.forms.Select', 'forms.Select', ([], {'attrs': "{'class': 'form-control has-popover'}"}), "(attrs={'class': 'form-control has-popover'})\n", (1800, 1845), False, 'from django import forms\n'), ((1876, 1936), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control has-popover'}"}), "(attrs={'class': 'form-control has-popover'})\n", (1891, 1936), False, 'from django import forms\n'), ((1967, 2027), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control has-popover'}"}), "(attrs={'class': 'form-control has-popover'})\n", (1982, 2027), False, 'from django import forms\n'), ((2054, 2114), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control has-popover'}"}), "(attrs={'class': 'form-control has-popover'})\n", (2069, 2114), False, 'from django import forms\n'), ((2143, 2203), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control has-popover'}"}), "(attrs={'class': 'form-control has-popover'})\n", (2158, 2203), False, 'from django import forms\n'), ((2250, 2259), 'django.utils.translation.ugettext_lazy', '_', (['"""Type"""'], {}), "('Type')\n", (2251, 2259), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2275, 2286), 'django.utils.translation.ugettext_lazy', '_', (['"""FWHM X"""'], {}), "('FWHM X')\n", (2276, 2286), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2302, 2313), 'django.utils.translation.ugettext_lazy', '_', (['"""FWHM Y"""'], {}), "('FWHM Y')\n", (2303, 2313), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2325, 2344), 'django.utils.translation.ugettext_lazy', '_', (['"""Position Angle"""'], {}), "('Position Angle')\n", (2326, 2344), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2358, 2367), 'django.utils.translation.ugettext_lazy', '_', (['"""Beta"""'], {}), "('Beta')\n", (2359, 2367), True, 'from django.utils.translation 
import ugettext_lazy as _\n'), ((2401, 2436), 'django.utils.translation.ugettext_lazy', '_', (['"""Full width at half maximum (X)"""'], {}), "('Full width at half maximum (X)')\n", (2402, 2436), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2452, 2487), 'django.utils.translation.ugettext_lazy', '_', (['"""Full width at half maximum (Y)"""'], {}), "('Full width at half maximum (Y)')\n", (2453, 2487), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2920, 2947), 'gbkfit_web.models.Job.objects.get', 'Job.objects.get', ([], {'id': 'self.id'}), '(id=self.id)\n', (2935, 2947), False, 'from gbkfit_web.models import PSF, Job\n'), ((4062, 4097), 'gbkfit_web.models.PSF.objects.get', 'PSF.objects.get', ([], {'job_id': 'self.job_id'}), '(job_id=self.job_id)\n', (4077, 4097), False, 'from gbkfit_web.models import PSF, Job\n')]
|
import cgi
from paste.urlparser import PkgResourcesParser
from pylons import request
from pylons.controllers.util import forward
from pylons.middleware import error_document_template
from webhelpers.html.builder import literal
from pylonstest.lib.base import BaseController
class ErrorController(BaseController):
    """Generates error documents as and when they are required.

    The ErrorDocuments middleware forwards to ErrorController when error
    related status codes are returned from the application.

    This behaviour can be altered by changing the parameters to the
    ErrorDocuments middleware in your config/middleware.py file.
    """

    def document(self):
        """Render the error document"""
        original = request.environ.get('pylons.original_response')
        # Prefer the body of the original response; fall back to an
        # HTML-escaped `message` query parameter when it is empty.
        message = literal(original.body)
        if not message:
            message = cgi.escape(request.GET.get('message', ''))
        values = {
            'prefix': request.environ.get('SCRIPT_NAME', ''),
            'code': cgi.escape(request.GET.get('code', str(original.status_int))),
            'message': message,
        }
        return error_document_template % values

    def img(self, id):
        """Serve Pylons' stock images"""
        return self._serve_file('media/img/%s' % id)

    def style(self, id):
        """Serve Pylons' stock stylesheets"""
        return self._serve_file('media/style/%s' % id)

    def _serve_file(self, path):
        """Call Paste's FileApp (a WSGI application) to serve the file
        at the specified path
        """
        request.environ['PATH_INFO'] = '/' + path
        return forward(PkgResourcesParser('pylons', 'pylons'))
|
[
"pylons.request.GET.get",
"pylons.request.environ.get",
"webhelpers.html.builder.literal",
"paste.urlparser.PkgResourcesParser"
] |
[((738, 785), 'pylons.request.environ.get', 'request.environ.get', (['"""pylons.original_response"""'], {}), "('pylons.original_response')\n", (757, 785), False, 'from pylons import request\n'), ((804, 822), 'webhelpers.html.builder.literal', 'literal', (['resp.body'], {}), '(resp.body)\n', (811, 822), False, 'from webhelpers.html.builder import literal\n'), ((1594, 1632), 'paste.urlparser.PkgResourcesParser', 'PkgResourcesParser', (['"""pylons"""', '"""pylons"""'], {}), "('pylons', 'pylons')\n", (1612, 1632), False, 'from paste.urlparser import PkgResourcesParser\n'), ((837, 867), 'pylons.request.GET.get', 'request.GET.get', (['"""message"""', '""""""'], {}), "('message', '')\n", (852, 867), False, 'from pylons import request\n'), ((936, 974), 'pylons.request.environ.get', 'request.environ.get', (['"""SCRIPT_NAME"""', '""""""'], {}), "('SCRIPT_NAME', '')\n", (955, 974), False, 'from pylons import request\n')]
|
"""Support for Kodak Smart Home."""
from datetime import timedelta
import logging
from kodaksmarthome import KodakSmartHome
import voluptuous as vol
from homeassistant.const import (
CONF_PASSWORD,
CONF_REGION,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.helpers.event import track_time_interval
# Module-level logger for this integration.
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Kodak Smart Home Portal"
# Identifiers for the persistent notification shown on setup failure.
NOTIFICATION_ID = "kodaksmarthome_notification"
NOTIFICATION_TITLE = "Kodak Smart Home Setup"
# Key under hass.data where the connected KodakSmartHome client is stored.
DATA_KODAKSMARTHOME_CAMS = "kodaksmarthome_cams"
DOMAIN = "kodaksmarthome"
DEFAULT_CACHEDB = ".kodaksmarthome_cache.pickle"
DEFAULT_ENTITY_NAMESPACE = "kodaksmarthome"
# Dispatcher signal sent to entities when fresh data is available.
SIGNAL_UPDATE_KODAKSMARTHOME = "kodaksmarthome_update"
# Default polling interval; overridable via CONF_SCAN_INTERVAL.
SCAN_INTERVAL = timedelta(seconds=10)
# Configuration schema: username, password and region are required;
# scan_interval is optional and defaults to SCAN_INTERVAL.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_USERNAME): cv.string,
                vol.Required(CONF_PASSWORD): cv.string,
                vol.Required(CONF_REGION): cv.string,
                vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
    """Set up the Kodak Smart Home Portal component.

    Validates the configured credentials, stores the client in
    ``hass.data``, registers an ``update`` service and schedules a
    periodic refresh. Returns False when the portal is unreachable.
    """
    conf = config[DOMAIN]
    username = conf[CONF_USERNAME]
    password = conf[CONF_PASSWORD]
    region = conf[CONF_REGION]
    scan_interval = conf[CONF_SCAN_INTERVAL]

    try:
        # Verify the credentials actually work before wiring anything up.
        kodak_hub = KodakSmartHome(username=username, password=password, region=region)
        kodak_hub.connect()
        if not kodak_hub.is_connected:
            return False
        kodak_hub.disconnect()
        hass.data[DATA_KODAKSMARTHOME_CAMS] = kodak_hub
    except ConnectionError as ex:
        _LOGGER.error("Unable to connect to Kodak Smart Home service: %s", str(ex))
        hass.components.persistent_notification.create(
            "Error: {}<br />"
            "You will need to restart hass after fixing."
            "".format(ex),
            title=NOTIFICATION_TITLE,
            notification_id=NOTIFICATION_ID,
        )
        return False

    def refresh_hub():
        """Reconnect to the portal and broadcast an update to entities."""
        _LOGGER.debug("Updating Kodak Smart Home Hub component")
        kodak_hub.connect()
        dispatcher_send(hass, SIGNAL_UPDATE_KODAKSMARTHOME)
        kodak_hub.disconnect()

    def handle_refresh_service(service):
        refresh_hub()

    def handle_refresh_timer(event_time):
        refresh_hub()

    # Manual refresh via the `kodaksmarthome.update` service.
    hass.services.register(DOMAIN, "update", handle_refresh_service)
    # Periodic refresh on the configured scan interval.
    track_time_interval(hass, handle_refresh_timer, scan_interval)
    return True
|
[
"voluptuous.Optional",
"homeassistant.helpers.event.track_time_interval",
"voluptuous.Required",
"homeassistant.helpers.dispatcher.dispatcher_send",
"datetime.timedelta",
"kodaksmarthome.KodakSmartHome",
"logging.getLogger"
] |
[((451, 478), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (468, 478), False, 'import logging\n'), ((874, 895), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(10)'}), '(seconds=10)\n', (883, 895), False, 'from datetime import timedelta\n'), ((2760, 2819), 'homeassistant.helpers.event.track_time_interval', 'track_time_interval', (['hass', 'timer_hub_refresh', 'scan_interval'], {}), '(hass, timer_hub_refresh, scan_interval)\n', (2779, 2819), False, 'from homeassistant.helpers.event import track_time_interval\n'), ((1570, 1637), 'kodaksmarthome.KodakSmartHome', 'KodakSmartHome', ([], {'username': 'username', 'password': 'password', 'region': 'region'}), '(username=username, password=password, region=region)\n', (1584, 1637), False, 'from kodaksmarthome import KodakSmartHome\n'), ((2531, 2582), 'homeassistant.helpers.dispatcher.dispatcher_send', 'dispatcher_send', (['hass', 'SIGNAL_UPDATE_KODAKSMARTHOME'], {}), '(hass, SIGNAL_UPDATE_KODAKSMARTHOME)\n', (2546, 2582), False, 'from homeassistant.helpers.dispatcher import dispatcher_send\n'), ((989, 1016), 'voluptuous.Required', 'vol.Required', (['CONF_USERNAME'], {}), '(CONF_USERNAME)\n', (1001, 1016), True, 'import voluptuous as vol\n'), ((1045, 1072), 'voluptuous.Required', 'vol.Required', (['CONF_PASSWORD'], {}), '(CONF_PASSWORD)\n', (1057, 1072), True, 'import voluptuous as vol\n'), ((1101, 1126), 'voluptuous.Required', 'vol.Required', (['CONF_REGION'], {}), '(CONF_REGION)\n', (1113, 1126), True, 'import voluptuous as vol\n'), ((1155, 1210), 'voluptuous.Optional', 'vol.Optional', (['CONF_SCAN_INTERVAL'], {'default': 'SCAN_INTERVAL'}), '(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL)\n', (1167, 1210), True, 'import voluptuous as vol\n')]
|