import torch
import numpy as np
import h5py

from .logging import logger
from .rotations import quat_to_rotMat, rotMat_to_quat
from .quaternion import quat_fix


def add_normalized_positions(filepaths, new_group_name):
    """Add position data normalized relative to the pelvis to h5 files.

    Args:
        filepaths (list): paths to files to add data to
        new_group_name (str): what the new group will be called in the h5
            file
    """
    for filepath in filepaths:
        try:
            h5_file = h5py.File(filepath, "r+")
        except OSError:
            logger.info(f"OSError: Unable to open file {filepath}")
            continue
        quat = np.array(h5_file["orientation"][:, :])
        quat = quat.reshape(quat.shape[1], quat.shape[0])
        quat = quat.reshape(quat.shape[0], -1, 4)
        pos = np.array(h5_file["position"][:, :])
        pos = pos.reshape(pos.shape[1], pos.shape[0])
        pos = pos.reshape(pos.shape[0], -1, 3)
        quat = quat_fix(quat)
        norm_pos = np.zeros(pos.shape)
        pelvis_rot = np.linalg.inv(
            quat_to_rotMat(torch.tensor(quat[:, 0, :]))
        )
        pelvis_pos = pos[:, 0, :]
        for i in range(0, quat.shape[1]):
            relative_pos = np.expand_dims(pos[:, i, :] - pelvis_pos, axis=2)
            norm_pos[:, i, :] = np.squeeze(np.matmul(pelvis_rot, relative_pos),
                                           axis=2)
        norm_pos = norm_pos.reshape(norm_pos.shape[0], -1)
        norm_pos = norm_pos.reshape(norm_pos.shape[1], norm_pos.shape[0])
        try:
            logger.info(f"Writing to file {filepath}")
            h5_file.create_dataset(new_group_name, data=norm_pos)
        except RuntimeError:
            logger.info(("RuntimeError: Unable to create link "
                         f"(name already exists) in {filepath}"))
        h5_file.close()


def add_normalized_accelerations(filepaths, group_name, new_group_name,
                                 root=0):
    """Add acceleration data normalized relative to a root to the h5 files.

    Args:
        filepaths (list): paths to files to add data to
        group_name (str): acceleration group to normalize
            (typically acceleration, but can also be sensorFreeAcceleration)
        new_group_name (str): new group name for normalized acceleration data
        root (int, optional): index of root (e.g., 0 is pelvis, 4 is sternum).
            Defaults to 0.
    """
    for filepath in filepaths:
        try:
            h5_file = h5py.File(filepath, "r+")
        except OSError:
            logger.info(f"OSError: Unable to open file {filepath}")
            continue
        quat = np.array(h5_file["orientation"][:, :])
        quat = quat.reshape(quat.shape[1], quat.shape[0])
        quat = quat.reshape(quat.shape[0], -1, 4)
        acc = np.array(h5_file[group_name][:, :])
        acc = acc.reshape(acc.shape[1], acc.shape[0])
        acc = acc.reshape(acc.shape[0], -1, 3)
        quat = quat_fix(quat)
        norm_acc = np.zeros(acc.shape)
        root_rot = np.linalg.inv(
            quat_to_rotMat(torch.tensor(quat[:, root, :]))
        )
        root_acc = acc[:, root, :]
        for i in range(0, acc.shape[1]):
            relative_acc = np.expand_dims(acc[:, i, :] - root_acc, axis=2)
            norm_acc[:, i, :] = np.squeeze(np.matmul(root_rot, relative_acc),
                                           axis=2)
        norm_acc = norm_acc.reshape(norm_acc.shape[0], -1)
        norm_acc = norm_acc.reshape(norm_acc.shape[1], norm_acc.shape[0])
        try:
            logger.info(f"Writing to file {filepath}")
            h5_file.create_dataset(new_group_name, data=norm_acc)
        except RuntimeError:
            logger.info(("RuntimeError: Unable to create link "
                         f"(name already exists) in {filepath}"))
        h5_file.close()


def add_normalized_quaternions(filepaths, group_name, new_group_name, root=0):
    """Add orientation data normalized relative to a root to the h5 files.

    Args:
        filepaths (list): paths to files to add data to
        group_name (str): orientation group to normalize
            (typically orientation, but can also be sensorOrientation)
        new_group_name (str): new group name for normalized orientation data
        root (int, optional): index of root (e.g., 0 is pelvis, 4 is sternum).
            Defaults to 0.
    """
    for filepath in filepaths:
        try:
            h5_file = h5py.File(filepath, "r+")
        except OSError:
            logger.info(f"OSError: Unable to open file {filepath}")
            continue
        quat = np.array(h5_file[group_name][:, :])
        quat = quat.reshape(quat.shape[1], quat.shape[0])
        quat = quat.reshape(quat.shape[0], -1, 4)
        quat = quat_fix(quat)
        norm_quat = np.zeros(quat.shape)
        root_rotMat = np.linalg.inv(
            quat_to_rotMat(torch.tensor(quat[:, root, :]))
        )
        for i in range(0, quat.shape[1]):
            rotMat = quat_to_rotMat(torch.tensor(quat[:, i, :]))
            norm_rotMat = np.matmul(root_rotMat, rotMat)
            norm_quat[:, i, :] = rotMat_to_quat(norm_rotMat)
        norm_quat = norm_quat.reshape(norm_quat.shape[0], -1)
        norm_quat = norm_quat.reshape(norm_quat.shape[1], norm_quat.shape[0])
        try:
            logger.info(f"Writing to file {filepath}")
            h5_file.create_dataset(new_group_name, data=norm_quat)
        except RuntimeError:
            logger.info(("RuntimeError: Unable to create link "
                         f"(name already exists) in {filepath}"))
        h5_file.close()

| src/common/preprocessing.py |
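A minimal usage sketch for the three functions above (not part of the dataset sample; the module path, file list, and group names are hypothetical, and the .h5 files are assumed to already contain "orientation" and "position" datasets):

# usage sketch (assumptions: package importable as src.common, data under data/)
from glob import glob

from src.common.preprocessing import (add_normalized_positions,
                                      add_normalized_accelerations,
                                      add_normalized_quaternions)

files = glob("data/*.h5")
add_normalized_positions(files, "normPosition")
add_normalized_accelerations(files, "acceleration", "normAcceleration", root=0)
add_normalized_quaternions(files, "orientation", "normOrientation", root=0)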
from typing import List, Optional, Union

from pydantic import PrivateAttr
from pydantic.main import BaseModel

from inoft_vocal_framework.skill_settings.skill_settings import INFRASTRUCTURE_TO_BASE_URL
from inoft_vocal_framework.platforms_handlers.alexa.audioplayer.audioplayer_directives import AudioPlayerWrapper
from inoft_vocal_framework.platforms_handlers.alexa.context import Context
from inoft_vocal_framework.platforms_handlers.alexa.request import Request
from inoft_vocal_framework.platforms_handlers.alexa.response import Response
from inoft_vocal_framework.platforms_handlers.alexa.session import Session
from inoft_vocal_framework.safe_dict import SafeDict
from inoft_vocal_framework.audio_editing.audioclip import AudioBlock


class AlexaHandlerInput(BaseModel):
    from inoft_vocal_framework.platforms_handlers.handler_input import HandlerInput

    session: Session
    context: Context
    request: Request
    _response: Response = PrivateAttr(default_factory=Response)
    _parent_handler_input: HandlerInput = PrivateAttr()
    _is_new_session: Optional[bool] = PrivateAttr(default=None)
    _session_attributes: Optional[dict] = PrivateAttr(default=None)
    _audio_player: Optional[AudioPlayerWrapper] = PrivateAttr(default=None)

    def __init__(self, parent_handler_input: HandlerInput, **kwargs):
        super().__init__(**kwargs)
        self._parent_handler_input = parent_handler_input

    @staticmethod
    def create_dummy(parent_handler_input: HandlerInput):
        return AlexaHandlerInput(
            parent_handler_input=parent_handler_input,
            session=Session(new=False, sessionId="", application={}, user={}),
            context=Context(
                System=Context.SystemModel(
                    application=Context.SystemModel.ApplicationModel(),
                    user=Context.SystemModel.UserModel(),
                    device=Context.SystemModel.DeviceModel(
                        supportedInterfaces=Context.SystemModel.DeviceModel.SupportedInterfacesModel()
                    )
                )
            ),
            request=Request(
                type='dummyRequestType',
                requestId='dummyRequestId',
                timestamp=1000000,
                locale='fr-FR'
            )
        )

    @property
    def response(self) -> Response:
        return self._response

    @property
    def is_new_session(self) -> bool:
        if self._is_new_session is None:
            self._is_new_session = self.session.new if isinstance(self.session.new, bool) else False
        return self._is_new_session

    def is_launch_request(self) -> bool:
        return self.request.is_launch_request()

    def active_intent_name(self) -> Optional[str]:
        return self.request.active_intent_name()

    def is_in_intent_names(self, intent_names_list: Union[List[str], str]) -> bool:
        return self.request.is_in_intent_names(intent_names_list=intent_names_list)

    def is_in_request_types(self, request_types_list: list):
        return self.request.is_in_request_types(request_types_list=request_types_list)

    # todo: create a new function that handles the end of a session (like an optional function in each class type?)
    def say(self, text_or_ssml: str) -> None:
        self._response.say(text_or_ssml=text_or_ssml)

    def say_ssml(self, ssml: str) -> None:
        self._response.say_ssml(ssml=ssml)

    def reprompt(self, text_or_ssml: str) -> None:
        # todo: fix reprompt (for alexa and dialogflow)
        self._response.say_reprompt(text_or_ssml=text_or_ssml)

    def play_audio_block(self, audio_block: AudioBlock, num_channels: int = 1, sample_rate: int = 24000) -> bool:
        # num_channels is expected to be 1 or 2; sample_rate 24000, 22050 or 16000
        # todo: stop using the infrastructure_speech_synthesis setting, and use a kind of 'audio_engine' setting?
        override_engine_base_url = INFRASTRUCTURE_TO_BASE_URL.get(self._parent_handler_input.settings.infrastructure_speech_synthesis, '')
        # todo: only include if the engine is not INFRASTRUCTURE_ENGINE
        file_url = audio_block.manual_render(
            engine_account_id=self._parent_handler_input.settings.engine_account_id,
            engine_project_id=self._parent_handler_input.settings.engine_project_id,
            engine_api_key=self._parent_handler_input.settings.engine_api_key,
            override_engine_base_url=override_engine_base_url,
            num_channels=num_channels, sample_rate=sample_rate, bitrate=48,
            out_filepath="null", format_type="mp3"
        )
        # todo: make out_filepath argument optional
        self.say_ssml(f'<audio src="{file_url}" />')
        return True  # todo: return False if rendering failed

    @property
    def audioplayer(self) -> AudioPlayerWrapper:
        if self._audio_player is None:
            self._audio_player = AudioPlayerWrapper(parent_handler_input=self._parent_handler_input)
        return self._audio_player

    def save_audioplayer_handlers_group_class(self, handlers_group_class_type: type, group_class_kwargs: dict = None):
        from inspect import getfile
        # We use the persistent attributes and not the session, because after launching an audio file with the
        # audio player, the session of the user will end. Then, when interacting with an audio file, there will
        # be no session id. So, if we saved this data as session attributes, it would be considered part of the
        # same session only if the smart session timeout had not been exceeded, which is not at all what we want.
        self._parent_handler_input.persistent_memorize(
            data_key='lastUsedAudioPlayerHandlersGroupClass',
            data_value={
                "fileFilepathContainingClass": getfile(handlers_group_class_type),
                "classPath": handlers_group_class_type.__qualname__,
                "classKwargs": group_class_kwargs
            }
        )

    def get_last_used_audioplayer_handlers_group(self) -> SafeDict:
        return SafeDict(self._parent_handler_input.persistent_remember('lastUsedAudioPlayerHandlersGroupClass', specific_object_type=dict))

    def show_basic_card(self, title: str, text: str, small_image_url: Optional[str] = None, large_image_url: Optional[str] = None) -> None:
        from inoft_vocal_framework.platforms_handlers.alexa.response import Card
        if small_image_url is not None or large_image_url is not None:
            from inoft_vocal_framework.platforms_handlers.alexa.response import Image
            if small_image_url is None and large_image_url is not None:
                small_image_url = large_image_url
                print("WARNING! The small_image_url argument was not specified, the large_image_url "
                      "argument (which has been specified) is now also used as the small_image_url.")
            elif large_image_url is None and small_image_url is not None:
                large_image_url = small_image_url
                print("WARNING! The large_image_url argument was not specified, the small_image_url "
                      "argument (which has been specified) is now also used as the large_image_url.")
            self._response.card = Card(
                type_value=Card.type_standard, title=title, text=text,
                image=Image(small_image_url=small_image_url, large_image_url=large_image_url)
            )
        else:
            self._response.card = Card(type_value=Card.type_simple, title=title, content_text=text)

    def show_link_account_card(self) -> None:
        raise NotImplementedError("Not yet implemented")

    def show_ask_permissions_card(self) -> None:
        raise NotImplementedError("Not yet implemented")

    def end_session(self, should_end: bool = True) -> None:
        self._response.end_session(should_end=should_end)

    @property
    def session_attributes(self) -> dict:
        if self._session_attributes is None:
            self._session_attributes = self.session.attributes if isinstance(self.session.attributes, dict) else {}
        return self._session_attributes

| platforms_handlers/alexa/handler_input.py |
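A hedged sketch of how a request handler might drive the wrapper above (not from the source; the handler function and its wiring into the framework are assumptions, and only the AlexaHandlerInput methods shown are taken from the class):

# hypothetical handler body; `alexa` is an AlexaHandlerInput instance
def launch_handler(alexa):
    if alexa.is_launch_request():
        alexa.say("Welcome back!")
        alexa.show_basic_card(
            title="Welcome",
            text="Ask me to play something.",
            large_image_url="https://example.com/card.png",  # small url derived automatically
        )
        alexa.end_session(should_end=False)
    return alexa.response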
import socket

from .base import Test, passive


class Redis(Test):

    def setUp(self):
        super().setUp()
        import zorro.redis
        self.r = zorro.redis.Redis(db=13)


class SingleThread(Redis):

    @passive
    def test_execute(self):
        self.assertEqual(self.r.execute('SET', 'test:key1', 'value'), 'OK')
        self.assertEqual(self.r.execute('GET', 'test:key1'), b'value')
        self.assertEqual(self.r.execute('DEL', 'test:key1'), 1)

    @passive
    def test_reconnect(self):
        self.assertEqual(self.r.execute('SET', 'test:key1', 'value'), 'OK')
        self.r._channel._sock.shutdown(socket.SHUT_RDWR)
        self.z.sleep(0.01)
        self.assertEqual(self.r.execute('GET', 'test:key1'), b'value')
        self.assertEqual(self.r.execute('DEL', 'test:key1'), 1)

    @passive
    def test_disconnect(self):
        fut = self.r.future('SET',
                            'test:big', b'0123456789abcdef'*1000000)
        self.r._channel._sock.shutdown(socket.SHUT_RDWR)
        self.z.sleep(0.01)
        with self.assertRaises(self.z.channel.PipeError):
            fut.get()

    @passive
    def test_bulk(self):
        self.assertEqual(self.r.bulk([
            ('MULTI',),
            ('SET', 'test:key1', '10'),
            ('INCR', 'test:key1'),
            ('DEL', 'test:key1'),
            ('EXEC',),
        ]), ['OK', 11, 1])

    @passive
    def test_keys(self):
        self.r.execute('DEL', 'test:big')
        self.assertEqual(self.r.execute('SET', 'test:key1', 'value'), 'OK')
        self.assertEqual(self.r.execute('SET', 'test:key2', 'value'), 'OK')
        self.assertEqual(set(map(bytes, self.r.execute('KEYS', '*'))),
                         set([b'test:key1', b'test:key2']))
        val = self.r.bulk([('MULTI',),
                           ('GET', 'test:key1'),
                           ('MGET', 'test:key1', 'test:key2'),
                           ('KEYS', '*'),
                           ('EXEC',)])
        self.assertEqual(val[0], b'value')
        self.assertEqual(val[1], [b'value', b'value'])
        self.assertSetEqual(set(map(bytes, val[2])),
                            set([b'test:key1', b'test:key2']))
        self.assertEqual(self.r.execute('DEL', 'test:key1'), 1)
        self.assertEqual(self.r.execute('DEL', 'test:key2'), 1)


class BigTest(Redis):
    test_timeout = 10

    @passive
    def test_time(self):
        def get100():
            for i in range(100):
                self.r.execute('GET', 'test:key1')
        import time
        old = time.time()
        f = []
        for i in range(200):
            f.append(self.z.Future(get100))
        for i in f:
            i.get()
        print("TOTAL TIME", time.time() - old)


if __name__ == '__main__':
    import unittest
    unittest.main()

| tests/test_redis.py |
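For reference, the MULTI/EXEC expectation in test_bulk matches what a plain redis-py pipeline returns for the same commands (illustration only, using the third-party redis package rather than zorro; assumes a local Redis with db 13 available):

import redis

r = redis.Redis(db=13)
pipe = r.pipeline(transaction=True)  # wraps the queued commands in MULTI/EXEC
pipe.set('test:key1', '10')
pipe.incr('test:key1')    # -> 11
pipe.delete('test:key1')  # -> 1
print(pipe.execute())     # [True, 11, 1]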
# -*- coding: utf-8 -*-
from CmdBase import CmdBase
from Msg_Src import Msg_Src
from Msg_Dest import Msg_Dest
from ErrorCommandFormat import ErrorCommandFormat


class CmdSend(CmdBase):
    """
    send command.
    Command Format:
        send <v1>
        send <v1><[-|,]><v2>
    """
    __msg_src = None
    __msg_dest = None
    __mode = None

    def __init__(self, params, srcDb, destMode, destDb, s):
        self.id = ""
        self.name = "send command"
        self.params = params
        self.cmd = "SEND"
        self.__mode = destMode
        self.__msg_src = Msg_Src(srcDb)
        self.__msg_dest = Msg_Dest(destDb)
        self.frm = None
        self.to = None
        self.__s = s
        self.validate()

    def validate(self):
        CmdBase.validate(self)
        if len(self.params) < 2:
            raise ErrorCommandFormat("Command Params Error.")
        else:
            frmtos = self.params[1].replace('-', ',').split(',')
            if len(frmtos) == 1:
                self.frm = frmtos[0]
                self.to = None
            elif len(frmtos) == 2:
                self.frm = frmtos[0]
                self.to = frmtos[1]
            else:
                raise ErrorCommandFormat("Command Params Error.")

    def execute(self):
        try:
            if self.__mode == 'database':
                self.toDatabase()
            elif self.__mode == 'tcp':
                self.toTcp()
            elif self.__mode == 'both':
                self.toDatabase()
                self.toTcp()
        except Exception as e:
            print(e.args)

    def toDatabase(self):
        msg = None
        if self.to is None:
            msg = self.__msg_src.getMsg(self.frm)
        else:
            msg = self.__msg_src.getMsgCollection(self.frm, self.to)
        for id, clientIp, processingData, type in msg:
            print(self.cmd + ' ID=' + str(id) + '...')
            bl = self.__msg_dest.insertMsg(clientIp, processingData, type)
            print(bl)

    def toTcp(self):
        msg = None
        if self.to is None:
            msg = self.__msg_src.getMsg(self.frm)
        else:
            msg = self.__msg_src.getMsgCollection(self.frm, self.to)
        for id, clientIp, processingData, type in msg:
            print(self.cmd + ' ID=' + str(id) + '...')
            self.__s.send(processingData)
            data = self.__s.recv(100)
            if not data:
                continue
            print(data)
            '''tmp = []
            while True:
                data = self.__s.recv(1024)
                if not data:
                    break
                tmp.append(data)
            print(tmp)'''

| CmdSend.py |
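The range argument accepted by validate() above ("send <v1>" or "send <v1>[-|,]<v2>") reduces to a small parsing rule; a standalone sketch of the same logic (illustration only, not part of the class):

def parse_range(arg):
    # "3" -> ("3", None); "3-7" and "3,7" both -> ("3", "7")
    parts = arg.replace('-', ',').split(',')
    if len(parts) == 1:
        return parts[0], None
    if len(parts) == 2:
        return parts[0], parts[1]
    raise ValueError("Command Params Error.")

assert parse_range("3") == ("3", None)
assert parse_range("3-7") == ("3", "7")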
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#    http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""NEMO_Nowcast framework system configuration object.

Provides :py:class:`dict`-like access to the configuration loaded from the
YAML system configuration file.
"""
import os
import re

import attr
import yaml


@attr.s
class Config:
    """Construct a :py:class:`nemo_nowcast.config.Config` instance."""

    #: Path/name of YAML configuration file for the NEMO nowcast system.
    #: Assigned when :py:meth:`~nemo_nowcast.config.Config.load` method
    #: is called.
    file = attr.ib(init=False, default="")
    #: :py:class:`dict` containing the nowcast system configuration
    #: that is read from the configuration file by the
    #: :py:meth:`~nemo_nowcast.config.Config.load` method.
    _dict = attr.ib(init=False, repr=False, default=attr.Factory(dict))

    def __contains__(self, item):
        return item in self._dict

    def __getitem__(self, item):
        return self._dict[item]

    def __setitem__(self, key, value):
        self._dict[key] = value

    def get(self, key, default=None):
        try:
            return self._dict[key]
        except KeyError:
            return default

    def load(self, config_file):
        """Load the YAML config_file.

        The value of config_file is stored on the
        :py:attr:`nemo_nowcast.config.Config.file` attribute.

        :arg config_file: Path/name of YAML configuration file for the NEMO nowcast system.
        :type config_file: :py:class:`pathlib.Path` or str
        """
        self.file = config_file
        with open(config_file, "rt") as f:
            self._dict = yaml.safe_load(f)
        envvar_pattern = re.compile(r"\$\(NOWCAST\.ENV\.(\w*)\)\w*")
        envvar_sub_keys = ("checklist file", "python")
        for key in envvar_sub_keys:
            self._dict[key] = envvar_pattern.sub(self._replace_env, self._dict[key])
        try:
            # Local logging
            self._replace_handler_envvars(
                envvar_pattern, self._dict["logging"]["handlers"]
            )
        except KeyError:
            # Distributed logging
            self._replace_handler_envvars(
                envvar_pattern, self._dict["logging"]["aggregator"]["handlers"]
            )
            self._replace_handler_envvars(
                envvar_pattern, self._dict["logging"]["publisher"]["handlers"]
            )

    def _replace_handler_envvars(self, envvar_pattern, handlers):
        for handler in handlers:
            try:
                handlers[handler]["filename"] = envvar_pattern.sub(
                    self._replace_env, handlers[handler]["filename"]
                )
            except KeyError:
                # Not a file handler
                pass

    @staticmethod
    def _replace_env(var):
        try:
            return os.environ[var.group(1)]
        except KeyError:
            raise KeyError(f"environment variable not set: {var.group(1)}")

| nemo_nowcast/config.py |
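A minimal sketch of the substitution load() performs (the keys shown are the ones the code requires: "checklist file", "python", and a "logging" section; the environment variable name and file contents are hypothetical):

import os

from nemo_nowcast.config import Config

os.environ["NOWCAST_LOGS"] = "/tmp/nowcast"
# nowcast.yaml:
#   checklist file: $(NOWCAST.ENV.NOWCAST_LOGS)/checklist.yaml
#   python: /usr/bin/python3
#   logging:
#     handlers:
#       info_text:
#         filename: $(NOWCAST.ENV.NOWCAST_LOGS)/nowcast.log
config = Config()
config.load("nowcast.yaml")
print(config["checklist file"])  # /tmp/nowcast/checklist.yaml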
from __future__ import absolute_import, unicode_literals, division

from puls.models import Supplier, SupplierForm
from puls.compat import unquote_plus
from puls import app, paginate

import flask


@app.route("/admin/suppliers/", methods=["GET", "POST"],
           endpoint="manage_suppliers")
@app.route("/admin/suppliers/<int:page>/")
@app.template("admin/suppliers/list.html")
@app.logged_in
def list_suppliers(page=1):
    term = flask.request.form.get("term", "")
    if term:
        page = Supplier.search(term)
    else:
        page = paginate(Supplier.objects, page, 20)
    return {"term": term,
            "page": page}


@app.route("/admin/suppliers/search/")
@app.logged_in
def search_suppliers():
    term = flask.request.args.get("term", "")
    if term:
        results = Supplier.search(term)
    else:
        results = Supplier.objects.limit(100)
    return flask.jsonify({"results": [{"id": str(item.id),
                                       "text": str(item.name)}
                                      for item in results]})


@app.route("/admin/suppliers/new/", methods=["GET", "POST"],
           endpoint="add_supplier")
@app.route("/admin/suppliers/<id>/edit/", methods=["GET", "POST"])
@app.template("admin/suppliers/form.html")
@app.logged_in
def edit_supplier(id=None):
    if id is None:
        item = None
    else:
        item = Supplier.objects.get_or_404(id=unquote_plus(id))
    form = SupplierForm(obj=item)
    if form.validate_on_submit():
        if not item:
            item = Supplier()
        form.populate_obj(item)
        item.save()
        flask.flash("The supplier was saved", "success")
        return flask.redirect(flask.url_for("manage_suppliers"))
    return {"form": form,
            "item": item}


@app.route("/admin/suppliers/<id>/delete/")
@app.logged_in
def delete_supplier(id):
    item = Supplier.objects.get_or_404(id=unquote_plus(id))
    item.delete()
    flask.flash("Your supplier has been deleted!", "warning")
    return flask.redirect(flask.url_for("manage_suppliers"))

| puls/views/admin/suppliers.py |
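A hedged smoke test for the list endpoint above (not from the source; it assumes the puls Flask app object is importable as shown, exposes the standard Flask test_client, and that authentication is relaxed in the test configuration):

from puls import app

with app.test_client() as client:
    response = client.get("/admin/suppliers/")
    # 200 when logged in; otherwise expect a redirect to the login page
    print(response.status_code)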
from field.fq import Fq
from field.fq2 import Fq2
from field.fq6 import Fq6
from field.fq12 import Fq12
from bn128.g1 import G1
from bn128.g2 import G2
from bn128.ate import *


class BN128:

    def __init__(self):
        # Parameters for base field (Fq)
        Fq.field_modulus = 21888242871839275222246405745257275088696311157297823662689037894645226208583
        # Parameters for twist field (Fq2)
        Fq2.non_residue = Fq(
            21888242871839275222246405745257275088696311157297823662689037894645226208582
        )
        Fq2.frobenius_coeffs_c1 = [
            Fq(1),
            Fq(
                21888242871839275222246405745257275088696311157297823662689037894645226208582
            ),
        ]
        # Parameters for Fq6
        Fq6.non_residue = Fq2([9, 1])
        Fq6.frobenius_coeffs_c1 = [
            Fq2([1, 0]),
            Fq2(
                [
                    21575463638280843010398324269430826099269044274347216827212613867836435027261,
                    10307601595873709700152284273816112264069230130616436755625194854815875713954,
                ]
            ),
            Fq2(
                [
                    21888242871839275220042445260109153167277707414472061641714758635765020556616,
                    0,
                ]
            ),
            Fq2(
                [
                    3772000881919853776433695186713858239009073593817195771773381919316419345261,
                    2236595495967245188281701248203181795121068902605861227855261137820944008926,
                ]
            ),
            Fq2([2203960485148121921418603742825762020974279258880205651966, 0]),
            Fq2(
                [
                    18429021223477853657660792034369865839114504446431234726392080002137598044644,
                    9344045779998320333812420223237981029506012124075525679208581902008406485703,
                ]
            ),
        ]
        Fq6.frobenius_coeffs_c2 = [
            Fq2([1, 0]),
            Fq2(
                [
                    2581911344467009335267311115468803099551665605076196740867805258568234346338,
                    19937756971775647987995932169929341994314640652964949448313374472400716661030,
                ]
            ),
            Fq2([2203960485148121921418603742825762020974279258880205651966, 0]),
            Fq2(
                [
                    5324479202449903542726783395506214481928257762400643279780343368557297135718,
                    16208900380737693084919495127334387981393726419856888799917914180988844123039,
                ]
            ),
            Fq2(
                [
                    21888242871839275220042445260109153167277707414472061641714758635765020556616,
                    0,
                ]
            ),
            Fq2(
                [
                    13981852324922362344252311234282257507216387789820983642040889267519694726527,
                    7629828391165209371577384193250820201684255241773809077146787135900891633097,
                ]
            ),
        ]
        # Parameters for Fq12
        Fq12.non_residue = Fq2([9, 1])
        Fq12.frobenius_coeffs_c1 = [
            Fq2([1, 0]),
            Fq2(
                [
                    8376118865763821496583973867626364092589906065868298776909617916018768340080,
                    16469823323077808223889137241176536799009286646108169935659301613961712198316,
                ]
            ),
            Fq2(
                [
                    21888242871839275220042445260109153167277707414472061641714758635765020556617,
                    0,
                ]
            ),
            Fq2(
                [
                    11697423496358154304825782922584725312912383441159505038794027105778954184319,
                    303847389135065887422783454877609941456349188919719272345083954437860409601,
                ]
            ),
            Fq2(
                [
                    21888242871839275220042445260109153167277707414472061641714758635765020556616,
                    0,
                ]
            ),
            Fq2(
                [
                    3321304630594332808241809054958361220322477375291206261884409189760185844239,
                    5722266937896532885780051958958348231143373700109372999374820235121374419868,
                ]
            ),
            Fq2(
                [
                    21888242871839275222246405745257275088696311157297823662689037894645226208582,
                    0,
                ]
            ),
            Fq2(
                [
                    13512124006075453725662431877630910996106405091429524885779419978626457868503,
                    5418419548761466998357268504080738289687024511189653727029736280683514010267,
                ]
            ),
            Fq2([2203960485148121921418603742825762020974279258880205651966, 0]),
            Fq2(
                [
                    10190819375481120917420622822672549775783927716138318623895010788866272024264,
                    21584395482704209334823622290379665147239961968378104390343953940207365798982,
                ]
            ),
            Fq2([2203960485148121921418603742825762020974279258880205651967, 0]),
            Fq2(
                [
                    18566938241244942414004596690298913868373833782006617400804628704885040364344,
                    16165975933942742336466353786298926857552937457188450663314217659523851788715,
                ]
            ),
        ]
        G1.field_modulus = Fq.field_modulus
        G2.field_modulus = Fq.field_modulus
        G2.non_residue = Fq2.non_residue
        # y ** 2 = x ** 3 + b (Elliptic Curve Equation)
        self.b = Fq(3)
        # Twist curve parameters
        self.twist = Fq2([9, 1])
        self.twist_b = self.twist.inverse().mul_scalar(self.b)
        self.two_inv = Fq.one() / Fq(2)
        self.twist_mul_by_x = Fq2(
            [
                21575463638280843010398324269430826099269044274347216827212613867836435027261,
                10307601595873709700152284273816112264069230130616436755625194854815875713954,
            ]
        )
        self.twist_mul_by_y = Fq2(
            [
                2821565182194536844548159561693502659359617185244120367078079554186484126554,
                3505843767911556378687030309984248845540243509899259641013678093033130930403,
            ]
        )
        # Ate pairing constants
        self.final_exp = 552484233613224096312617126783173147097382103762957654188882734314196910839907541213974502761540629817009608548654680343627701153829446747810907373256841551006201639677726139946029199968412598804882391702273019083653272047566316584365559776493027495458238373902875937659943504873220554161550525926302303331747463515644711876653177129578303191095900909191624817826566688241804408081892785725967931714097716709526092261278071952560171111444072049229123565057483750161460024353346284167282452756217662335528813519139808291170539072125381230815729071544861602750936964829313608137325426383735122175229541155376346436093930287402089517426973178917569713384748081827255472576937471496195752727188261435633271238710131736096299798168852925540549342330775279877006784354801422249722573783561685179618816480037695005515426162362431072245638324744480
        self.final_exp_z = 4965661367192848881
        self.ate_loop_count = Fq(29793968203157093288)
        self.ate_is_loop_count_neg = False
        self.final_exp_is_z_neg = False
        # Generators
        self.g1_zero = G1.zero()
        self.g1 = G1([1, 2])
        self.g2_zero = G2.zero()
        self.g2 = G2(
            [
                Fq2(
                    [
                        10857046999023057135944570762232829481370756359578518086990519993285655852781,
                        11559732032986387107991004021392285783925812861821192530917403151452391805634,
                    ]
                ),
                Fq2(
                    [
                        8495653923123431417604973247489272438418190587263600148770280649306958101930,
                        4082367875863433681332203403145435568316851327593401208105741076214120093531,
                    ]
                ),
            ]
        )

    def ate_precompute_g1(self, p):
        # Precomputation for G1 just stores the affine coordinates of the point.
        print("Entering precompute G1")
        assert(isinstance(p, G1))
        pcopy = p.affine()
        ate_g1_precomp = AteG1PreComp(pcopy.val[0], pcopy.val[1])
        print("Done with precompute G1")
        return ate_g1_precomp

    def doubling_step_for_flipped_miller_loop(self, curr_g2):
        # Doubles the current twist point in homogeneous projective coordinates
        # and returns the coefficients of the tangent line used in the Miller loop.
        x = curr_g2.val[0]
        y = curr_g2.val[1]
        z = curr_g2.val[2]
        a = x * y
        a = a.mul_scalar(self.two_inv)
        b = y * y
        c = z * z
        d = c + c + c
        e = self.twist_b * d
        f = e + e + e
        g = (b + f)
        g = g.mul_scalar(self.two_inv)
        h = ((y + z) * (y + z)) - (b + c)
        i = e - b
        j = x * x
        e_sq = e * e
        res_x = a * (b - f)
        res_y = (g * g) - (e_sq + e_sq + e_sq)
        res_z = b * h
        ell0 = i * self.twist
        ellvw = -h
        ellvv = j + j + j
        ate_ell_coeff = AteEllCoeffs(ell0, ellvw, ellvv)
        return G2([res_x, res_y, res_z]), ate_ell_coeff

    def mul_by_q(self, q):
        # Applies the q-power Frobenius endomorphism to a twist point.
        assert(isinstance(q, G2))
        fmx = self.twist_mul_by_x * q.val[0].frobenius_map(1)
        fmy = self.twist_mul_by_y * q.val[1].frobenius_map(1)
        fmz = q.val[2].frobenius_map(1)
        return G2([fmx, fmy, fmz])

    def mixed_addition_step_for_flipped_miller(self, base_g2, curr_g2):
        # Adds the affine base point to the current projective point and returns
        # the coefficients of the chord line used in the Miller loop.
        x1 = curr_g2.val[0]
        y1 = curr_g2.val[1]
        z1 = curr_g2.val[2]
        x2 = base_g2.val[0]
        y2 = base_g2.val[1]
        d = x1 - (x2 * z1)
        e = y1 - (y2 * z1)
        f = d * d
        g = e * e
        h = d * f
        i = x1 * f
        j = (h + (z1 * g)) - (i + i)
        res_x = d * j
        res_y = (e * (i - j)) - (h * y1)
        res_z = z1 * h
        ell0 = self.twist * ((e * x2) - (d * y2))
        ellvv = -e
        ellvw = d
        ate_ell_coeff = AteEllCoeffs(ell0, ellvw, ellvv)
        return G2([res_x, res_y, res_z]), ate_ell_coeff

    def ate_precompute_g2(self, q):
        print("Entering precompute G2")
        assert(isinstance(q, G2))
        qcopy = q.affine()
        ate_g2_precomp = AteG2PreComp(qcopy.val[0], qcopy.val[1])
        r = G2([qcopy.val[0], qcopy.val[1], Fq2.one()])
        found_one = False
        for i in range(self.ate_loop_count.bit_length(), -1, -1):
            bit = self.ate_loop_count.bit(i)
            if not found_one:
                found_one |= bit
                continue
            r, c = self.doubling_step_for_flipped_miller_loop(r)
            ate_g2_precomp.coeffs.append(c)
            if bit:
                r, c = self.mixed_addition_step_for_flipped_miller(qcopy, r)
                found_one = True
                ate_g2_precomp.coeffs.append(c)
        q1 = self.mul_by_q(qcopy).affine()
        assert(q1.val[2] == Fq2.one())
        q2 = self.mul_by_q(q1).affine()
        assert(q2.val[2] == Fq2.one())
        if self.ate_is_loop_count_neg:
            r.val[1] = -r.val[1]
        q2.val[1] = -q2.val[1]
        r, c = self.mixed_addition_step_for_flipped_miller(q1, r)
        ate_g2_precomp.coeffs.append(c)
        r, c = self.mixed_addition_step_for_flipped_miller(q2, r)
        ate_g2_precomp.coeffs.append(c)
        print("Done with precompute G2")
        return ate_g2_precomp

    def ate_miller_loop(self, p, q):
        # Miller loop accumulating the precomputed line coefficients,
        # with each line evaluated at the G1 point p.
        assert(isinstance(p, AteG1PreComp) and isinstance(q, AteG2PreComp))
        f = Fq12.one()
        found_one = False
        idx = 0
        c = q.coeffs[idx]
        for i in range(self.ate_loop_count.bit_length(), -1, -1):
            bit = self.ate_loop_count.bit(i)
            if not found_one:
                found_one |= bit
                continue
            c = q.coeffs[idx]
            assert(isinstance(c, AteEllCoeffs))
            idx += 1
            f = f.square()
            f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
            if bit:
                c = q.coeffs[idx]
                f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
                idx += 1
        if self.ate_is_loop_count_neg:
            f = f.inverse()
        c = q.coeffs[idx]
        idx += 1
        f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
        c = q.coeffs[idx]
        idx += 1
        f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
        return f

    def final_exp_first_chunk(self, elt):
        # Easy part of the final exponentiation: elt ** ((q**6 - 1) * (q**2 + 1)).
        assert(isinstance(elt, Fq12))
        a = elt.unitary_inverse()
        b = elt.inverse()
        c = a * b
        d = c.frobenius_map(2)
        result = d * c
        return result

    def final_exp_by_neg_z(self, elt):
        assert(isinstance(elt, Fq12))
        result = elt.cyclotomic_exp(self.final_exp_z)
        if not self.final_exp_is_z_neg:
            result = result.unitary_inverse()
        return result

    def final_exp_last_chunk(self, elt):
        # Hard part of the final exponentiation, built from exponentiations by z.
        assert(isinstance(elt, Fq12))
        a = self.final_exp_by_neg_z(elt)
        b = a.cyclotomic_square()
        c = b.cyclotomic_square()
        d = c * b
        e = self.final_exp_by_neg_z(d)
        f = e.cyclotomic_square()
        g = self.final_exp_by_neg_z(f)
        h = d.unitary_inverse()
        i = g.unitary_inverse()
        j = i * e
        k = j * h
        l = k * b
        m = k * e
        n = m * elt
        o = l.frobenius_map(1)
        p = o * n
        q = k.frobenius_map(2)
        r = q * p
        s = elt.unitary_inverse()
        t = s * l
        u = t.frobenius_map(3)
        v = u * r
        result = v
        return result

    def final_exponentiation(self, elt):
        assert(isinstance(elt, Fq12))
        a = self.final_exp_first_chunk(elt)
        result = self.final_exp_last_chunk(a)
        return result

    def pairing_check(self, p, q):
        assert(isinstance(p, G1) and isinstance(q, G2))
        print("Starting pairing check")
        precomp_g1 = self.ate_precompute_g1(p)
        assert(isinstance(precomp_g1, AteG1PreComp))
        precomp_g2 = self.ate_precompute_g2(q)
        assert(isinstance(precomp_g2, AteG2PreComp))
        result = self.ate_miller_loop(precomp_g1, precomp_g2)
        result = self.final_exponentiation(result)
        print("Done with pairing check")
        return result

| bn128/bn128.py |
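A sketch of how the pairing above can be exercised (illustration only; it assumes the G1/G2 classes expose a scalar multiplication, here called mul_scalar, which is not shown in this file):

from bn128.bn128 import BN128

bn = BN128()
e = bn.pairing_check(bn.g1, bn.g2)  # e(P, Q), an Fq12 element
# Bilinearity means e(aP, Q) equals e(P, Q) raised to the power a; for example,
# bn.pairing_check(bn.g1.mul_scalar(2), bn.g2) should equal e * e.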
from field.fq2 import Fq2
from field.fq6 import Fq6
from field.fq12 import Fq12
from bn128.g1 import G1
from bn128.g2 import G2
from bn128.ate import *
class BN128:
def __init__(self):
# Parameters for base field (Fq)
Fq.field_modulus = 21888242871839275222246405745257275088696311157297823662689037894645226208583
# Parameters for twist field (Fq2)
Fq2.non_residue = Fq(
21888242871839275222246405745257275088696311157297823662689037894645226208582
)
Fq2.frobenius_coeffs_c1 = [
Fq(1),
Fq(
21888242871839275222246405745257275088696311157297823662689037894645226208582
),
]
# Parameters for Fq6
Fq6.non_residue = Fq2([9, 1])
Fq6.frobenius_coeffs_c1 = [
Fq2([1, 0]),
Fq2(
[
21575463638280843010398324269430826099269044274347216827212613867836435027261,
10307601595873709700152284273816112264069230130616436755625194854815875713954,
]
),
Fq2(
[
21888242871839275220042445260109153167277707414472061641714758635765020556616,
0,
]
),
Fq2(
[
3772000881919853776433695186713858239009073593817195771773381919316419345261,
2236595495967245188281701248203181795121068902605861227855261137820944008926,
]
),
Fq2([2203960485148121921418603742825762020974279258880205651966, 0]),
Fq2(
[
18429021223477853657660792034369865839114504446431234726392080002137598044644,
9344045779998320333812420223237981029506012124075525679208581902008406485703,
]
),
]
Fq6.frobenius_coeffs_c2 = [
Fq2([1, 0]),
Fq2(
[
2581911344467009335267311115468803099551665605076196740867805258568234346338,
19937756971775647987995932169929341994314640652964949448313374472400716661030,
]
),
Fq2([2203960485148121921418603742825762020974279258880205651966, 0]),
Fq2(
[
5324479202449903542726783395506214481928257762400643279780343368557297135718,
16208900380737693084919495127334387981393726419856888799917914180988844123039,
]
),
Fq2(
[
21888242871839275220042445260109153167277707414472061641714758635765020556616,
0,
]
),
Fq2(
[
13981852324922362344252311234282257507216387789820983642040889267519694726527,
7629828391165209371577384193250820201684255241773809077146787135900891633097,
]
),
]
# Parameters for Fq12
Fq12.non_residue = Fq2([9, 1])
Fq12.frobenius_coeffs_c1 = [
Fq2([1, 0]),
Fq2(
[
8376118865763821496583973867626364092589906065868298776909617916018768340080,
16469823323077808223889137241176536799009286646108169935659301613961712198316,
]
),
Fq2(
[
21888242871839275220042445260109153167277707414472061641714758635765020556617,
0,
]
),
Fq2(
[
11697423496358154304825782922584725312912383441159505038794027105778954184319,
303847389135065887422783454877609941456349188919719272345083954437860409601,
]
),
Fq2(
[
21888242871839275220042445260109153167277707414472061641714758635765020556616,
0,
]
),
Fq2(
[
3321304630594332808241809054958361220322477375291206261884409189760185844239,
5722266937896532885780051958958348231143373700109372999374820235121374419868,
]
),
Fq2(
[
21888242871839275222246405745257275088696311157297823662689037894645226208582,
0,
]
),
Fq2(
[
13512124006075453725662431877630910996106405091429524885779419978626457868503,
5418419548761466998357268504080738289687024511189653727029736280683514010267,
]
),
Fq2([2203960485148121921418603742825762020974279258880205651966, 0]),
Fq2(
[
10190819375481120917420622822672549775783927716138318623895010788866272024264,
21584395482704209334823622290379665147239961968378104390343953940207365798982,
]
),
Fq2([2203960485148121921418603742825762020974279258880205651967, 0]),
Fq2(
[
18566938241244942414004596690298913868373833782006617400804628704885040364344,
16165975933942742336466353786298926857552937457188450663314217659523851788715,
]
),
]
G1.field_modulus = Fq.field_modulus
G2.field_modulus = Fq.field_modulus
G2.non_residue = Fq2.non_residue
# y ** 2 = x ** 3 + b (Ellipitic Curve Equation)
self.b = Fq(3)
# Twist curve parameters
self.twist = Fq2([9, 1])
self.twist_b = self.twist.inverse().mul_scalar(self.b)
self.two_inv = Fq.one() / Fq(2)
self.twist_mul_by_x = Fq2(
[
21575463638280843010398324269430826099269044274347216827212613867836435027261,
10307601595873709700152284273816112264069230130616436755625194854815875713954,
]
)
self.twist_mul_by_y = Fq2(
[
2821565182194536844548159561693502659359617185244120367078079554186484126554,
3505843767911556378687030309984248845540243509899259641013678093033130930403,
]
)
# Ate pairing constans
self.final_exp = 552484233613224096312617126783173147097382103762957654188882734314196910839907541213974502761540629817009608548654680343627701153829446747810907373256841551006201639677726139946029199968412598804882391702273019083653272047566316584365559776493027495458238373902875937659943504873220554161550525926302303331747463515644711876653177129578303191095900909191624817826566688241804408081892785725967931714097716709526092261278071952560171111444072049229123565057483750161460024353346284167282452756217662335528813519139808291170539072125381230815729071544861602750936964829313608137325426383735122175229541155376346436093930287402089517426973178917569713384748081827255472576937471496195752727188261435633271238710131736096299798168852925540549342330775279877006784354801422249722573783561685179618816480037695005515426162362431072245638324744480
self.final_exp_z = 4965661367192848881
self.ate_loop_count = Fq(29793968203157093288)
self.ate_is_loop_count_neg = False
self.final_exp_is_z_neg = False
# Generators
self.g1_zero = G1.zero()
self.g1 = G1([1, 2])
self.g2_zero = G2.zero()
self.g2 = G2(
[
Fq2(
[
10857046999023057135944570762232829481370756359578518086990519993285655852781,
11559732032986387107991004021392285783925812861821192530917403151452391805634,
]
),
Fq2(
[
8495653923123431417604973247489272438418190587263600148770280649306958101930,
4082367875863433681332203403145435568316851327593401208105741076214120093531,
]
),
]
)
def ate_precompute_g1(self, p):
print("Entering precompute G1")
assert(isinstance(p, G1))
pcopy = p.affine()
ate_g1_precomp = AteG1PreComp(pcopy.val[0], pcopy.val[1])
print("End with precompute G1")
return ate_g1_precomp
def doubling_step_for_flipped_miller_loop(self, curr_g2):
x = curr_g2.val[0]
y = curr_g2.val[1]
z = curr_g2.val[2]
a = x * y
a = a.mul_scalar(self.two_inv)
b = y * y
c = z * z
d = c + c + c
e = self.twist_b * d
f = e + e + e
g = (b + f)
g = g.mul_scalar(self.two_inv)
h = ((y + z) * (y + z)) - (b + c)
i = e - b
j = x * x
e_sq = e * e
res_x = a * (b - f)
res_y = (g * g) - (e_sq + e_sq + e_sq)
res_z = b * h
ell0 = i * self.twist
ellvw = -h
ellvv = j + j + j
ate_ell_coeff = AteEllCoeffs(ell0, ellvw, ellvv)
return G2([res_x, res_y, res_z]), ate_ell_coeff
def mul_by_q(self, q):
assert(isinstance(q, G2))
fmx = self.twist_mul_by_x * q.val[0].frobenius_map(1)
fmy = self.twist_mul_by_y * q.val[1].frobenius_map(1)
fmz = q.val[2].frobenius_map(1)
return G2([fmx, fmy, fmz])
def mixed_addition_step_for_flipped_miller(self, base_g2, curr_g2):
x1 = curr_g2.val[0]
y1 = curr_g2.val[1]
z1 = curr_g2.val[2]
x2 = base_g2.val[0]
y2 = base_g2.val[1]
d = x1 - (x2 * z1)
e = y1 - (y2 * z1)
f = d * d
g = e * e
h = d * f
i = x1 * f
j = (h + (z1 * g)) - (i + i)
res_x = d * j
res_y = (e * (i - j)) - (h * y1)
res_z = z1 * h
ell0 = self.twist * ((e * x2) - (d * y2))
ellvv = -e
ellvw = d
ate_ell_coeff = AteEllCoeffs(ell0, ellvw, ellvv)
return G2([res_x, res_y, res_z]), ate_ell_coeff
def ate_precompute_g2(self, q):
print("Entering precompute G2")
assert(isinstance(q, G2))
qcopy = q.affine()
ate_g2_precomp = AteG2PreComp(qcopy.val[0], qcopy.val[1])
r = G2([qcopy.val[0], qcopy.val[1], Fq2.one()])
found_one = False
for i in range(self.ate_loop_count.bit_length(), -1, -1):
bit = self.ate_loop_count.bit(i)
if not found_one:
found_one |= bit
continue
r, c = self.doubling_step_for_flipped_miller_loop(r)
ate_g2_precomp.coeffs.append(c)
if bit:
r, c = self.mixed_addition_step_for_flipped_miller(qcopy, r)
found_one = True
ate_g2_precomp.coeffs.append(c)
q1 = self.mul_by_q(qcopy).affine()
assert(q1.val[2] == Fq2.one())
q2 = self.mul_by_q(q1).affine()
assert(q2.val[2] == Fq2.one())
if self.ate_is_loop_count_neg:
r.val[1] = -r.val[1]
q2.val[1] = -q2.val[1]
r, c = self.mixed_addition_step_for_flipped_miller(q1, r)
ate_g2_precomp.coeffs.append(c)
r, c = self.mixed_addition_step_for_flipped_miller(q2, r)
ate_g2_precomp.coeffs.append(c)
print("Done with precompute G2")
return ate_g2_precomp
def ate_miller_loop(self, p, q):
assert(isinstance(p, AteG1PreComp) and isinstance(q, AteG2PreComp))
f = Fq12.one()
found_one = False
idx = 0
c = q.coeffs[idx]
for i in range(self.ate_loop_count.bit_length(), -1, -1):
bit = self.ate_loop_count.bit(i)
if not found_one:
found_one |= bit
continue
c = q.coeffs[idx]
assert(isinstance(c, AteEllCoeffs))
idx += 1
f = f.square()
f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
if bit:
c = q.coeffs[idx]
f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
idx += 1
if self.ate_is_loop_count_neg:
f = f.inverse()
c = q.coeffs[idx]
idx += 1
f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
c = q.coeffs[idx]
idx += 1
f = f.mul_by_024(c.ell0, c.ellvw.mul_scalar(p.py), c.ellvv.mul_scalar(p.px))
return f
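    # Note (added): the two additions after the Miller loop consume the line
    # coefficients for the Frobenius images q1 = pi_q(Q) and q2 = pi_q^2(Q)
    # appended by ate_precompute_g2, as the optimal ate pairing requires.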
def final_exp_first_chunk(self, elt):
assert(isinstance(elt, Fq12))
a = elt.unitary_inverse()
b = elt.inverse()
c = a * b
d = c.frobenius_map(2)
result = d * c
return result
def final_exp_by_neg_z(self, elt):
assert(isinstance(elt, Fq12))
result = elt.cyclotomic_exp(self.final_exp_z)
if not self.final_exp_is_z_neg:
result = result.unitary_inverse()
return result
def final_exp_last_chunk(self, elt):
assert(isinstance(elt, Fq12))
a = self.final_exp_by_neg_z(elt)
b = a.cyclotomic_square()
c = b.cyclotomic_square()
d = c * b
e = self.final_exp_by_neg_z(d)
f = e.cyclotomic_square()
g = self.final_exp_by_neg_z(f)
h = d.unitary_inverse()
i = g.unitary_inverse()
j = i * e
k = j * h
l = k * b
m = k * e
n = m * elt
o = l.frobenius_map(1)
p = o * n
q = k.frobenius_map(2)
r = q * p
s = elt.unitary_inverse()
t = s * l
u = t.frobenius_map(3)
v = u * r
result = v
return result
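    # Note (added): final_exp_first_chunk computes elt^((p^6 - 1)(p^2 + 1)), the
    # "easy part" of the final exponentiation; final_exp_last_chunk appears to
    # implement the z-based addition chain for the remaining hard part.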
    def final_exponentiation(self, elt):
assert(isinstance(elt, Fq12))
a = self.final_exp_first_chunk(elt)
result = self.final_exp_last_chunk(a)
return result
def pairing_check(self, p, q):
assert(isinstance(p, G1) and isinstance(q, G2))
print("Starting pairing check")
precomp_g1 = self.ate_precompute_g1(p)
assert(isinstance(precomp_g1, AteG1PreComp))
precomp_g2 = self.ate_precompute_g2(q)
assert(isinstance(precomp_g2, AteG2PreComp))
result = self.ate_miller_loop(precomp_g1, precomp_g2)
        result = self.final_exponentiation(result)
print("Done with pairing check")
        return result
| 0.599837 | 0.176494 |
from .vertex import Vertex
from .object_manager import Manager
class Polygon:
def __init__(self):
self.id = -1
# store the actual vertices, vertices are ordered in sequence!
self.vertices = []
self.vertex_ids = set()
self.edge_ids = set()
self.obstacle_ids = set()
        # gradient holds the coefficients (A, B, C) of the plane equation Ax + By + C = 0
self.gradient = [0, 0, 0]
self.center = Vertex([0, 0])
def add_vertex(self, vertex):
assert(isinstance(vertex, Vertex))
if vertex.id in self.vertex_ids:
return
self.vertex_ids.add(vertex.id)
self.vertices.append(vertex)
    def calculate_center(self):
num = len(self.vertices)
assert(num >= 3), "Polygon has less than 3 vertices"
x = 0.0
y = 0.0
for vtx in self.vertices:
x += vtx.x
y += vtx.y
self.center = Vertex([x / num, y / num])
def get_variable(self):
result = []
        self.calculate_center()
result.append(self.center.get_coords())
vertices_id = []
for v in self.vertices:
vertices_id.append(v.id)
result.append(vertices_id)
result.append(self.gradient)
result.append(self.edge_ids)
result.append(self.obstacle_ids)
return result
class HubPolygon(Polygon):
def __init__(self):
Polygon.__init__(self)
self.hub_vertex_id = -1
self.related_lane_ids = []
class LanePolygon(Polygon):
def __init__(self):
Polygon.__init__(self)
self.related_lane_id = -1
class PolygonManager(Manager):
def __init__(self):
Manager.__init__(self)
        # maps hub_vertex_id -> hub polygon id
self.polygon_map = dict()
def update_polygon_set(self, polygon):
assert(polygon.id >= 0)
if(isinstance(polygon, HubPolygon)):
self.polygon_map[polygon.hub_vertex_id] = polygon.id
def get_hub_polygon_from_hub_vertex(self, hub_vertex_id):
if(hub_vertex_id in self.polygon_map):
return self.polygon_map[hub_vertex_id]
        return -1
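# --- Usage sketch (added; not part of the original module) ---
# Assumes Vertex([x, y]) stores coordinates and that callers assign .id,
# which this module implies but never shows.
#
# poly = HubPolygon()
# for i, coords in enumerate([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]]):
#     v = Vertex(coords)
#     v.id = i
#     poly.add_vertex(v)
# poly.calculate_center()  # centroid of the vertices: (1/3, 1/3)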
| rmf_building_map_tools/building_crowdsim/navmesh/polygon.py | 0.752195 | 0.45181 |
import numpy as np
import torch
import torch.nn.functional as F
from copy import deepcopy
import model_zoo
from model_zoo.utils.training import save_best
from model_zoo import utils
class MaxLikelihoodClassifier(torch.nn.Module):
def __init__(self, input_dim, num_classes, model_class, model_kwargs):
super().__init__()
model_constructor = getattr(model_zoo.architecture, model_class)
self.model = model_constructor(input_dim, num_classes, **model_kwargs)
self.register_buffer("input_mean", torch.zeros(input_dim, dtype=torch.get_default_dtype()))
self.register_buffer("input_std", torch.ones(input_dim, dtype=torch.get_default_dtype()))
self.train_checkpoint = deepcopy(self.state_dict())
self.eval_checkpoint = deepcopy(self.state_dict())
def forward(self, inputs):
"""
Args:
inputs (torch.Tensor): [n x input_dim]
Returns:
            logits (torch.Tensor): [n x num_classes]
"""
self._check_dim(inputs)
inputs = inputs.to(self.device)
inputs = (inputs - self.input_mean) / self.input_std
logits = self.model.forward(inputs)
return logits
def predict(self, inputs, compat_mode='np'):
"""
Args:
inputs (np.array): [num_batch x input_dim] or [num_batch x seq_len x input_dim]
compat_mode: 'np' or 'torch'
Returns:
            logits (np.array or torch.Tensor): [*batch_shape x num_classes]
"""
        if not torch.is_tensor(inputs):
            inputs = torch.tensor(inputs, dtype=torch.get_default_dtype())
logits = self(inputs)
if compat_mode == 'np':
logits = logits.detach().cpu().numpy()
elif compat_mode == 'torch':
pass
else:
raise ValueError("unrecognized compatibility mode, use 'np' (NumPy) or 'torch' (PyTorch)")
return logits
def sample(self, np_inputs):
        # Categorical needs tensor logits, so request the torch-compatible output.
        logits = self.predict(np_inputs, compat_mode='torch')
pred_dist = torch.distributions.Categorical(logits=logits)
return pred_dist.sample()
def validate(self, val_loader):
metrics = {'val_acc': 0., 'val_loss': 0.}
for inputs, targets in val_loader:
self.model.reset()
with torch.no_grad():
logits = self(inputs)
targets = targets.to(logits).long()
metrics['val_acc'] += utils.metrics.top_k_accuracy(logits, targets, k=1) / len(val_loader)
metrics['val_loss'] += self.loss_fn(inputs, targets).item() / len(val_loader)
return metrics
def fit(self, dataset, fit_params):
"""
:param dataset (model_zoo.utils.data.Dataset)
:param fit_params = {
lr=1e-3,
weight_decay=1e-4,
batch_size=32,
logvar_penalty_coeff=1e-2,
early_stopping=True,
wait_epochs=10,
wait_tol=1e-3,
}
:return: metrics dict
"""
fit_params = dict(fit_params)
val_loader = dataset.get_loader(fit_params['batch_size'], split='holdout')
val_loss = self.validate(val_loader)["val_loss"]
snapshot = (0, val_loss, self.eval_checkpoint)
self.load_state_dict(self.train_checkpoint)
normalize = fit_params.setdefault('normalize', True)
if normalize:
# note that self.input_mean is used to determine self.device, so be careful here
input_stats, target_stats = dataset.get_stats(compat_mode='torch')
self.input_mean, self.input_std = [arr.to(self.device) for arr in input_stats]
# main training loop
train_loader = dataset.get_loader(fit_params['batch_size'])
# adding this to the config causes issues with nested instantiation.
# optimizer = hydra.utils.instantiate(fit_params['optimizer'], params=self._optim_p_groups)
optimizer = torch.optim.Adam(self._optim_p_groups, lr=fit_params["lr"], weight_decay=fit_params['weight_decay'])
snapshot, train_metrics = self._training_loop(train_loader, optimizer,
val_loader, snapshot, fit_params)
self.train_checkpoint = deepcopy(self.state_dict())
_, holdout_loss, self.eval_checkpoint = snapshot
self.load_state_dict(self.eval_checkpoint)
self.eval()
fit_metrics = self.validate(val_loader)
fit_metrics.update(train_metrics)
return fit_metrics
def loss_fn(self, inputs, targets):
logits = self(inputs)
targets = targets.to(logits.device)
return F.cross_entropy(logits, targets)
def likelihood(self, logits, targets):
return -self.loss_fn(logits, targets)
def _training_loop(self, train_loader, optimizer, val_loader, snapshot, fit_params):
metrics = {'train_loss': [], 'val_loss': []}
num_batches = len(train_loader)
num_updates = 0
alpha = 2 / (num_batches + 1) # exp. moving average parameter
train_loss = None
early_stopping = fit_params.setdefault('early_stopping', False)
wait_epochs = fit_params.setdefault('wait_epochs', None)
wait_tol = fit_params.setdefault('wait_tol', None)
max_grad_norm = fit_params.setdefault('max_grad_norm', None)
max_updates = fit_params.setdefault('max_updates', None)
exit_training = False
epoch, _, _ = snapshot
while not exit_training:
self.train()
for inputs, targets in train_loader:
optimizer.zero_grad()
loss = self.loss_fn(inputs, targets.long())
loss.backward()
if max_grad_norm:
self._clip_grads(optimizer, max_grad_norm)
optimizer.step()
train_loss = loss.item() if train_loss is None else ((1 - alpha) * train_loss + alpha * loss.item())
num_updates += 1
if max_updates is not None and num_updates == max_updates:
exit_training = True
break
self.eval()
with torch.no_grad():
holdout_metrics = self.validate(val_loader)
conv_metric = holdout_metrics['val_loss'] if early_stopping else train_loss
converged, snapshot = save_best(self, conv_metric, epoch, snapshot, wait_epochs, wait_tol)
exit_training = converged if converged else exit_training
metrics['train_loss'].append(train_loss)
metrics['val_loss'].append(holdout_metrics['val_loss'])
epoch += 1
return snapshot, metrics
def _clip_grads(self, optimizer, max_grad_norm):
assert len(optimizer.param_groups) == 1
params = optimizer.param_groups[0]['params']
torch.nn.utils.clip_grad_norm_(params, max_grad_norm)
def _check_dim(self, inputs):
if torch.is_tensor(inputs):
if inputs.dim() < 2 or inputs.dim() > 3:
raise ValueError('2D or 3D inputs expected')
elif isinstance(inputs, np.ndarray):
if inputs.ndim < 2 or inputs.ndim > 3:
raise ValueError('2D or 3D inputs expected')
@property
def _optim_p_groups(self):
return self.parameters()
def reset(self):
self.model.reset()
@property
def device(self):
        return self.input_mean.device
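# --- Usage sketch (added; not part of the original module) ---
# 'MLP' and its kwargs are placeholders: any class exported by
# model_zoo.architecture with an (input_dim, num_classes, **kwargs)
# constructor should work here.
#
# clf = MaxLikelihoodClassifier(input_dim=16, num_classes=3,
#                               model_class='MLP',
#                               model_kwargs={'hidden_dims': [64, 64]})
# metrics = clf.fit(dataset, {'lr': 1e-3, 'weight_decay': 1e-4,
#                             'batch_size': 32, 'early_stopping': True,
#                             'wait_epochs': 10, 'wait_tol': 1e-3})
# class_ids = clf.sample(np_test_inputs)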
| model_zoo/classifier/mle_classifier.py | 0.945562 | 0.435361 |
import os
import pytest
import json
from typing import (
Sequence,
)
from staking_deposit.utils.constants import (
MNEMONIC_LANG_OPTIONS,
)
from staking_deposit.key_handling.key_derivation.mnemonic import (
_index_to_word,
_get_word_list,
abbreviate_words,
get_seed,
get_mnemonic,
reconstruct_mnemonic,
)
WORD_LISTS_PATH = os.path.join(os.getcwd(), 'staking_deposit', 'key_handling', 'key_derivation', 'word_lists')
all_languages = MNEMONIC_LANG_OPTIONS.keys()
test_vector_filepath = os.path.join('tests', 'test_key_handling',
                                    'test_key_derivation', 'test_vectors', 'mnemonic.json')
with open(test_vector_filepath, 'r', encoding='utf-8') as f:
test_vectors = json.load(f)
@pytest.mark.parametrize(
'language,test',
[(language, test) for language, language_test_vectors in test_vectors.items() for test in language_test_vectors]
)
def test_bip39(language: str, test: Sequence[str]) -> None:
test_entropy = bytes.fromhex(test[0])
test_mnemonic = test[1]
test_seed = bytes.fromhex(test[2])
assert get_mnemonic(language=language, words_path=WORD_LISTS_PATH, entropy=test_entropy) == test_mnemonic
assert get_seed(mnemonic=test_mnemonic, password='<PASSWORD>') == test_seed
@pytest.mark.parametrize(
'test_mnemonic',
[(test_mnemonic[1])
for _, language_test_vectors in test_vectors.items()
for test_mnemonic in language_test_vectors]
)
def test_reconstruct_mnemonic(test_mnemonic: str) -> None:
assert reconstruct_mnemonic(test_mnemonic, WORD_LISTS_PATH) is not None
def abbreviate_mnemonic(mnemonic: str) -> str:
words = str.split(mnemonic)
words = abbreviate_words(words)
assert all([len(word) <= 4 for word in words])
return str.join(' ', words)
@pytest.mark.parametrize(
'test_mnemonic',
[abbreviate_mnemonic(test_mnemonic[1])
for _, language_test_vectors in test_vectors.items()
for test_mnemonic in language_test_vectors]
)
def test_reconstruct_abbreviated_mnemonic(test_mnemonic: str) -> None:
assert reconstruct_mnemonic(test_mnemonic, WORD_LISTS_PATH) is not None
@pytest.mark.parametrize(
'language', ['english']
)
@pytest.mark.parametrize(
'index, valid',
[
(0, True),
(2047, True),
(2048, False),
]
)
def test_get_word(language: str, index: int, valid: bool) -> None:
word_list = _get_word_list(language, WORD_LISTS_PATH)
if valid:
_index_to_word(word_list=word_list, index=index)
else:
with pytest.raises(IndexError):
            _index_to_word(word_list=word_list, index=index)
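# Note (added): abbreviate_words truncates each word to at most four characters
# (see the assertion in abbreviate_mnemonic above). BIP-39 words are unique in
# their first four letters, so e.g. "abandon" -> "aban" still reconstructs.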
| tests/test_key_handling/test_key_derivation/test_mnemonic.py | 0.637934 | 0.423518 |
from mxnet import ndarray as nd
from mxnet import autograd
from mxnet import gluon
import matplotlib as mpl
mpl.rcParams['figure.dpi'] = 120
import matplotlib.pyplot as plt
num_train = 1000
num_test = 100
true_w = [1.2, -3.4, 5.6]
true_b = 5.0
x = nd.random.normal(shape=(num_train + num_test, 1))
X = nd.concat(x, nd.power(x, 2), nd.power(x, 3))
y = true_w[0] * X[:, 0] + true_w[1] * X[:, 1] + true_w[2] * X[:, 2] + true_b
y += 0.1 * nd.random.normal(shape=y.shape)
def train(X_train, X_test, y_train, y_test):
net = gluon.nn.Sequential()
with net.name_scope():
        net.add(gluon.nn.Dense(1))  # a single output neuron
net.initialize()
learning_rate = 0.01
epochs = 100
batch_size = min(10, y_train.shape[0])
dataset_train = gluon.data.ArrayDataset(X_train, y_train)
data_iter_train = gluon.data.DataLoader(dataset_train, batch_size, shuffle=True)
trainer = gluon.Trainer(net.collect_params(), 'sgd', {"learning_rate": learning_rate})
square_loss = gluon.loss.L2Loss()
train_loss = []
test_loss = []
for e in range(epochs):
for data, label in data_iter_train:
with autograd.record():
output = net(data)
loss = square_loss(output, label)
loss.backward()
trainer.step(batch_size)
train_loss.append(square_loss(net(X_train), y_train).mean().asscalar())
test_loss.append(square_loss(net(X_test), y_test).mean().asscalar())
plt.plot(train_loss)
plt.plot(test_loss)
plt.legend(['train', 'test'])
    title = 'learned weight: ' + str(net[0].weight.data()) + '  learned bias: ' + str(net[0].bias.data())
title = title.replace('\n', '')
plt.title(title, color='blue', wrap=True)
plt.show()
return ('learned weight', net[0].weight.data(), 'learned bias', net[0].bias.data())
train(X[:num_train, :], X[num_train:, :], y[:num_train], y[num_train:])
# Underfitting variant: only the raw linear feature x instead of [x, x^2, x^3]
# train(x[:num_train, :], x[num_train:, :], y[:num_train], y[num_train:])
# Overfitting variant: train on just two samples
# train(X[0:2, :], X[num_train:, :], y[0:2], y[num_train:])
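# Note (added): with the cubic features and 1000 training samples the train and
# test L2Loss curves should track each other closely; the commented-out variants
# above demonstrate underfitting and overfitting respectively.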
| MXnet/supervised-learning/over_fit_under_fit.py | 0.663233 | 0.614192 |
import os
import sys
import string
import re
import optparse
import CGAT.Experiment as E
def main(argv=None):
"""script main.
parses command line options in sys.argv, unless *argv* is given.
"""
if argv is None:
argv = sys.argv
parser = E.OptionParser(
version="%prog version: $Id: cgat_log2wiki.py 2782 2009-09-10 11:40:29Z andreas $")
parser.add_option("-s", "--start", dest="start", type="string",
help="start of section.")
parser.add_option("-e", "--end", dest="end", type="string",
help="end of section.")
parser.add_option("-l", "--level", dest="level", type="int",
help="depth of sections.")
parser.set_defaults(
level=2,
start=None,
end=None)
(options, args) = E.Start(parser)
if not options.start:
keep = True
else:
keep = False
section_id1 = 0
section_id2 = 0
last_l = None
for line in sys.stdin:
if options.start and not keep and re.search(options.start, line):
keep = True
if not keep:
continue
if options.end and re.search(options.end, line):
break
if line[0] != "#":
l = "| " + re.sub("\t", " | ", line[:-1]) + " | "
else:
if last_l:
if re.match("# ", line):
header = line[2:-1]
print last_l % header
continue
last_l = None
if re.match("#----------------------------", line):
section_id2 += 1
last_l = "\n---%s subsection %i: %%s" % ("+" *
(options.level + 1), section_id2)
l = None
elif re.match("#===========================", line):
section_id1 += 1
section_id2 = 0
last_l = "\n---%s section %i: %%s" % ("+" *
(options.level), section_id1)
l = None
else:
l = line[:-1]
if l:
print l
E.Stop()
if __name__ == "__main__":
    sys.exit(main(sys.argv))
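# --- Illustrative example (added; not part of the original script) ---
# Given log input such as:
#   #===========================
#   # Mapping statistics
#   reads<TAB>mapped
# the script emits TWiki-style markup (with the default --level of 2):
#   ---++ section 1: Mapping statistics
#   | reads | mapped |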
| scripts/cgat_log2wiki.py | 0.065852 | 0.100437 |
from __future__ import absolute_import, unicode_literals
from adminsortable2.admin import CustomInlineFormSet as SortableInlineFormSet
from django import forms
from django.forms.models import BaseInlineFormSet
from django.utils.module_loading import import_string
from parler.forms import TranslatableModelForm
from shopit.conf import app_settings
from shopit.models.product import AttributeValue, Product
from shopit.utils import get_error_message as em
try:
TextEditor = import_string(app_settings.TEXT_EDITOR)
except ImportError: # pragma: no cover
from django.forms.widgets import Textarea as TextEditor
class ProductModelForm(TranslatableModelForm):
class Meta:
model = Product
exclude = []
widgets = {
'_caption': forms.Textarea(attrs={'rows': 2}),
'_description': TextEditor(),
}
def clean_slug(self):
slug = self.cleaned_data.get('slug')
queryset = Product.objects.translated(slug=slug)
if self.instance.pk is not None:
queryset = queryset.exclude(pk=self.instance.pk)
if queryset.exists():
raise forms.ValidationError(em('duplicate_slug'))
return slug
def clean_kind(self):
kind = self.cleaned_data.get('kind')
if kind != Product.GROUP:
if self.instance.variants.exists():
raise forms.ValidationError(em('not_group_has_variants'))
return kind
def clean_category(self):
return self._clean_categorization('category') # pragma: no cover
def clean_brand(self):
return self._clean_categorization('brand') # pragma: no cover
def clean_manufacturer(self):
return self._clean_categorization('manufacturer') # pragma: no cover
def clean__tax(self):
tax = self.cleaned_data.get('_tax')
kind = self.cleaned_data.get('kind')
if kind == Product.VARIANT and tax is not None:
raise forms.ValidationError(em('variant_has_tax'))
return tax
def clean_group(self):
group = self.cleaned_data.get('group')
kind = self.cleaned_data.get('kind')
if group is None:
if kind == Product.VARIANT:
raise forms.ValidationError(em('variant_no_group'))
else:
if kind != Product.VARIANT:
raise forms.ValidationError(em('group_has_group'))
if group.is_variant:
raise forms.ValidationError(em('varinat_group_variant'))
return group
def clean_available_attributes(self):
attrs = self.cleaned_data.get('available_attributes')
kind = self.cleaned_data.get('kind')
if kind != Product.GROUP and attrs:
raise forms.ValidationError(em('not_group_has_available_attributes'))
elif kind == Product.GROUP and not attrs:
raise forms.ValidationError(em('group_no_available_attributes'))
return attrs
def _clean_categorization(self, name):
data = self.cleaned_data.get(name, None)
kind = self.cleaned_data.get('kind')
if data and kind == Product.VARIANT:
raise forms.ValidationError(em('variant_has_category'))
return data
class AttributeChoiceInlineFormSet(SortableInlineFormSet):
def clean(self):
super(AttributeChoiceInlineFormSet, self).clean()
instance = getattr(self, 'instance', None)
if any(self.errors) or instance is None:
return
clean_forms = [x for x in self.forms if 'value' in x.cleaned_data and not x.cleaned_data['DELETE']]
if not clean_forms:
raise forms.ValidationError(em('attribute_no_choices'))
clean_values = [x.cleaned_data['value'] for x in clean_forms]
if len(clean_values) != len(set(clean_values)):
raise forms.ValidationError(em('attribute_duplicate_choices'))
class AttributeValueInlineFormSet(BaseInlineFormSet):
def clean(self):
super(AttributeValueInlineFormSet, self).clean()
instance = getattr(self, 'instance', None)
if any(self.errors) or instance is None:
return
clean_forms = [x for x in self.forms if 'attribute' in x.cleaned_data and not x.cleaned_data['DELETE']]
if instance.is_variant:
if not clean_forms:
raise forms.ValidationError(em('variant_no_attributes'))
elif self.variant_exists(instance, clean_forms):
raise forms.ValidationError(em('variant_already_exists'))
elif clean_forms:
raise forms.ValidationError(em('not_variant_has_attributes'))
    def variant_exists(self, variant, value_forms):
        """
        Checks whether a variant with these attribute values already exists.
        The parameter is named value_forms to avoid shadowing the django.forms
        import above.
        """
variations = [dict(x[1]) for x in variant.group.get_variations() if x[0] != variant.pk]
if variations:
attrs = {}
            for form in value_forms:
del form.cleaned_data['DELETE']
key = form.cleaned_data['attribute'].key
attrs[key] = AttributeValue(**form.cleaned_data).as_dict
return attrs in variations
class AttributeValueModelForm(forms.ModelForm):
class Meta:
model = AttributeValue
fields = ['attribute', 'choice']
def clean_choice(self):
choice = self.cleaned_data.get('choice')
attribute = self.cleaned_data.get('attribute')
# Make sure correct choice is selected for the attribute.
if attribute and not attribute.nullable:
if choice not in attribute.get_choices():
raise forms.ValidationError(em('incorrect_attribute_choice'))
        return choice
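# --- Usage sketch (added; not part of the original module) ---
# These forms and formsets are meant to be attached to Django admin inlines;
# the inline class below is hypothetical, only the form/formset names come
# from this module.
#
# class AttributeValueInline(admin.TabularInline):
#     model = AttributeValue
#     form = AttributeValueModelForm
#     formset = AttributeValueInlineFormSet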
| shopit/forms/product.py | 0.516108 | 0.172694 |
class LinkedList_Example:
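    # Note (added): this example assumes MeowMeow (the linked-list class under
    # test) and LinkedListItem are defined elsewhere; neither is imported here.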
## initialization
def init(this):
LinkedList = MeowMeow()
assert LinkedList.getFront() == None
assert LinkedList.getBack() == None
## Adding to empty LinkedList
def AddToEmptyList(this, obj):
#1
LinkedList = MeowMeow()
item = 12
LinkedList.appendBack(item)
assert LinkedList.getBack().obj == item
assert LinkedList.getFront().obj == item
#2
LinkedList = MeowMeow()
item = 18
LinkedList.appendFront(item)
assert LinkedList.getBack().obj == item
assert LinkedList.getFront().obj == item
## Appending from back
## Next pointer towards the back has none
def backLast(this):
LinkedList = MeowMeow()
LinkedList.appendBack(0)
assert LinkedList.getBack().next == None
LinkedList.appendBack(123)
assert LinkedList.getBack().next == None
## Appending from front
## Previous pointer towards the front has none
def frontPrev(this):
LinkedList = MeowMeow()
LinkedList.appendFront(0)
assert LinkedList.getFront().prev == None
LinkedList.appendFront(123)
assert LinkedList.getFront().prev == None
## deletion
def remove(this):
LinkedList = MeowMeow()
item1 = 141
item2 = 123
item3 = 552
LinkedList.appendBack(item1)
LinkedList.appendBack(item2)
LinkedList.appendBack(item3)
front = LinkedList.getFront()
back = LinkedList.getBack()
mid = front.next
assert front.next.obj == back.prev.obj
LinkedList.remove(mid)
assert front.next.obj == back.obj
assert back.prev.obj == front.obj
## Deletion of item
def removeItem(this):
LinkedList = MeowMeow()
item = LinkedListItem(123, None, None)
LinkedList.remove(item)
## Appending back to back
## outcome: This makes the previous item take one step backward
def appendBackNext(this):
LinkedList = MeowMeow()
item1 = 123
item2 = 1233
LinkedList.appendBack(item1)
LinkedList.appendBack(item2)
assert LinkedList.getBack().prev.obj == item1
assert LinkedList.getBack().prev.next.obj == item2
## Appending front to front
## outcome: This makes the previous item take one step forward
def appendFrontPrev(this):
LinkedList = MeowMeow()
item1 = 123
item2 = 1233
LinkedList.appendFront(item1)
LinkedList.appendFront(item2)
assert LinkedList.getFront().next.obj == item1
assert LinkedList.getFront().next.prev.obj == item2
## Empty list having zero length
def zeroLen(this):
LinkedList = MeowMeow()
assert LinkedList.length == 0
## Empty list having non-zero length
def nonZeroLen(this):
LinkedList = MeowMeow()
item1 = 123
item2 = 34234
item3 = 1212
LinkedList.appendFront(item1)
assert LinkedList.length == 1
LinkedList.appendBack(item2)
assert LinkedList.length == 2
LinkedList.appendFront(item3)
assert LinkedList.length == 3
## Removing items reduces length
def reduceLen(this):
LinkedList = MeowMeow()
        item_to_remove = LinkedListItem(1414141414, None, None)
        LinkedList.remove(item_to_remove)
assert LinkedList.length == 0
LinkedList.appendFront(123)
LinkedList.appendFront(12312)
assert LinkedList.length == 2
item = LinkedList.getFront()
LinkedList.remove(item)
assert LinkedList.length == 1
item = LinkedList.getFront()
LinkedList.remove(item)
        assert LinkedList.length == 0
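# Note (added): taken together, the checks above pin down the expected API:
# appendFront/appendBack, getFront/getBack, remove(item), a length attribute,
# and nodes exposing .obj, .next and .prev.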
| resource-1/examples/LinkedList_Example.py | 0.66888 | 0.442576 |
from pybreaker.manager import CircuitBreakerManager, CircuitBreakerError
from time import sleep
from threading import Thread
import os
import logging
from argparse import ArgumentParser
import sys
from ConfigParser import SafeConfigParser
log = logging.getLogger(__file__)
class InvalidIdentifier(Exception):
def __init__(self, ident):
super(InvalidIdentifier, self).__init__("Invalid identifier: {id}".format(id=ident))
class SpinningFifoManager(CircuitBreakerManager):
@classmethod
def from_config(cls, config_parser, exclude=None, listeners=None,
exception=CircuitBreakerError):
"""
Create a new SpinningFifoManager using values from a configuration file
Configs used are:
[pybreaker]
input_path = <path>
output_path = <path>
fail_max = <int, default 5>
        reset_timeout = <float, default 60>
sleep_delay = <float, default .1>
`input_path`: the path to read incoming commands from. Works best with a FIFO
pipe. If the path contains the string {pid}, the current process id will
be substituted into it
`output_path`: the path to write command results. Each result will overwrite
the previous. If the path contains the string {pid}, the current process id
will be substituted into it
`fail_max`: the maximum number of times an action can fail before the breaker
opens
`reset_timeout`: the number of seconds that it takes a breaker to reset from
open to half_open
`sleep_delay`: the number of seconds the i/o thread will sleep after not
        finding a command to execute
"""
        def opt(name, default):
            # ConfigParser.get() in Python 2 accepts no default value, so fall
            # back by hand when an option is missing.
            if config_parser.has_option('pybreaker', name):
                return config_parser.get('pybreaker', name)
            return default
        return cls(
            config_parser.get('pybreaker', 'input_path').format(pid=os.getpid()),
            config_parser.get('pybreaker', 'output_path').format(pid=os.getpid()),
            int(opt('fail_max', 5)),
            float(opt('reset_timeout', 60)),
            exclude,
            listeners,
            exception,
            float(opt('sleep_delay', .1)),
        )
def __init__(self, input_pipe, output_file, fail_max=5, reset_timeout=60,
exclude=None, listeners=None, exception=CircuitBreakerError,
sleep_delay=.1):
"""
Create a new SpinningFifoManager. All arguments not listed are as for a normal
CircuitBreakerManager
`input_pipe`: the path to read incoming commands from. Works best with a FIFO
pipe
`output_file`: the path to write command results. Each result will overwrite
the previous
`sleep_delay`: the number of seconds the i/o thread will sleep after not
        finding a command to execute
This manager spawns a daemonized thread that watches the `input_pipe` for
commands, and then dumps results from those commands into `output_file`.
The available commands are:
status [IDENT]
open IDENT
close IDENT
half_open IDENT
force_open IDENT
force_closed IDENT
        IDENT identifies a circuit breaker and its parameters (if any), and can
be found in the first column of the results of calling the 'status' command
"""
super(SpinningFifoManager, self).__init__(fail_max, reset_timeout,
exclude, listeners, exception)
self._input_pipe = input_pipe
self._output_file = output_file
self.sleep_delay = sleep_delay
self.ident_map = {}
self.command_thread = Thread(target=self.io_loop)
self.command_thread.daemon = True
self.command_thread.start()
def io_loop(self):
"""
Loop forever, checking for a line of input from the input_pipe,
checking if the line is a command, and executing the command if it is
"""
if not os.path.exists(self._input_pipe):
os.mkfifo(self._input_pipe)
with open(self._input_pipe) as input_pipe:
while True:
next_line = input_pipe.readline().strip()
if next_line == '':
sleep(self.sleep_delay)
continue
cmd, _, ident = next_line.partition(' ')
try:
getattr(self, 'do_'+cmd)(ident)
except AttributeError:
log.exception('Invalid command: '+next_line)
self.error('Invalid command: '+next_line)
except InvalidIdentifier as exc:
self.error(exc)
except:
log.exception('Unexpected error')
self.error('Unexpected error')
def _compute_id(self, key):
"""
Returns the IDENT for a particular key from the CircuitBreakerManager.status()
array. This key uniquely identifies the relevant circuit breaker
"""
# These indicate a parameterized circuit breaker
# used with no parameters, so we'll strip off the excess
# baggage
        if key[1] is None or key[1] == ((), ()):
return str(key[0])
else:
return str(key)
def status(self):
"""
Returns the CircuitBreakerManager.status() array, and caches a map from
of IDENT keys to status() array entries
"""
status_dict = super(SpinningFifoManager, self).status()
self.ident_map = dict(
(self._compute_id(key), key)
for key in status_dict.keys()
)
return status_dict
def process_ident(self, ident):
"""
Returns the key in the status() array that is identified by the particular
IDENT key.
Raises InvalidIdentifier if no key could be found
"""
if ident in self.ident_map:
return self.ident_map[ident]
# Recompute the identity map
self.status()
if ident in self.ident_map:
return self.ident_map[ident]
raise InvalidIdentifier(ident)
def _format_status(self, items):
"""
Formats the status array into a text table of data. Each column is
headed by the column name, and separated by 4 spaces (' ')
"""
items.sort()
column_order = ['state', 'time until half_open', 'failure count', 'failures until open']
lines = [['id'] + column_order]
for key, status in items:
lines.append([self._compute_id(key)] + [str(status[col]) for col in column_order])
cols = zip(*lines)
col_widths = [max(len(value) for value in col) for col in cols]
format = ' '.join(['%%%ds' % width for width in col_widths ])
return '\n'.join(format % tuple(line) for line in lines)
def do_status(self, ident):
"""
Execute the status command
"""
if ident == '':
self.success(self._format_status(self.status().items()))
else:
key = self.process_ident(ident)
self.success(self._format_status([(key, self.status()[key])]))
def do_open(self, ident):
"""
Execute the open command on the circuit breaker specified by IDENT
"""
self._set_state('open', ident)
def do_half_open(self, ident):
"""
Execute the half_open command on the circuit breaker specified by IDENT
"""
self._set_state('half_open', ident)
def do_close(self, ident):
"""
Execute the close command on the circuit breaker specified by IDENT
"""
self._set_state('close', ident)
def do_force_open(self, ident):
"""
Execute the force_open command on the circuit breaker specified by IDENT
"""
self._set_state('force_open', ident)
def do_force_closed(self, ident):
"""
Execute the force_closed command on the circuit breaker specified by IDENT
"""
self._set_state('force_closed', ident)
def _set_state(self, action, ident):
"""
Execute `action` on the circuit breaker specified by IDENT
"""
breaker_ident, params = self.process_ident(ident)
getattr(self, action)(breaker_ident, params)
self.success("Changed `{ident}` state".format(ident=ident))
def _output(self, flag, data):
"""
Write a `flag` and some `data` to the output file
"""
with open(self._output_file, 'w') as output:
print >> output, flag
print >> output, data
def error(self, line):
"""
Write the result of a failed command to the output file
"""
self._output('ERROR', line)
def success(self, data):
"""
Write the result of a successful command to the output file
"""
self._output('SUCCESS', data)
def argparser():
parser = ArgumentParser()
parser.add_argument('-i', '--input-path', default=None,
help='The path that the program is watching for commands. May include the '
'string "{pid}", which will have the contents of the --pid argument '
'substituted in. Will override an input_path supplied via the --config '
'argument.')
parser.add_argument('-o', '--output-path', default=None,
help='The path that the program will write responses to. May include the '
'string "{pid}", which will have the contents of the --pid argument '
'substituted in. Will override an output_path supplied via the --config '
'argument.')
parser.add_argument('-p', '--pid', default=None,
help='The process id of the process to connect to. Will be substituted in to '
'the input_path and output_path if they use the "{pid}" string.')
parser.add_argument('-c', '--config', default=None,
help='The path to a config file that specifies the input_path and output_path '
'of the process to connect to.')
subparsers = parser.add_subparsers()
status_parser = subparsers.add_parser('status',
help='Find the status of one or all of the active circuit breakers')
status_parser.add_argument('ident', nargs='?', default='',
help='The id string of the circuit breaker to display the status of. If not '
'given, return the status of all circuit breakers')
status_parser.set_defaults(cmd_string='status {ident}')
for command in ['open', 'close', 'half_open', 'force_open', 'force_closed']:
cmd_parser = subparsers.add_parser(command,
help='{cmd} the specified circuit breaker'.format(cmd=command))
cmd_parser.add_argument('ident',
help='The id string of the circuit breaker to {cmd}'.format(cmd=command))
cmd_parser.set_defaults(cmd_string='{cmd} {ident}', cmd=command)
return parser
def command_fifo_manager(input_path, output_path, command, sleep_delay=0.1):
"""
Execute a `command` against the SpinningFifoManager that uses `input_path` and
`output_path` to communicate. Wait `sleep_delay` seconds before trying to read `output_path`
Returns (success, lines), where success is a boolean indicating if the command was
successful, and lines is an array of the output lines returned by the command
"""
with open(input_path, 'a') as input_file:
print >> input_file, command
sleep(sleep_delay)
with open(output_path) as output_file:
success = output_file.readline().strip() == 'SUCCESS'
return success, [line.strip() for line in output_file.readlines()]
def main(args=sys.argv[1:]):
"""
Main function for a commandline program used to communicate with a SpinningFifoManager
"""
    args = argparser().parse_args(args)
input_path = args.input_path
output_path = args.output_path
sleep_delay = .1
if args.config:
config_parser = SafeConfigParser()
        config_parser.read(args.config)
        if config_parser.has_option('pybreaker', 'input_path'):
            input_path = config_parser.get('pybreaker', 'input_path').format(pid=args.pid)
        if config_parser.has_option('pybreaker', 'output_path'):
            output_path = config_parser.get('pybreaker', 'output_path').format(pid=args.pid)
        if config_parser.has_option('pybreaker', 'sleep_delay'):
            sleep_delay = float(config_parser.get('pybreaker', 'sleep_delay'))
    if not input_path or not os.path.exists(input_path):
        print ("No input pipe/file found at {path}. Are you sure you "
               "specified the correct path or config, and the correct "
               "pid?".format(path=input_path))
return 1
success, lines = command_fifo_manager(input_path, output_path, args.cmd_string.format(**args.__dict__), sleep_delay)
print '\n'.join(lines)
return 0 if success else 1
if __name__ == '__main__':
    sys.exit(main()) | src/pybreaker/manager/pipe.py | 0.64232 | 0.191668 |
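A minimal client-side sketch of driving a live SpinningFifoManager through command_fifo_manager above. The FIFO paths and the 'db' breaker ident are assumptions for illustration, not values taken from the module, and the import assumes the file is importable as pybreaker.manager.pipe.

from pybreaker.manager.pipe import command_fifo_manager

input_path = '/tmp/pybreaker.in'    # hypothetical FIFO the manager watches
output_path = '/tmp/pybreaker.out'  # hypothetical file it writes results to

# Ask for the full status table, then force one (assumed) breaker open.
ok, lines = command_fifo_manager(input_path, output_path, 'status')
print('\n'.join(lines))
ok, lines = command_fifo_manager(input_path, output_path, 'force_open db')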
import ray
import numpy as np
import tensorflow as tf
def get_batch(data, batch_index, batch_size):
# This method currently drops data when num_data is not divisible by
# batch_size.
num_data = data.shape[0]
    num_batches = num_data // batch_size  # floor division so the result can be used as an index
batch_index %= num_batches
return data[(batch_index * batch_size):((batch_index + 1) * batch_size)]
def weight(shape, stddev):
initial = tf.truncated_normal(shape, stddev=stddev)
return tf.Variable(initial)
def bias(shape):
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
def conv2d(x, W):
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding="SAME")
def max_pool_2x2(x):
return tf.nn.max_pool(x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding="SAME")
def cnn_setup(x, y, keep_prob, lr, stddev):
first_hidden = 32
second_hidden = 64
fc_hidden = 1024
W_conv1 = weight([5, 5, 1, first_hidden], stddev)
B_conv1 = bias([first_hidden])
x_image = tf.reshape(x, [-1, 28, 28, 1])
h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + B_conv1)
h_pool1 = max_pool_2x2(h_conv1)
W_conv2 = weight([5, 5, first_hidden, second_hidden], stddev)
b_conv2 = bias([second_hidden])
h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
h_pool2 = max_pool_2x2(h_conv2)
W_fc1 = weight([7 * 7 * second_hidden, fc_hidden], stddev)
b_fc1 = bias([fc_hidden])
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * second_hidden])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
    h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)
W_fc2 = weight([fc_hidden, 10], stddev)
b_fc2 = bias([10])
y_conv = tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y * tf.log(y_conv), reduction_indices=[1]))
correct_pred = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y, 1))
return tf.train.AdamOptimizer(lr).minimize(cross_entropy), tf.reduce_mean(tf.cast(correct_pred, tf.float32))
# Define a remote function that takes a set of hyperparameters as well as the
# data, constructs and trains a network, and returns the validation accuracy.
@ray.remote
def train_cnn_and_compute_accuracy(params, steps, train_images, train_labels, validation_images, validation_labels):
# Extract the hyperparameters from the params dictionary.
learning_rate = params["learning_rate"]
batch_size = params["batch_size"]
keep = 1 - params["dropout"]
stddev = params["stddev"]
# Create the input placeholders for the network.
x = tf.placeholder(tf.float32, shape=[None, 784])
y = tf.placeholder(tf.float32, shape=[None, 10])
keep_prob = tf.placeholder(tf.float32)
# Create the network.
train_step, accuracy = cnn_setup(x, y, keep_prob, learning_rate, stddev)
# Do the training and evaluation.
with tf.Session() as sess:
# Initialize the network weights.
sess.run(tf.initialize_all_variables())
for i in range(1, steps + 1):
# Fetch the next batch of data.
image_batch = get_batch(train_images, i, batch_size)
label_batch = get_batch(train_labels, i, batch_size)
# Do one step of training.
sess.run(train_step, feed_dict={x: image_batch, y: label_batch, keep_prob: keep})
if i % 100 == 0:
# Estimate the training accuracy every once in a while.
train_ac = accuracy.eval(feed_dict={x: image_batch, y: label_batch, keep_prob: 1.0})
# If the training accuracy is too low, stop early in order to avoid
# wasting computation.
if train_ac < 0.25:
# Compute the validation accuracy and return.
totalacc = accuracy.eval(feed_dict={x: validation_images, y: validation_labels, keep_prob: 1.0})
return float(totalacc)
# Training is done, compute the validation accuracy and return.
totalacc = accuracy.eval(feed_dict={x: validation_images, y: validation_labels, keep_prob: 1.0})
        return float(totalacc) | examples/hyperopt/hyperopt.py | 0.854004 | 0.490297 |
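A hypothetical driver for the remote function above (not part of the original file): it launches a few learning-rate trials in parallel and collects one validation accuracy per trial. Random arrays stand in for real MNIST data.

import numpy as np
import ray

ray.init()
train_x = np.random.rand(1024, 784).astype(np.float32)
train_y = np.eye(10)[np.random.randint(10, size=1024)].astype(np.float32)
val_x, val_y = train_x[:256], train_y[:256]

trials = [train_cnn_and_compute_accuracy.remote(
              {"learning_rate": lr, "batch_size": 64,
               "dropout": 0.5, "stddev": 0.1},
              200, train_x, train_y, val_x, val_y)
          for lr in [1e-2, 1e-3, 1e-4]]
print(ray.get(trials))  # one validation accuracy per trial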
from ._ffi import *
from ctypes import *
from wasmtime import Store, Extern, Func, Global, Table, Memory, Instance
from wasmtime import Module, Trap, WasiInstance
dll.wasmtime_linker_new.restype = P_wasmtime_linker_t
dll.wasmtime_linker_define.restype = c_bool
dll.wasmtime_linker_define_instance.restype = c_bool
dll.wasmtime_linker_instantiate.restype = P_wasm_instance_t
class Linker(object):
def __init__(self, store):
if not isinstance(store, Store):
raise TypeError("expected a Store")
self.__ptr__ = dll.wasmtime_linker_new(store.__ptr__)
self.store = store
def allow_shadowing(self, allow):
if not isinstance(allow, bool):
raise TypeError("expected a boolean")
dll.wasmtime_linker_allow_shadowing(self.__ptr__, allow)
def define(self, module, name, item):
if isinstance(item, Extern):
raw_item = item.__ptr__
        elif isinstance(item, (Func, Global, Memory, Table)):
            raw_item = item.as_extern().__ptr__
else:
raise TypeError("expected an `Extern`")
module_raw = str_to_name(module)
name_raw = str_to_name(name)
ok = dll.wasmtime_linker_define(self.__ptr__, byref(module_raw),
byref(name_raw), raw_item)
if not ok:
raise RuntimeError("failed to define item")
def define_instance(self, name, instance):
if not isinstance(instance, Instance):
raise TypeError("expected an `Instance`")
name_raw = str_to_name(name)
ok = dll.wasmtime_linker_define_instance(self.__ptr__, byref(name_raw),
instance.__ptr__)
if not ok:
raise RuntimeError("failed to define item")
def define_wasi(self, instance):
if not isinstance(instance, WasiInstance):
raise TypeError("expected an `WasiInstance`")
ok = dll.wasmtime_linker_define_wasi(self.__ptr__, instance.__ptr__)
if not ok:
raise RuntimeError("failed to define item")
def instantiate(self, module):
if not isinstance(module, Module):
raise TypeError("expected a `Module`")
trap = P_wasm_trap_t()
ptr = dll.wasmtime_linker_instantiate(
self.__ptr__, module.__ptr__, byref(trap))
if not ptr:
if trap:
raise Trap.__from_ptr__(trap)
raise RuntimeError("failed to instantiate")
return Instance.__from_ptr__(ptr, module)
def __del__(self):
if hasattr(self, '__ptr__'):
            dll.wasmtime_linker_delete(self.__ptr__) | wasmtime/_linker.py | 0.538012 | 0.079961 |
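A usage sketch for the Linker under explicit assumptions: this era of wasmtime-py is assumed to build a Module directly from a Store plus WAT text, so treat the Module constructor call as illustrative rather than a confirmed signature.

from wasmtime import Store, Module

store = Store()
module = Module(store, '(module (func (export "nop")))')  # assumed constructor
linker = Linker(store)
linker.allow_shadowing(False)
instance = linker.instantiate(module)  # raises Trap/RuntimeError on failure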
import json
import unittest
from tests.base_test import BaseTest
from tests.model_test_data import (
TEST_MEMBER_1,
TEST_ROLE_1,
TEST_ROLE_2,
TEST_ROLE_400,
)
# noinspection PyArgumentList
class TestRoleResource(BaseTest):
"""Test all endpoints for the role resource"""
PASSWORD_1 = TEST_MEMBER_1["password"]
ROLE_NAME_1 = TEST_ROLE_1["role_name"]
ROLE_NAME_2 = TEST_ROLE_2["role_name"]
MSG_200 = "Role modified successfully."
MSG_201 = "Role created successfully."
MSG_400 = "400 BAD REQUEST"
MSG_404 = "404 Not Found: Role with id '99' was not found."
MSG_409_1 = (
f"409 Conflict: Role with role_name '{TEST_ROLE_1['role_name']}' "
f"already exists."
)
MSG_DEL = "Role deleted successfully."
def test_get_role_200(self):
with self.client as c:
with self.app_context:
role = self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_2)
                login = self.login(c, member.email, self.PASSWORD_1)
results = c.get(
f"/roles/{role.id}",
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["role"]["role_name"], self.ROLE_NAME_1)
def test_get_role_404(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_2)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.get(
f"/roles/99",
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["error"], self.MSG_404)
def test_post_role_201(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_1)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.post(
"/roles",
data=json.dumps(TEST_ROLE_2),
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["message"], self.MSG_201)
self.assertEqual(data["role"]["role_name"], self.ROLE_NAME_2)
def test_post_role_400(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_2)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.post(
"/roles",
data=json.dumps(TEST_ROLE_400),
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertTrue("role_name" in data["error"])
results = c.post(
"/roles",
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
self.assertEqual(results.status, self.MSG_400)
def test_post_role_409(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_2)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.post(
"/roles",
data=json.dumps(TEST_ROLE_1),
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["error"], self.MSG_409_1)
def test_put_role_200(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_1)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.put(
f"/roles/1",
data=json.dumps(TEST_ROLE_2),
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["message"], self.MSG_200)
self.assertEqual(data["role"]["role_name"], self.ROLE_NAME_2)
def test_put_role_400(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, role = self.add_member_to_db(
self.member_1, self.role_2
)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.put(
f"/roles/{role.id}",
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
self.assertEqual(results.status, self.MSG_400)
def test_put_role_404(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_2)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.put(
f"/roles/99",
data=json.dumps(TEST_ROLE_2),
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["error"], self.MSG_404)
def test_put_role_409(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, role = self.add_member_to_db(
self.member_1, self.role_2
)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.put(
f"/roles/{role.id}",
data=json.dumps(TEST_ROLE_1),
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["error"], self.MSG_409_1)
def test_delete_role_200(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, role = self.add_member_to_db(
self.member_1, self.role_2
)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.delete(
f"/roles/{role.id}",
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["role"]["role_name"], "member")
self.assertEqual(data["message"], self.MSG_DEL)
def test_delete_role_404(self):
with self.client as c:
with self.app_context:
self.add_permissions_to_admin()
member, _ = self.add_member_to_db(self.member_1, self.role_2)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.delete(
f"/roles/99",
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(data["error"], self.MSG_404)
def test_get_roles_200(self):
with self.client as c:
with self.app_context:
role_1 = self.add_permissions_to_admin()
member, role_2 = self.add_member_to_db(
self.member_1, self.role_2
)
login = self.login(c, member.email, self.PASSWORD_1)
results = c.get(
f"/roles",
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {login['access_token']}",
},
)
data = json.loads(results.data)
self.assertEqual(len(data["roles"]), 2)
self.assertEqual(data["roles"][0]["id"], role_1.id)
self.assertEqual(data["roles"][1]["id"], role_2.id)
if __name__ == "__main__": # pragma: no cover
    unittest.main() | tests/resources/test_role.py | 0.307566 | 0.13589 |
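A possible refactor sketch, not part of the original tests: every request repeats the same two headers, so a small helper would shorten each call to headers=auth_headers(login).

def auth_headers(login):
    """Build the JSON + bearer-token headers used by every request above."""
    return {
        "Content-Type": "application/json",
        "Authorization": "Bearer {}".format(login["access_token"]),
    }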
import sympy
import catamount
from catamount.graph import Graph
from catamount.tensors.tensor_shape import Dimension
from catamount.tests.utils.helpers import *
# [_] TODO (Joel): Move these to the Catamount API
def linear(name, weights_shape, out_shape, input):
output_weights = variable('{}_weights'.format(name), weights_shape)
output = matmul('{}_projection'.format(name), out_shape, input,
output_weights)
output_bias = variable('{}_bias'.format(name), [out_shape[1]])
output = pointwise('{}_point'.format(name), catamount.AddOp, out_shape,
output, output_bias)
return output
def lstm_cell(name, input, state):
batch_size = input.shape.dims[0]
hidden_dim = input.shape.dims[1]
if hidden_dim.value is None:
hidden_dim = None
if hidden_dim is not None:
in_dim = Dimension(2) * hidden_dim
out_dim = Dimension(4) * hidden_dim
else:
in_dim = None
out_dim = None
assert len(state) == 2
c, h = state
lstm_concat_seq = concat('{}_concat'.format(name), [batch_size, in_dim], [h, input], axis=1)
recur_linear = linear('{}_proj'.format(name), [in_dim, out_dim], [batch_size, out_dim], lstm_concat_seq)
i, j, f, o = split('{}_split'.format(name), [batch_size, hidden_dim], recur_linear, axis=1, num_split=4)
forget_bias = variable('{}_f_bias'.format(name), [hidden_dim])
i = pointwise('{}_i_sig'.format(name), catamount.SigmoidOp, [batch_size, hidden_dim], i)
j = pointwise('{}_j_tanh'.format(name), catamount.TanhOp, [batch_size, hidden_dim], j)
f = pointwise('{}_f_add'.format(name), catamount.AddOp, [batch_size, hidden_dim], f, forget_bias)
f = pointwise('{}_f_sig'.format(name), catamount.SigmoidOp, [batch_size, hidden_dim], f)
o = pointwise('{}_o_sig'.format(name), catamount.SigmoidOp, [batch_size, hidden_dim], o)
mul_i_j = pointwise('{}_i_j_mul'.format(name), catamount.MulOp, [batch_size, hidden_dim], i, j)
new_c = pointwise('{}_c_mul'.format(name), catamount.MulOp, [batch_size, hidden_dim], c, f)
new_c = pointwise('{}_c_add'.format(name), catamount.AddOp, [batch_size, hidden_dim], new_c, mul_i_j)
new_c_sig = pointwise('{}_new_c_tanh'.format(name), catamount.TanhOp, [batch_size, hidden_dim], new_c)
new_h = pointwise('{}_new_h'.format(name), catamount.MulOp, [batch_size, hidden_dim], new_c_sig, o)
state = [new_c, new_h]
return new_h, state
def test_lstm_cell():
    ''' Specify a graph containing an LSTM cell and make sure it generates
    the correct number of Flops. Each test combination specifies a
    batch_size and hidden_dim.
    '''
combos = [[None, None],
[32, None],
[None, 1024],
[32, 1024]]
for combo in combos:
graph = Graph()
with graph.asDefault():
batch_size, hidden_dim = combo
            print('Testing LSTM cell with batch_size {}, hidden_dim {}'
                  .format(batch_size, hidden_dim))
input_ph = placeholder('input', [batch_size, hidden_dim])
state_c_ph = placeholder('c_state', [batch_size, hidden_dim])
state_h_ph = placeholder('h_state', [batch_size, hidden_dim])
out_t, state_t = lstm_cell('lstm_cell', input_ph,
[state_c_ph, state_h_ph])
algorithmic_flops = graph.calcAlgFlops()
correct_alg_flops = get_correct_alg_flops()
subs_table = get_subs_table()
correct_alg_flops = correct_alg_flops.subs(subs_table)
print(' Catamount: {}'.format(algorithmic_flops))
print(' Correct: {}'.format(correct_alg_flops))
assert sympy.simplify(algorithmic_flops - correct_alg_flops) == 0,\
'Alg flops incorrect!\n Expecting: {}\n Calculated: {}' \
.format(correct_alg_flops, algorithmic_flops)
# TODO: Bind Nones and check outputs
reset_symbols()
if __name__ == "__main__":
    test_lstm_cell() | catamount/tests/api/lstm_cell.py | 0.371707 | 0.583085 |
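A back-of-envelope check of why the gate projection dominates the cell's flop count (illustrative arithmetic only; it is not taken from the test's helper functions): the linear layer maps the [h, input] concat of width 2H to the four gates of width 4H.

batch, H = 32, 1024
# ~2 flops (one multiply, one add) per element of the batch x 2H x 4H matmul
matmul_flops = 2 * batch * (2 * H) * (4 * H)
print(matmul_flops)  # 536870912 for the [32, 1024] combo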
import asyncio
import enum
import queue
import threading
from typing import AsyncIterator, Any
# Queue status
class Status(enum.Enum):
"""Status codes for `FinishableQueue`"""
INIT = 0
READY = 103
ERROR = 500
FINISHED = 200
class FinishableQueue():
"""Thread-safe class that takes a built-in `queue.Queue` object to handle
the async tasks by managing its' status based on elements retrieve from the
`Queue` object.
Args:
queue_to_finish (:obj:`queue.Queue`): queue object assigned to handle
the async task
"""
def __init__(self, queue_to_finish: queue.Queue):
self._lock = threading.Lock()
self._queue = queue_to_finish
self._status = Status.INIT
@property
def status(self) -> Status:
""":obj:`ibpy_native.utils.finishable_queue.Status`: Status represents
wether the queue is newly initialised, ready for use, finished,
timeout, or encountered error.
"""
return self._status
@property
def finished(self) -> bool:
"""Indicates is the pervious task associated with this finishable queue
finished.
Returns:
bool: True is task last associated is finished, False otherwise.
"""
return self._status is Status.FINISHED
def reset(self):
"""Reset the status to `READY` for reusing the queue if the
status is marked as either `INIT` or `FINISHED`
"""
if self.finished or self._status is Status.INIT:
self._status = Status.READY
def put(self, element: Any):
"""Setter to put element to internal synchronised queue."""
if self._status is Status.INIT:
with self._lock:
self._status = Status.READY
self._queue.put(element)
async def get(self) -> list:
"""Returns a list of elements retrieved from queue once the FINISHED
flag is received, or an exception is retrieved.
Returns:
list: The list of element(s) returned from the queue.
"""
contents_of_queue = []
loop = asyncio.get_event_loop()
while not self.finished and self.status is not Status.ERROR:
current_element = await loop.run_in_executor(
None, self._queue.get
)
if current_element is Status.FINISHED:
with self._lock:
self._status = Status.FINISHED
else:
if isinstance(current_element, BaseException):
with self._lock:
self._status = Status.ERROR
contents_of_queue.append(current_element)
return contents_of_queue
async def stream(self) -> AsyncIterator[Any]:
"""Yields the elements in queue as soon as an element has been put into
the queue.
"""
loop = asyncio.get_event_loop()
while not self.finished and self.status is not Status.ERROR:
current_element = await loop.run_in_executor(
None, self._queue.get
)
if current_element is Status.FINISHED:
with self._lock:
                    self._status = Status.FINISHED
elif isinstance(current_element, BaseException):
with self._lock:
self._status = Status.ERROR
            yield current_element | ibpy_native/utils/finishable_queue.py | 0.848345 | 0.217379 |
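A minimal usage sketch; the producer thread and string payloads are illustrative, not part of the original module. A worker pushes elements followed by the Status.FINISHED sentinel, and get() collects everything up to that sentinel.

import asyncio
import queue
import threading

async def consume():
    fq = FinishableQueue(queue.Queue())

    def producer():
        fq.put("tick")
        fq.put("tock")
        fq.put(Status.FINISHED)  # sentinel that lets get() return

    threading.Thread(target=producer).start()
    print(await fq.get())  # ['tick', 'tock']

asyncio.run(consume())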
from __future__ import print_function
import cv2
import glob
import os
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='This is the simple VSLAM pipeline')
parser.add_argument('-f', '--folder', help='folder where all the images are',
default=os.path.curdir)
parser.add_argument('-e', '--ext', help='extensions of image files', nargs='+',
default=['png'])
parser.add_argument('-d', '--dry', help='dry run, report trim value but not modify files',
action='store_true')
parser.add_argument('-p', '--padding', type=int, help='Extra padding in pixels around crop area',
default=0)
args = parser.parse_args()
extensions = args.ext
extensions = [e.upper() for e in extensions] + [e.lower() for e in extensions]
dryrun = args.dry
folder = os.path.abspath(args.folder)
    padding = args.padding
print("Working on folder: {}".format(folder))
print("Exts: ",extensions)
print("Padding set to {:d}".format(padding))
image_names = []
for ext in extensions:
image_names = image_names + glob.glob(folder+'/*.'+ext)
images = []
heights = []
widths = []
x1s=[]; y1s=[]; x2s=[]; y2s=[]
for image_name in image_names:
img = cv2.imread(image_name)
images.append(img)
heights.append(img.shape[0])
widths.append(img.shape[1])
gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
gray_img_inv = cv2.bitwise_not(gray_img)
        coords = cv2.findNonZero(gray_img_inv)  # all non-background (non-white) pixels
        x, y, w, h = cv2.boundingRect(coords)   # bounding box of those pixels
x1s.append(x)
y1s.append(y)
x2s.append(x+w)
y2s.append(y+h)
x1_crop = min(x1s)-padding
x2_crop = max(x2s)+padding
y1_crop = min(y1s)-padding
y2_crop = max(y2s)+padding
assert max(heights) == min(heights)
assert max(widths) == min(widths)
print("Image dimensions: width x height: {:d} x {:d}".format(widths[0], heights[0]))
print("Valid crop limits x:{:d} to {:d}, y:{:d} to {:d}".format(x1_crop,x2_crop,y1_crop,y2_crop))
print("Valid crop numpy indices [rows,cols]:[{:d} : -{:d}, {:d} : -{:d}]".format(y1_crop,heights[0]-y2_crop, x1_crop, widths[0]-x2_crop))
if dryrun:
print("This was a dry run")
else:
for image_name, img in zip(image_names, images):
img_name_base, img_name_ext = os.path.splitext(image_name)
img_name_out = img_name_base + '_trim' + img_name_ext
img_cropped = img[y1_crop:y2_crop, x1_crop:x2_crop]
            if img_name_ext.lower() == '.png':
                cv2.imwrite(img_name_out, img_cropped, [cv2.IMWRITE_PNG_COMPRESSION, 9])
            else:
                # fall back to the default encoder for non-PNG extensions
                cv2.imwrite(img_name_out, img_cropped) | trim_whitespaces_in_folder.py | 0.449393 | 0.265012 |
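A self-contained check of the crop logic on synthetic data (illustrative, not from the original script): a white 100x100 image with a single dark pixel at column 10, row 20 should produce a 1x1 bounding box at that pixel.

import cv2
import numpy as np

img = np.full((100, 100), 255, dtype=np.uint8)
img[20, 10] = 0                                 # row y=20, col x=10
coords = cv2.findNonZero(cv2.bitwise_not(img))  # non-white pixels only
x, y, w, h = cv2.boundingRect(coords)
print(x, y, w, h)                               # -> 10 20 1 1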
import scrapy
from futhead.items import FutheadItem
class FutheadSpider(scrapy.Spider):
name = "futhead"
allowed_domains = ["futhead.com"]
max_pages = 50
def start_requests(self):
for i in range(self.max_pages):
yield scrapy.Request('http://www.futhead.com/17/players/?page=%d&level=all_nif&bin_platform=ps' % i, callback=self.parse)
def parse(self, response):
        self.logger.info('Visited catalogue page %s', response.url)
players_page_link = response.xpath('//div[contains(@class, "content")]/a[contains(@class, "display-block")]/@href').extract()
for link in players_page_link:
self.logger.debug('The items link is %s' % (link, ))
url = response.urljoin(link)
self.logger.debug('The players page absolute url is %s' % (url))
yield scrapy.Request(url, callback=self.parse_players_page_content)
# try:
# next_page = response.xpath('//span[contains(@class, "font-12 font-bold margin-l-r-10")]/following::a/@href').extract()[0]
# next_page_url = response.urljoin(next_page)
# self.logger.debug('This is next players page absolute url %s' % (next_page_url))
# yield scrapy.Request(next_page_url, self.parse)
# except:
# pass
def parse_players_page_content(self, response):
self.logger.info('Players page parse function on %s' % (response.url, ))
item = FutheadItem()
item['Name'] = response.xpath('//div[@class="playercard-name"]/text()').extract()[0]
item['Club'] = response.xpath('//a[@class="futhead-link"]/text()').extract()[0]
item['League'] = response.xpath('//a[@class="futhead-link"]/text()').extract()[1]
item['Nation'] = response.xpath('//a[@class="futhead-link"]/text()').extract()[2]
        sidebar = [s.strip() for s in response.xpath('//div[contains(@class, "col-xs-5 player-sidebar-value")]/text()[normalize-space()]').extract()]
        item['SkillMoves'] = sidebar[0]
        item['WeakFoot'] = sidebar[1]
        item['StrongFoot'] = sidebar[2]
        item['Age'] = sidebar[3]
        item['Height'] = sidebar[4]
        item['Workrates'] = sidebar[5]
        base_stats = response.xpath('//span[@class="chembot-delta"]/@data-chembot-base').extract()
        item['Pace'] = base_stats[0]
        item['Shooting'] = base_stats[1]
        item['Passing'] = base_stats[2]
        item['Dribbling'] = base_stats[3]
        item['Defending'] = base_stats[4]
        item['Physical'] = base_stats[5]
        chembot_stats = response.xpath('//span[contains(@class, "player-stat-chembot chembot-delta")]/@data-chembot-base').extract()
        item['AttackerRating'] = chembot_stats[0]
        item['CreatorRating'] = chembot_stats[1]
        item['DefenderRating'] = chembot_stats[2]
        item['BeastRating'] = chembot_stats[3]
        item['HeadingRating'] = chembot_stats[4]
        item['TotalStats'] = chembot_stats[5]
        item['TotalLigs'] = chembot_stats[6]
item['Rating'] = response.xpath('//div[@class = "playercard-rating"]/text()').extract()
item['Position'] = response.xpath('//div[@class = "playercard-position"]/text()').extract()
        yield item
| Scrapy/futhead/futhead/spiders/futhead_spider.py | 0.240507 | 0.062617 |
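The spider above imports FutheadItem from futhead.items, which is not part of
this dump. A minimal sketch of what that module presumably declares, assuming
one scrapy.Field per key assigned in parse_players_page_content:

import scrapy

class FutheadItem(scrapy.Item):
    # field names must match the keys the spider assigns
    Name = scrapy.Field()
    Club = scrapy.Field()
    League = scrapy.Field()
    Nation = scrapy.Field()
    SkillMoves = scrapy.Field()
    WeakFoot = scrapy.Field()
    StrongFoot = scrapy.Field()
    Age = scrapy.Field()
    Height = scrapy.Field()
    Workrates = scrapy.Field()
    Pace = scrapy.Field()
    Shooting = scrapy.Field()
    Passing = scrapy.Field()
    Dribbling = scrapy.Field()
    Defending = scrapy.Field()
    Physical = scrapy.Field()
    AttackerRating = scrapy.Field()
    CreatorRating = scrapy.Field()
    DefenderRating = scrapy.Field()
    BeastRating = scrapy.Field()
    HeadingRating = scrapy.Field()
    TotalStats = scrapy.Field()
    TotalLigs = scrapy.Field()
    Rating = scrapy.Field()
    Position = scrapy.Field()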
import os
import shutil
from tqdm import tqdm
from luigi.parameter import Parameter
from luigi import Task, ExternalTask, Event
import pandas as pd
import dask.dataframe as dd
from oandapyV20 import API
import oandapyV20.endpoints.transactions as transactions
from helperfiles.task import TargetOutput, Requirement, Requires
from helperfiles.target import ParquetTarget
from contextlib import suppress
"""
Functionality to download trading history
May be run like this: luigi --module tradinghistory GetTradingHistory --local-scheduler
or thru cli.py
Will download history from the account specified in .env
"""
class MoveToArchive(Task):
    """Moves existing trading history into an archive folder.
    This is done so existing history, if any, can be read in and reused;
    transactions that already exist locally are not downloaded again.
    Everything is then written into the trading_history folder to ensure
    a truly atomic write."""
local_location = os.getenv("local_location")
    if local_location is None:
        local_location = "data/"
    if not ParquetTarget(local_location + "trading_history/").exists():
def complete(self):
return True
if ParquetTarget(local_location + "trading_history/").exists():
output = TargetOutput(local_location + "archive/", target_class=ParquetTarget)
trading_history = ParquetTarget(local_location + "trading_history/")
def run(self):
dsk = self.trading_history.read_dask()
self.output().write(dsk, write_metadata_file=True, compression="gzip")
class env_workaround:
# Fix required for Travis CI
def return_env(self, value):
value = os.getenv(value)
        if value is None:
            value = "not_available"
return value
class S3(ExternalTask):
# If -storage s3 is selected; everything gets stored at AWS S3 as backup
output = TargetOutput(
env_workaround().return_env("S3_location") + "tradinghistory/",
target_class=ParquetTarget,
)
class DownloadS3(ExternalTask):
# Downloading from s3 if history exist there
requires = Requires()
other = Requirement(S3)
# Set output location
output = TargetOutput(
env_workaround().return_env("local_location") + "archive/",
target_class=ParquetTarget,
)
def run(self):
input_target = next(iter(self.input().items()))[1]
dsk = input_target.read()
self.output().write(dsk)
class GetTradingHistory(Task):
    """
    The task that does the heavy lifting.
    Reuses existing history and appends new transactions.
    :param storage: "s3" if you want a backup to AWS S3
    :param max_transactions: the id of the latest event you want downloaded.
        The account / token we have provided you with contains about
        385 000 events, so a clean download will take some time unless
        this is set."""
storage = Parameter(default="")
max_transactions = Parameter(default=0)
client = API(
access_token=env_workaround().return_env("TOKEN"), environment="practice"
)
if ParquetTarget(
env_workaround().return_env("local_location") + "archive/"
).exists():
with suppress(FileNotFoundError):
shutil.rmtree(
env_workaround().return_env("local_location") + "archive/",
ignore_errors=True,
)
def requires(self):
if self.storage == "s3":
if ParquetTarget(
env_workaround().return_env("S3_location") + "tradinghistory/"
).exists():
return [DownloadS3()]
        return MoveToArchive()
output = TargetOutput(
"./" + env_workaround().return_env("local_location") + "archive/",
target_class=ParquetTarget,
)
store = TargetOutput(
"./" + env_workaround().return_env("local_location") + "trading_history/",
target_class=ParquetTarget,
)
s3store = TargetOutput(
env_workaround().return_env("S3_location") + "tradinghistory/",
target_class=ParquetTarget,
)
def gettransaction(self, first, last):
trans = transactions.TransactionIDRange(
accountID=env_workaround().return_env("ACCOUNT_ID"),
params={"from": first, "to": last},
)
trans = self.client.request(trans)
return trans
def run(self):
# last_trans is the latest transaction on the server
# May be overridden with max_transactions above
last_trans = int(self.gettransaction(1, 2)["lastTransactionID"])
        pbar = tqdm(total=last_trans)
if ParquetTarget(
"./" + env_workaround().return_env("local_location") + "archive/"
).exists():
dsk = dd.read_parquet(
"./"
+ env_workaround().return_env("local_location")
+ "archive/*.parquet"
)
if self.max_transactions != 0:
                # Parameter values arrive as strings from the CLI, so cast
                last_trans = int(self.max_transactions)
else:
# If no local copy exist
trans_df = self.gettransaction(1, 1000)
df = pd.DataFrame(trans_df["transactions"])
dsk = dd.from_pandas(df, chunksize=10000)
if self.max_transactions != 0:
                last_trans = int(self.max_transactions)
while int(dsk["id"].astype("int64").max().compute()) < last_trans:
last_recorded = int(dsk["id"].astype("int64").max().compute())
print(" - Reading history until id: {}".format(last_recorded))
trans_df = self.gettransaction(last_recorded, last_recorded + 999)
df = pd.DataFrame(trans_df["transactions"])
df = df.drop(
columns=[
"takeProfitOnFill",
"fullPrice",
"tradeOpened",
"positionFinancings",
"tradeReduced",
"tradesClosed",
"openTradeDividendAdjustments",
],
errors="ignore",
)
dsk = dd.concat([dsk, df])
            # The API only allows 1000 events per request, hence the 1000-step progress updates
pbar.update(1000)
dsk = dsk.drop(
columns=[
"takeProfitOnFill",
"fullPrice",
"tradeOpened",
"positionFinancings",
"tradeReduced",
"tradesClosed",
"openTradeDividendAdjustments",
"shortPositionCloseout",
],
errors="ignore",
)
self.store().write(dsk, write_metadata_file=True, compression="gzip")
if self.storage == "s3":
self.s3store().write(dsk)
print("Finished writing to S3") | tools/tradinghistory.py | import os
import shutil
from tqdm import tqdm
from luigi.parameter import Parameter
from luigi import Task, ExternalTask, Event
import pandas as pd
import dask.dataframe as dd
from oandapyV20 import API
import oandapyV20.endpoints.transactions as transactions
from helperfiles.task import TargetOutput, Requirement, Requires
from helperfiles.target import ParquetTarget
from contextlib import suppress
"""
Functionality to download trading history
May be run like this: luigi --module tradinghistory GetTradingHistory --local-scheduler
or thru cli.py
Will download history from the account specified in .env
"""
class MoveToArchieve(Task):
"""This moves existing trading history into an archive folder.
This is done so exisitng history, if existing may be read in and used.
Hence, for transactions that are existing locally does not get read again.
Then everything is written into the trading_history folder to ensure
true atomic write"""
local_location = os.getenv("local_location")
if local_location == None:
local_location = "data/"
if ParquetTarget(local_location + "trading_history/").exists() == False:
def complete(self):
return True
if ParquetTarget(local_location + "trading_history/").exists():
output = TargetOutput(local_location + "archive/", target_class=ParquetTarget)
trading_history = ParquetTarget(local_location + "trading_history/")
def run(self):
dsk = self.trading_history.read_dask()
self.output().write(dsk, write_metadata_file=True, compression="gzip")
class env_workaround:
# Fix required for Travis CI
def return_env(self, value):
value = os.getenv(value)
if value == None:
value = "not_availiable"
return value
class S3(ExternalTask):
# If -storage s3 is selected; everything gets stored at AWS S3 as backup
output = TargetOutput(
env_workaround().return_env("S3_location") + "tradinghistory/",
target_class=ParquetTarget,
)
class DownloadS3(ExternalTask):
# Downloading from s3 if history exist there
requires = Requires()
other = Requirement(S3)
# Set output location
output = TargetOutput(
env_workaround().return_env("local_location") + "archive/",
target_class=ParquetTarget,
)
def run(self):
input_target = next(iter(self.input().items()))[1]
dsk = input_target.read()
self.output().write(dsk)
class GetTradingHistory(Task):
"""
The task that does the heavy lifting.
Ensures to reuse existing history and append new.
:param storage: s3 if you want backup to AWS s3
:param max_transactions: The id of the latest event you want downloaded
The account / token we have provided you with, contains about
385 000 events. So a clean download will take some time, unless
this is set"""
storage = Parameter(default="")
max_transactions = Parameter(default=0)
client = API(
access_token=env_workaround().return_env("TOKEN"), environment="practice"
)
if ParquetTarget(
env_workaround().return_env("local_location") + "archive/"
).exists():
with suppress(FileNotFoundError):
shutil.rmtree(
env_workaround().return_env("local_location") + "archive/",
ignore_errors=True,
)
def requires(self):
if self.storage == "s3":
if ParquetTarget(
env_workaround().return_env("S3_location") + "tradinghistory/"
).exists():
return [DownloadS3()]
return MoveToArchieve()
output = TargetOutput(
"./" + env_workaround().return_env("local_location") + "archive/",
target_class=ParquetTarget,
)
store = TargetOutput(
"./" + env_workaround().return_env("local_location") + "trading_history/",
target_class=ParquetTarget,
)
s3store = TargetOutput(
env_workaround().return_env("S3_location") + "tradinghistory/",
target_class=ParquetTarget,
)
def gettransaction(self, first, last):
trans = transactions.TransactionIDRange(
accountID=env_workaround().return_env("ACCOUNT_ID"),
params={"from": first, "to": last},
)
trans = self.client.request(trans)
return trans
def run(self):
# last_trans is the latest transaction on the server
# May be overridden with max_transactions above
last_trans = int(self.gettransaction(1, 2)["lastTransactionID"])
pbar = tqdm(last_trans)
if ParquetTarget(
"./" + env_workaround().return_env("local_location") + "archive/"
).exists():
dsk = dd.read_parquet(
"./"
+ env_workaround().return_env("local_location")
+ "archive/*.parquet"
)
if self.max_transactions != 0:
last_trans = self.max_transactions
else:
# If no local copy exist
trans_df = self.gettransaction(1, 1000)
df = pd.DataFrame(trans_df["transactions"])
dsk = dd.from_pandas(df, chunksize=10000)
if self.max_transactions != 0:
last_trans = self.max_transactions
while int(dsk["id"].astype("int64").max().compute()) < last_trans:
last_recorded = int(dsk["id"].astype("int64").max().compute())
print(" - Reading history until id: {}".format(last_recorded))
trans_df = self.gettransaction(last_recorded, last_recorded + 999)
df = pd.DataFrame(trans_df["transactions"])
df = df.drop(
columns=[
"takeProfitOnFill",
"fullPrice",
"tradeOpened",
"positionFinancings",
"tradeReduced",
"tradesClosed",
"openTradeDividendAdjustments",
],
errors="ignore",
)
dsk = dd.concat([dsk, df])
# The API only allows for 1000 events per request. Hence a progress bar
pbar.update(1000)
dsk = dsk.drop(
columns=[
"takeProfitOnFill",
"fullPrice",
"tradeOpened",
"positionFinancings",
"tradeReduced",
"tradesClosed",
"openTradeDividendAdjustments",
"shortPositionCloseout",
],
errors="ignore",
)
self.store().write(dsk, write_metadata_file=True, compression="gzip")
if self.storage == "s3":
self.s3store().write(dsk)
print("Finished writing to S3") | 0.595845 | 0.226923 |
import json
import zlib
import sys
import os
try:
from server_common.channel_access import ChannelAccess as ca
except ImportError:
sys.path.append(os.path.join(os.path.dirname(sys.path[0]))) # to allow server common from dir below
from server_common.channel_access import ChannelAccess as ca
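# NOTE: the 'hex' string codec used below exists only on Python 2; a
# Python 3 port using binascii is sketched after this file.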
def compress_and_hex(value):
compr = zlib.compress(value)
return compr.encode('hex')
def dehex_and_decompress(value):
return zlib.decompress(value.decode("hex"))
def set_env():
    epics_ca_addr_list = "EPICS_CA_ADDR_LIST"
    # If we're not in an EPICS terminal, add the address list to the set of
    # environment keys
    if epics_ca_addr_list not in os.environ:
        os.environ[epics_ca_addr_list] = "127.255.255.255 172.16.58.3"
    print(epics_ca_addr_list + " = " + str(os.environ.get(epics_ca_addr_list)))
def inst_dictionary(instrument_name, hostname_prefix="NDX", hostname=None, pv_prefix=None, is_scheduled=True):
"""
Generate the instrument dictionary for the instrument list
Args:
instrument_name: instrument name
hostname_prefix: prefix for hostname (defaults to NDX)
        hostname: whole host name; overrides prefix, defaults to hostname_prefix + instrument name
        pv_prefix: the pv prefix; defaults to "IN:instrument_name:"
        is_scheduled: whether the instrument has scheduled users and so should have user details written to it; defaults to True
Returns: dictionary for instrument
"""
if hostname is not None:
hostname_to_use = hostname
else:
hostname_to_use = hostname_prefix + instrument_name
if pv_prefix is not None:
pv_prefix_to_use = pv_prefix
else:
pv_prefix_to_use = "IN:{0}:".format(instrument_name)
return {"name": instrument_name,
"hostName": hostname_to_use,
"pvPrefix": pv_prefix_to_use,
"isScheduled": is_scheduled}
if __name__ == "__main__":
set_env()
# The PV address list
pv_address = "CS:INSTLIST"
# instrument list values to set (uses utility to return the dictionary but you can use a dictionary directly)
instruments_list = [
inst_dictionary("LARMOR"),
inst_dictionary("ALF"),
inst_dictionary("DEMO", is_scheduled=False),
inst_dictionary("IMAT"),
inst_dictionary("MUONFE", hostname_prefix="NDE", is_scheduled=False),
inst_dictionary("ZOOM"),
inst_dictionary("IRIS"),
inst_dictionary("IRIS_SETUP", pv_prefix="IN:IRIS_S29:", is_scheduled=False),
inst_dictionary("ENGINX_SETUP", pv_prefix="IN:ENGINX49:", is_scheduled=False),
inst_dictionary("HRPD"),
inst_dictionary("POLARIS"),
inst_dictionary("VESUVIO"),
inst_dictionary("ENGINX"),
inst_dictionary("MERLIN"),
inst_dictionary("RIKENFE", is_scheduled=False),
inst_dictionary("SELAB", is_scheduled=False),
inst_dictionary("EMMA-A", is_scheduled=False),
inst_dictionary("SANDALS"),
inst_dictionary("GEM"),
inst_dictionary("MAPS"),
inst_dictionary("OSIRIS"),
inst_dictionary("INES"),
inst_dictionary("TOSCA"),
inst_dictionary("LOQ"),
inst_dictionary("LET"),
inst_dictionary("MARI"),
inst_dictionary("CRISP"),
inst_dictionary("SOFTMAT", is_scheduled=False),
inst_dictionary("SURF"),
inst_dictionary("NIMROD"),
inst_dictionary("DETMON", hostname_prefix="NDA", is_scheduled=False, pv_prefix="TE:NDADETF1:"),
inst_dictionary("EMU"),
inst_dictionary("INTER"),
inst_dictionary("POLREF"),
inst_dictionary("SANS2D"),
]
new_value = json.dumps(instruments_list)
new_value_compressed = compress_and_hex(new_value)
ca.caput(pv_address, str(new_value_compressed), True)
result_compr = ca.caget(pv_address, True)
result = dehex_and_decompress(result_compr)
print(result)
if result != new_value:
print("Warning! Entered value does not match new value.")
print("Entered value: " + new_value)
print("Actual value: " + result)
else:
print("Success! The PV now reads: {0}".format(result)) | scripts/set_instrument_list.py |
import json
import zlib
import sys
import os
try:
from server_common.channel_access import ChannelAccess as ca
except ImportError:
sys.path.append(os.path.join(os.path.dirname(sys.path[0]))) # to allow server common from dir below
from server_common.channel_access import ChannelAccess as ca
def compress_and_hex(value):
compr = zlib.compress(value)
return compr.encode('hex')
def dehex_and_decompress(value):
return zlib.decompress(value.decode("hex"))
def set_env():
epics_ca_addr_list = "EPICS_CA_ADDR_LIST"
""" If we're not in an EPICS terminal, add the address list to the set of
environment keys """
if not epics_ca_addr_list in os.environ.keys():
os.environ[epics_ca_addr_list] = "127.255.255.255 172.16.58.3"
print(epics_ca_addr_list + " = " + str(os.environ.get(epics_ca_addr_list)))
def inst_dictionary(instrument_name, hostname_prefix="NDX", hostname=None, pv_prefix=None, is_scheduled=True):
"""
Generate the instrument dictionary for the instrument list
Args:
instrument_name: instrument name
hostname_prefix: prefix for hostname (defaults to NDX)
hostname: whole host name overrides prefix, defaults to hostname_prefix + instrument name
pv_prefix: the pv prefeix; default to IN:instrument_name
is_scheduled: whether the instrument has scheduled users and so should have user details written to it; default to True
Returns: dictionary for instrument
"""
if hostname is not None:
hostname_to_use = hostname
else:
hostname_to_use = hostname_prefix + instrument_name
if pv_prefix is not None:
pv_prefix_to_use = pv_prefix
else:
pv_prefix_to_use = "IN:{0}:".format(instrument_name)
return {"name": instrument_name,
"hostName": hostname_to_use,
"pvPrefix": pv_prefix_to_use,
"isScheduled": is_scheduled}
if __name__ == "__main__":
set_env()
# The PV address list
pv_address = "CS:INSTLIST"
# instrument list values to set (uses utility to return the dictionary but you can use a dictionary directly)
instruments_list = [
inst_dictionary("LARMOR"),
inst_dictionary("ALF"),
inst_dictionary("DEMO", is_scheduled=False),
inst_dictionary("IMAT"),
inst_dictionary("MUONFE", hostname_prefix="NDE", is_scheduled=False),
inst_dictionary("ZOOM"),
inst_dictionary("IRIS"),
inst_dictionary("IRIS_SETUP", pv_prefix="IN:IRIS_S29:", is_scheduled=False),
inst_dictionary("ENGINX_SETUP", pv_prefix="IN:ENGINX49:", is_scheduled=False),
inst_dictionary("HRPD"),
inst_dictionary("POLARIS"),
inst_dictionary("VESUVIO"),
inst_dictionary("ENGINX"),
inst_dictionary("MERLIN"),
inst_dictionary("RIKENFE", is_scheduled=False),
inst_dictionary("SELAB", is_scheduled=False),
inst_dictionary("EMMA-A", is_scheduled=False),
inst_dictionary("SANDALS"),
inst_dictionary("GEM"),
inst_dictionary("MAPS"),
inst_dictionary("OSIRIS"),
inst_dictionary("INES"),
inst_dictionary("TOSCA"),
inst_dictionary("LOQ"),
inst_dictionary("LET"),
inst_dictionary("MARI"),
inst_dictionary("CRISP"),
inst_dictionary("SOFTMAT", is_scheduled=False),
inst_dictionary("SURF"),
inst_dictionary("NIMROD"),
inst_dictionary("DETMON", hostname_prefix="NDA", is_scheduled=False, pv_prefix="TE:NDADETF1:"),
inst_dictionary("EMU"),
inst_dictionary("INTER"),
inst_dictionary("POLREF"),
inst_dictionary("SANS2D"),
]
new_value = json.dumps(instruments_list)
new_value_compressed = compress_and_hex(new_value)
ca.caput(pv_address, str(new_value_compressed), True)
result_compr = ca.caget(pv_address, True)
result = dehex_and_decompress(result_compr)
print(result)
if result != new_value:
print("Warning! Entered value does not match new value.")
print("Entered value: " + new_value)
print("Actual value: " + result)
else:
print("Success! The PV now reads: {0}".format(result)) | 0.437583 | 0.164449 |
from django.db import models
class Organisation(models.Model):
"""
Table: Organisation
    Comment: The place to store all the organisations
"""
org_id = models.BigIntegerField(null=False, unique=True)
name = models.CharField(max_length=200, null=False, unique=True)
created_at = models.DateField()
location = models.CharField(max_length=20, default='')
expired_contract = models.BooleanField(default=False)
country = models.CharField(max_length=100, null=False)
recently_added = models.BooleanField(default=True)
archived = models.BooleanField(default=True)
archived_date = models.DateTimeField(auto_now=True)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class Ticket(models.Model):
"""
Table: Tickets
Comments: The placeholder for all the tickets.
"""
ticket_id = models.BigIntegerField(null=False, unique=True)
org_id = models.ForeignKey(Organisation, related_name='tickets_org', on_delete=models.CASCADE)
created = models.DateTimeField(null=False)
updated = models.DateTimeField(null=False)
priority = models.CharField(max_length=50, null=False)
status = models.CharField(max_length=15)
submitter = models.CharField(max_length=50, null=False)
assignee = models.CharField(max_length=50, null=False)
product = models.CharField(max_length=100)
kb = models.CharField(max_length=100)
resolution_code = models.CharField(max_length=100)
defect = models.CharField(max_length=100)
product_version = models.CharField(max_length=100)
product_component = models.CharField(max_length=100)
product_component_category = models.CharField(max_length=50)
iaas = models.CharField(max_length=100)
environment = models.CharField(max_length=50)
type = models.CharField(max_length=50)
def __str__(self):
return str(self.ticket_id)
class LastOrgTicketLoaderRun(models.Model):
"""
Table: LastTicketLoadRun
    Comments: Records when the last search query used to extract the data was run.
"""
org_id = models.ForeignKey(Organisation, related_name='last_load_org_tickets', on_delete=models.CASCADE)
last = models.DateTimeField(auto_now=True, null=False)
success = models.BooleanField()
runtime = models.DateTimeField(auto_now=True, null=False)
def __str__(self):
return str(self.org_id)
class TicketAttribute(models.Model):
"""
Table: TicketAttribute
Comment: The place to store all the Hot Tickets
"""
ticket_id = models.BigIntegerField(null=False, unique=True)
org_id = models.ForeignKey(Organisation, related_name='hot_ticket_org', on_delete=models.CASCADE)
hot = models.BooleanField(default=False)
patch = models.BooleanField(default=False)
def __str__(self):
return str(self.ticket_id)
class TicketNote(models.Model):
"""
Table: TicketNotes
Comment: The place to store all the ticketNotes
"""
ticket_id = models.BigIntegerField(null=False, db_index=True)
org_id = models.ForeignKey(Organisation, related_name='org', on_delete=models.CASCADE)
author = models.CharField(max_length=50, null=False, default="Unknown")
created = models.DateTimeField(auto_now=True, null=False)
notes = models.CharField(max_length=500)
def __str__(self):
        return str(self.ticket_id)
| zendesk/models.py | 0.727879 | 0.221793 |
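A hedged sketch of using these models from a Django shell; the field names
come from the models above, the values are made up:

# e.g. inside `python manage.py shell`
from zendesk.models import Organisation, Ticket

org = Organisation.objects.create(
    org_id=1, name="Example Org", created_at="2020-01-01", country="NO")
Ticket.objects.create(
    ticket_id=42, org_id=org, created="2020-01-02T00:00:00Z",
    updated="2020-01-02T00:00:00Z", priority="high",
    submitter="alice", assignee="bob")
high_priority = Ticket.objects.filter(org_id=org, priority="high")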
from matplotlib import cm
import matplotlib.pyplot as plt
import numpy as np
import random
import itertools
import torch
from torchvision.utils import make_grid
from tensorboardX import SummaryWriter
from data.utils import get_coord
from tools.utils import create_colormap, color_transfer, color_spread
class Logger():
def __init__(self, opt):
self.writer = SummaryWriter(opt.log_path)
self.colormap = create_colormap(opt)
self.num_semantics = opt.num_semantics
self.semantic_labels = opt.semantic_labels
self.num_things = opt.num_things
self.things_idx = opt.things_idx
self.x_coord, self.y_coord = get_coord(opt.max_dim, opt.aspect_ratio)
self.log_path = opt.log_path
self.eval_idx = opt.eval_idx
def is_empty(self, tensors):
for tensor in tensors:
if 0 in tensor.size():
return True
return False
def log_img(self, name, tensor, nrow, global_iteration, normalize=False, range=None, pad_value=0):
if self.is_empty([tensor]):
return
with torch.no_grad():
grid = make_grid(tensor, nrow=nrow, normalize=normalize, range=range, pad_value=pad_value)
self.writer.add_image(name, grid, global_iteration)
def log_scalar(self, name, scalar, global_iteration):
if scalar is not None:
            if isinstance(scalar, list):
for i, x in enumerate(scalar):
self.log_scalar(f"{name}_{i}", x, global_iteration)
else:
self.writer.add_scalar(name, scalar, global_iteration)
def log_spread(self, name, spread, nrow, global_iteration, max_spread=5):
if self.is_empty([spread]):
return
with torch.no_grad():
im_spread_rgb = color_spread(spread, max_spread)
self.log_img(name + f"-max{max_spread}", im_spread_rgb, nrow, global_iteration)
def log_semantic_mask(self, name, semantic_mask, sem_cond, mask_num, nrow, global_iteration, thresh=0.001):
if self.is_empty([semantic_mask, sem_cond]):
return
with torch.no_grad():
semantic_mask = semantic_mask[0]
sem_cond = sem_cond[0]
possible_idx = list((sem_cond != 0).nonzero().flatten().numpy())
num = min(len(possible_idx), mask_num)
idx = sorted(random.sample(possible_idx, num))
semantic_mask = semantic_mask[idx]
semantic_mask = semantic_mask.view(num, 1, *semantic_mask.size()[1:])
masks_max = torch.max(semantic_mask.view(num, 1, -1), dim=2)[0].view(num, 1, 1, 1)
semantic_mask /= masks_max
colors = torch.FloatTensor(num, 1, *semantic_mask.size()[2:]).zero_()
for i in range(num):
colors[i] = float(idx[i])
colors = color_transfer(colors, self.colormap).cpu() / 2 + 0.5
colors = colors * semantic_mask
cell = max(semantic_mask.size(2) // 60, 1)
background = torch.zeros(semantic_mask.size(2), semantic_mask.size(3))
for i in range(cell):
background[i::2 * cell] += 1
background[:, i::2 * cell] += 1
background = (background % 2) * 0.5 + 0.5
background = background.repeat(3, 1, 1)
for i in range(semantic_mask.size(0)):
bg_mask = (semantic_mask[i] < thresh).repeat(3, 1, 1)
colors[i][bg_mask] = background[bg_mask]
self.log_img(name + f"-thresh{thresh}", colors, nrow, global_iteration)
def log_semantic_seg(self, name, seg_mc, nrow, global_iteration):
if self.is_empty([seg_mc]):
return
with torch.no_grad():
seg = seg_mc if seg_mc.size(1) == 1 else seg_mc.max(1, keepdim=True)[1]
seg[seg > self.num_semantics - 1] = -1
seg = color_transfer(seg, self.colormap)
self.log_img(name, seg, nrow, global_iteration, normalize=True, range=(-1, 1))
def log_cond_distrib(self, name, real_cond, fake_cond, nrow, ncol, global_iteration, width=0.5):
if self.is_empty([real_cond, fake_cond]):
return
with torch.no_grad():
num = fake_cond.size(0)
fake_cond = fake_cond.detach().numpy()
real_cond = real_cond.detach().numpy()
x = np.array(range(fake_cond.shape[1]))
fig, axes = plt.subplots(nrows=nrow, ncols=ncol, figsize=(7, 6))
for i, ax in enumerate(axes.flat):
if i < num:
ax.bar(x - width / 2, real_cond[i], width, label='real')
ax.bar(x + width / 2, fake_cond[i], width, label='fake')
if i == num - 1:
ax.legend()
fig.tight_layout()
self.writer.add_figure(name, fig, global_iteration, close=True)
def log_ins_center(self, name, ins_center, nrow, global_iteration):
if self.is_empty([ins_center]):
return
with torch.no_grad():
ins_center = torch.clamp(ins_center, max=1)
self.log_img(name, ins_center, nrow, global_iteration, pad_value=1)
def log_instance(self, name, seg_mc, center_mask, ins_offset, nrow, global_iteration):
if self.is_empty([seg_mc, center_mask, ins_offset]):
return
with torch.no_grad():
height, width = seg_mc.shape[2:]
seg_mc_one_hot = torch.zeros_like(seg_mc).scatter_(1, seg_mc.max(dim=1, keepdim=True)[1], 1.0)
instance_colors = torch.zeros((seg_mc.shape[0], *seg_mc.shape[2:], 3))
scaled_x_coord = self.x_coord[:height, :width] / height
scaled_y_coord = self.y_coord[:height, :width] / height
shifted_x_coord = ins_offset[:, 0] + scaled_x_coord.view(1, height, width)
shifted_y_coord = ins_offset[:, 1] + scaled_y_coord.view(1, height, width)
for b in range(seg_mc.shape[0]):
for i, k in enumerate(self.things_idx):
mask = (seg_mc_one_hot[b, k] > 0)
mask_shape = torch.sum(mask.long())
closest_dis = 10000 * torch.ones(mask_shape)
new_colors = torch.ones((mask_shape, 3))
for j, (center_y, center_x) in enumerate(center_mask[b, 0].nonzero()):
dis_x = shifted_x_coord[b][mask] - 1. * center_x / height
dis_y = shifted_y_coord[b][mask] - 1. * center_y / height
squared_dis_to_center = dis_x ** 2 + dis_y ** 2
new_center_mask = (squared_dis_to_center < closest_dis)
closest_dis[new_center_mask] = squared_dis_to_center[new_center_mask]
new_colors[new_center_mask] = 0.2 + 0.6 * torch.rand(3)
instance_colors[b][mask] = new_colors
instance_colors = instance_colors.permute(0, 3, 1, 2)
self.log_img(name, instance_colors, nrow, global_iteration, pad_value=1)
def log_ins_offset(self, name, seg_mc, ins_offset, nrow, global_iteration):
if self.is_empty([seg_mc, ins_offset]):
return
with torch.no_grad():
index = seg_mc.max(dim=1, keepdim=True)[1]
seg_mc = torch.zeros_like(seg_mc).scatter_(1, index, 1.0)
bg = (seg_mc[:,self.things_idx].sum(dim=1) == 0)
angle = (1 + torch.atan2(ins_offset[:, 1], ins_offset[:, 0]) / np.pi) / 2
sat_norm = torch.min(10 * (torch.sqrt(ins_offset[:, 0] ** 2 + ins_offset[:, 1] ** 2)), torch.tensor([1.]))
cmp = cm.get_cmap('hsv', 128)
offset_rgba = cmp(angle.numpy())
offset_rgb = torch.tensor(offset_rgba[:, :, :, :3]).float()
offset_rgb = sat_norm.unsqueeze(-1) * offset_rgb + (1 - sat_norm).unsqueeze(-1) * torch.ones_like(offset_rgb)
offset_rgb[bg] = torch.tensor([0., 0., 0.])
offset_rgb = offset_rgb.permute(0, 3, 1, 2)
self.log_img(name, offset_rgb, nrow, global_iteration, pad_value=1)
def log_ins_density(self, name, ins_density, nrow, global_iteration):
if self.is_empty([ins_density]):
return
with torch.no_grad():
colored_mask = torch.zeros((ins_density.shape[0], *ins_density.shape[2:]))
has_density = torch.sum(ins_density, dim=1) > 0
max_density, idx = torch.max(ins_density, dim=1)
max_max_density = torch.max(max_density.view(ins_density.shape[0], -1), dim=1, keepdim=True)[0].unsqueeze(-1)
max_max_density[max_max_density == 0] = 1
max_density /= max_max_density
colored_mask[has_density] = torch.Tensor(self.things_idx)[idx[has_density]]
colored_mask = color_transfer(colored_mask.unsqueeze(1), self.colormap)
colored_mask = (colored_mask + 1) * max_density.unsqueeze(1) - 1
self.log_img(name, colored_mask, nrow, global_iteration, normalize=True, range=(-1, 1))
def log_confusion_matrix(self, name, confusion_matrix, global_iteration, save=False, eval_only=False):
if self.is_empty([confusion_matrix]):
return
with torch.no_grad():
num_sem_classes = len(self.eval_idx) if eval_only else self.num_semantics
sem_labels = [self.semantic_labels[i] for i in self.eval_idx] if eval_only else self.semantic_labels
            # use a local name that does not shadow matplotlib's cm import
            conf_mat = confusion_matrix.numpy()
            tot_gt = np.sum(conf_mat, axis=1)
            tot_gt[tot_gt == 0] = 1
            n_cm = (conf_mat.T / tot_gt).T
fig = plt.figure(figsize=(int(0.6 * num_sem_classes), int(0.6 * num_sem_classes)))
ax = fig.add_subplot(111)
cax = ax.matshow(n_cm)
fig.colorbar(cax)
ax.set_aspect('auto')
ax.set_xticks(np.arange(0.0, num_sem_classes, 1.0), minor=False)
ax.set_yticks(np.arange(0.0, num_sem_classes, 1.0), minor=False)
ax.set_xticklabels(sem_labels)
ax.set_yticklabels(sem_labels)
ax.tick_params(axis='both', which='major', labelsize=8)
ax.xaxis.set_ticks_position('bottom')
for tick in ax.get_xticklabels():
tick.set_rotation(45)
plt.xlabel('Predicted')
plt.ylabel('Ground truth')
            for i, j in itertools.product(range(conf_mat.shape[0]), range(conf_mat.shape[1])):
                plt.text(j, i, f"{int(conf_mat[i, j])}",
                         horizontalalignment="center",
                         color="white" if n_cm[i, j] < 0.5 else "black",
                         fontsize=6)
plt.tight_layout()
if save:
plt.savefig(f"{self.log_path}/confusion_matrix.pdf")
self.writer.add_figure(name, fig, global_iteration, close=True)
def log_class_hist(self, name, class_hist, global_iteration, save=False):
if self.is_empty([class_hist]):
return
with torch.no_grad():
fig = plt.figure()
ax = fig.add_subplot(111)
x = np.array(range(len(class_hist)))
ax.bar(x, class_hist)
ax.set_xticks(np.arange(0.0, self.num_semantics, 1.0), minor=False)
ax.set_xticklabels(self.semantic_labels)
for tick in ax.get_xticklabels():
tick.set_rotation(90)
fig.tight_layout()
if save:
plt.savefig(f"{self.log_path}/{name.replace('/', '_')}_{global_iteration}.pdf")
self.writer.add_figure(name, fig, global_iteration, close=True)
def log_component_weights(self, name, weights, global_iteration, save):
if self.is_empty([weights]):
return
with torch.no_grad():
fig = plt.figure()
ax = fig.add_subplot(111)
x = np.array(range(len(weights)))
ax.bar(x, weights)
fig.tight_layout()
if save:
plt.savefig(f"{self.log_path}/{name.replace('/', '_')}_{global_iteration}.pdf")
            self.writer.add_figure(name, fig, global_iteration, close=True)
| tools/logger.py | 0.755817 | 0.288438 |
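Logger pulls everything it needs off a single opt object; a hedged sketch of
constructing one with stub values (hypothetical; the real opt comes from the
project's argument parser, and create_colormap/get_coord may expect more):

from types import SimpleNamespace
from tools.logger import Logger

opt = SimpleNamespace(
    log_path="runs/debug",
    num_semantics=19,
    semantic_labels=[str(i) for i in range(19)],
    num_things=8,
    things_idx=list(range(11, 19)),
    max_dim=256,
    aspect_ratio=2.0,
    eval_idx=list(range(19)),
)
logger = Logger(opt)
logger.log_scalar("loss/train", 0.42, global_iteration=100)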
from math import *
class Success:
success = None
feilmelding = None
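# StringReference is not defined anywhere in this file; a minimal stub is
# assumed here so createStringReference below is runnable.
class StringReference:
    string = None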
def createStringReference(string):
stringReference = StringReference()
stringReference.string = string
return stringReference
def lagGyldigReversTabell(nummerliste):
maxnummer = 0.0
i = 0.0
while i < len(nummerliste):
maxnummer = max(maxnummer,nummerliste[int(i)])
i = i + 1.0
inverse = [None]*int(maxnummer + 1.0)
i = 0.0
while i < maxnummer:
inverse[int(i)] = False
i = i + 1.0
i = 0.0
while i < len(nummerliste):
inverse[int(nummerliste[int(i)])] = True
i = i + 1.0
return inverse
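# The float-typed counters above look machine-generated. A hedged, idiomatic
# Python equivalent of lagGyldigReversTabell:
def lag_gyldig_revers_tabell(nummerliste):
    # inverse[n] is True exactly when n appears in nummerliste
    inverse = [False] * (int(max(nummerliste)) + 1)
    for nr in nummerliste:
        inverse[int(nr)] = True
    return inverse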
def hentKommunenavnFraNummer(kommunenummer, success):
kommunenavn = [None]*int(0.0)
if erGyldigKommunenummer(kommunenummer):
kommunenavnListe = hentKommunenavn()
stringToDecimalResult = stringToDecimalForBase(kommunenummer, 10.0)
if stringToDecimalResult.success:
nr = stringToDecimalResult.result
kommunenavn = kommunenavnListe[int(nr)].string
success.success = True
else:
success.feilmelding = "Kommunenummer var ikke et gyldig tall."
success.success = False
else:
success.success = False
success.feilmelding = "Kommunenummer er ikke gyldig."
return kommunenavn
def erGyldigKommunenummer(kommunenummer):
if len(kommunenummer) == 4.0:
nr = stringToDecimalForBase(kommunenummer, 10.0).result
if nr >= 101.0 and nr <= 2030.0 and erHelttall(nr):
gyldigKommunetabell = hentGyldigKommunetabell()
gyldig = gyldigKommunetabell[int(nr)]
else:
gyldig = False
else:
gyldig = False
return gyldig
def erHelttall(nr):
return (nr % 1.0) == 0.0
def hentGyldigKommunetabell():
nummerliste = hentGyldigeKommunenummer()
return lagGyldigReversTabell(nummerliste)
def hentKommunenavn():
kommunenavn = [None]*int(2100.0)
kommunenavn[int(101.0)] = createStringReference("Halden")
kommunenavn[int(104.0)] = createStringReference("Moss")
kommunenavn[int(105.0)] = createStringReference("Sarpsborg")
kommunenavn[int(106.0)] = createStringReference("Fredrikstad")
kommunenavn[int(111.0)] = createStringReference("Hvaler")
kommunenavn[int(118.0)] = createStringReference("Aremark")
kommunenavn[int(119.0)] = createStringReference("Marker")
kommunenavn[int(121.0)] = createStringReference("R\u00f8mskog")
kommunenavn[int(122.0)] = createStringReference("Tr\u00f8gstad")
kommunenavn[int(123.0)] = createStringReference("Spydeberg")
kommunenavn[int(124.0)] = createStringReference("Askim")
kommunenavn[int(125.0)] = createStringReference("Eidsberg")
kommunenavn[int(127.0)] = createStringReference("Skiptvet")
kommunenavn[int(128.0)] = createStringReference("Rakkestad")
kommunenavn[int(135.0)] = createStringReference("R\u00e5de")
kommunenavn[int(136.0)] = createStringReference("Rygge")
kommunenavn[int(137.0)] = createStringReference("V\u00e5ler")
kommunenavn[int(138.0)] = createStringReference("Hob\u00f8l")
kommunenavn[int(211.0)] = createStringReference("Vestby")
kommunenavn[int(213.0)] = createStringReference("Ski")
kommunenavn[int(214.0)] = createStringReference("\u00c5s")
kommunenavn[int(215.0)] = createStringReference("Frogn")
kommunenavn[int(216.0)] = createStringReference("Nesodden")
kommunenavn[int(217.0)] = createStringReference("Oppeg\u00e5rd")
kommunenavn[int(219.0)] = createStringReference("B\u00e6rum")
kommunenavn[int(220.0)] = createStringReference("Asker")
kommunenavn[int(221.0)] = createStringReference("Aurskog-H\u00f8land")
kommunenavn[int(226.0)] = createStringReference("S\u00f8rum")
kommunenavn[int(227.0)] = createStringReference("Fet")
kommunenavn[int(228.0)] = createStringReference("R\u00e6lingen")
kommunenavn[int(229.0)] = createStringReference("Enebakk")
kommunenavn[int(230.0)] = createStringReference("L\u00f8renskog")
kommunenavn[int(231.0)] = createStringReference("Skedsmo")
kommunenavn[int(233.0)] = createStringReference("Nittedal")
kommunenavn[int(234.0)] = createStringReference("Gjerdrum")
kommunenavn[int(235.0)] = createStringReference("Ullensaker")
kommunenavn[int(236.0)] = createStringReference("Nes")
kommunenavn[int(237.0)] = createStringReference("Eidsvoll")
kommunenavn[int(238.0)] = createStringReference("Nannestad")
kommunenavn[int(239.0)] = createStringReference("Hurdal")
kommunenavn[int(301.0)] = createStringReference("Oslo")
kommunenavn[int(402.0)] = createStringReference("Kongsvinger")
kommunenavn[int(403.0)] = createStringReference("Hamar")
kommunenavn[int(412.0)] = createStringReference("Ringsaker")
kommunenavn[int(415.0)] = createStringReference("L\u00f8ten")
kommunenavn[int(417.0)] = createStringReference("Stange")
kommunenavn[int(418.0)] = createStringReference("Nord-Odal")
kommunenavn[int(419.0)] = createStringReference("S\u00f8r-Odal")
kommunenavn[int(420.0)] = createStringReference("Eidskog")
kommunenavn[int(423.0)] = createStringReference("Grue")
kommunenavn[int(425.0)] = createStringReference("\u00c5snes")
kommunenavn[int(426.0)] = createStringReference("V\u00e5ler")
kommunenavn[int(427.0)] = createStringReference("Elverum")
kommunenavn[int(428.0)] = createStringReference("Trysil")
kommunenavn[int(429.0)] = createStringReference("\u00c5mot")
kommunenavn[int(430.0)] = createStringReference("Stor-Elvdal")
kommunenavn[int(432.0)] = createStringReference("Rendalen")
kommunenavn[int(434.0)] = createStringReference("Engerdal")
kommunenavn[int(436.0)] = createStringReference("Tolga")
kommunenavn[int(437.0)] = createStringReference("Tynset")
kommunenavn[int(438.0)] = createStringReference("Alvdal")
kommunenavn[int(439.0)] = createStringReference("Folldal")
kommunenavn[int(441.0)] = createStringReference("Os")
kommunenavn[int(501.0)] = createStringReference("Lillehammer")
kommunenavn[int(502.0)] = createStringReference("Gj\u00f8vik")
kommunenavn[int(511.0)] = createStringReference("Dovre")
kommunenavn[int(512.0)] = createStringReference("Lesja")
kommunenavn[int(513.0)] = createStringReference("Skj\u00e5k")
kommunenavn[int(514.0)] = createStringReference("Lom")
kommunenavn[int(515.0)] = createStringReference("V\u00e5g\u00e5")
kommunenavn[int(516.0)] = createStringReference("Nord-Fron")
kommunenavn[int(517.0)] = createStringReference("Sel")
kommunenavn[int(519.0)] = createStringReference("S\u00f8r-Fron")
kommunenavn[int(520.0)] = createStringReference("Ringebu")
kommunenavn[int(521.0)] = createStringReference("\u00d8yer")
kommunenavn[int(522.0)] = createStringReference("Gausdal")
kommunenavn[int(528.0)] = createStringReference("\u00d<NAME>")
kommunenavn[int(529.0)] = createStringReference("<NAME>")
kommunenavn[int(532.0)] = createStringReference("Jevnaker")
kommunenavn[int(533.0)] = createStringReference("Lunner")
kommunenavn[int(534.0)] = createStringReference("Gran")
kommunenavn[int(536.0)] = createStringReference("S\u00f8ndre Land")
kommunenavn[int(538.0)] = createStringReference("<NAME>")
kommunenavn[int(540.0)] = createStringReference("S\u00f8r-Aurdal")
kommunenavn[int(541.0)] = createStringReference("Etnedal")
kommunenavn[int(542.0)] = createStringReference("Nord-Aurdal")
kommunenavn[int(543.0)] = createStringReference("<NAME>")
kommunenavn[int(544.0)] = createStringReference("\u00d8yst<NAME>")
kommunenavn[int(545.0)] = createStringReference("Vang")
kommunenavn[int(602.0)] = createStringReference("Drammen")
kommunenavn[int(604.0)] = createStringReference("Kongsberg")
kommunenavn[int(605.0)] = createStringReference("Ringerike")
kommunenavn[int(612.0)] = createStringReference("Hole")
kommunenavn[int(615.0)] = createStringReference("Fl\u00e5")
kommunenavn[int(616.0)] = createStringReference("Nes")
kommunenavn[int(617.0)] = createStringReference("Gol")
kommunenavn[int(618.0)] = createStringReference("Hemsedal")
kommunenavn[int(619.0)] = createStringReference("\u00c5l")
kommunenavn[int(620.0)] = createStringReference("Hol")
kommunenavn[int(621.0)] = createStringReference("Sigdal")
kommunenavn[int(622.0)] = createStringReference("Kr\u00f8dsherad")
kommunenavn[int(623.0)] = createStringReference("Modum")
kommunenavn[int(624.0)] = createStringReference("\u00d8vre Eiker")
    kommunenavn[int(625.0)] = createStringReference("Nedre Eiker")
kommunenavn[int(626.0)] = createStringReference("Lier")
kommunenavn[int(627.0)] = createStringReference("R\u00f8yken")
kommunenavn[int(628.0)] = createStringReference("Hurum")
kommunenavn[int(631.0)] = createStringReference("Flesberg")
kommunenavn[int(632.0)] = createStringReference("Rollag")
    kommunenavn[int(633.0)] = createStringReference("Nore og Uvdal")
kommunenavn[int(701.0)] = createStringReference("Horten")
kommunenavn[int(702.0)] = createStringReference("Holmestrand")
kommunenavn[int(704.0)] = createStringReference("T\u00f8nsberg")
kommunenavn[int(709.0)] = createStringReference("Larvik")
kommunenavn[int(710.0)] = createStringReference("Sandefjord")
kommunenavn[int(711.0)] = createStringReference("Svelvik")
kommunenavn[int(713.0)] = createStringReference("Sande")
kommunenavn[int(714.0)] = createStringReference("Hof")
kommunenavn[int(716.0)] = createStringReference("Re")
kommunenavn[int(722.0)] = createStringReference("N\u00f8tter\u00f8y")
kommunenavn[int(723.0)] = createStringReference("Tj\u00f8me")
kommunenavn[int(728.0)] = createStringReference("Lardal")
kommunenavn[int(805.0)] = createStringReference("Porsgrunn")
kommunenavn[int(806.0)] = createStringReference("Skien")
kommunenavn[int(807.0)] = createStringReference("Notodden")
kommunenavn[int(811.0)] = createStringReference("Siljan")
kommunenavn[int(814.0)] = createStringReference("Bamble")
kommunenavn[int(815.0)] = createStringReference("Krager\u00f8")
kommunenavn[int(817.0)] = createStringReference("Drangedal")
kommunenavn[int(819.0)] = createStringReference("Nome")
kommunenavn[int(821.0)] = createStringReference("B\u00f8")
kommunenavn[int(822.0)] = createStringReference("Sauherad")
kommunenavn[int(826.0)] = createStringReference("Tinn")
kommunenavn[int(827.0)] = createStringReference("Hjartdal")
kommunenavn[int(828.0)] = createStringReference("Seljord")
kommunenavn[int(829.0)] = createStringReference("Kviteseid")
kommunenavn[int(830.0)] = createStringReference("Nissedal")
kommunenavn[int(831.0)] = createStringReference("Fyresdal")
kommunenavn[int(833.0)] = createStringReference("Tokke")
kommunenavn[int(834.0)] = createStringReference("Vinje")
kommunenavn[int(901.0)] = createStringReference("Ris\u00f8r")
kommunenavn[int(904.0)] = createStringReference("Grimstad")
kommunenavn[int(906.0)] = createStringReference("Arendal")
kommunenavn[int(911.0)] = createStringReference("Gjerstad")
kommunenavn[int(912.0)] = createStringReference("Veg\u00e5rshei")
kommunenavn[int(914.0)] = createStringReference("Tvedestrand")
kommunenavn[int(919.0)] = createStringReference("Froland")
kommunenavn[int(926.0)] = createStringReference("Lillesand")
kommunenavn[int(928.0)] = createStringReference("Birkenes")
kommunenavn[int(929.0)] = createStringReference("\u00c5mli")
kommunenavn[int(935.0)] = createStringReference("Iveland")
    kommunenavn[int(937.0)] = createStringReference("Evje og Hornnes")
kommunenavn[int(938.0)] = createStringReference("Bygland")
kommunenavn[int(940.0)] = createStringReference("Valle")
kommunenavn[int(941.0)] = createStringReference("Bykle")
kommunenavn[int(1001.0)] = createStringReference("Kristiansand")
kommunenavn[int(1002.0)] = createStringReference("Mandal")
kommunenavn[int(1003.0)] = createStringReference("Farsund")
kommunenavn[int(1004.0)] = createStringReference("Flekkefjord")
kommunenavn[int(1014.0)] = createStringReference("Vennesla")
kommunenavn[int(1017.0)] = createStringReference("Songdalen")
kommunenavn[int(1018.0)] = createStringReference("S\u00f8gne")
kommunenavn[int(1021.0)] = createStringReference("Marnardal")
kommunenavn[int(1026.0)] = createStringReference("\u00c5seral")
kommunenavn[int(1027.0)] = createStringReference("Audnedal")
kommunenavn[int(1029.0)] = createStringReference("Lindesnes")
kommunenavn[int(1032.0)] = createStringReference("Lyngdal")
kommunenavn[int(1034.0)] = createStringReference("H\u00e6gebostad")
kommunenavn[int(1037.0)] = createStringReference("Kvinesdal")
kommunenavn[int(1046.0)] = createStringReference("Sirdal")
kommunenavn[int(1101.0)] = createStringReference("Eigersund")
kommunenavn[int(1102.0)] = createStringReference("Sandnes")
kommunenavn[int(1103.0)] = createStringReference("Stavanger")
kommunenavn[int(1106.0)] = createStringReference("Haugesund")
kommunenavn[int(1111.0)] = createStringReference("Sokndal")
kommunenavn[int(1112.0)] = createStringReference("Lund")
kommunenavn[int(1114.0)] = createStringReference("Bjerkreim")
kommunenavn[int(1119.0)] = createStringReference("H\u00e5")
kommunenavn[int(1120.0)] = createStringReference("Klepp")
kommunenavn[int(1121.0)] = createStringReference("Time")
kommunenavn[int(1122.0)] = createStringReference("Gjesdal")
kommunenavn[int(1124.0)] = createStringReference("Sola")
kommunenavn[int(1127.0)] = createStringReference("Randaberg")
kommunenavn[int(1129.0)] = createStringReference("Forsand")
kommunenavn[int(1130.0)] = createStringReference("Strand")
kommunenavn[int(1133.0)] = createStringReference("Hjelmeland")
kommunenavn[int(1134.0)] = createStringReference("Suldal")
kommunenavn[int(1135.0)] = createStringReference("Sauda")
kommunenavn[int(1141.0)] = createStringReference("Finn\u00f8y")
kommunenavn[int(1142.0)] = createStringReference("Rennes\u00f8y")
kommunenavn[int(1144.0)] = createStringReference("Kvits\u00f8y")
kommunenavn[int(1145.0)] = createStringReference("Bokn")
kommunenavn[int(1146.0)] = createStringReference("Tysv\u00e6r")
kommunenavn[int(1149.0)] = createStringReference("Karm\u00f8y")
kommunenavn[int(1151.0)] = createStringReference("Utsira")
kommunenavn[int(1160.0)] = createStringReference("Vindafjord")
kommunenavn[int(1201.0)] = createStringReference("Bergen")
kommunenavn[int(1211.0)] = createStringReference("Etne")
kommunenavn[int(1216.0)] = createStringReference("Sveio")
kommunenavn[int(1219.0)] = createStringReference("B\u00f8mlo")
kommunenavn[int(1221.0)] = createStringReference("Stord")
kommunenavn[int(1222.0)] = createStringReference("Fitjar")
kommunenavn[int(1223.0)] = createStringReference("Tysnes")
kommunenavn[int(1224.0)] = createStringReference("Kvinnherad")
kommunenavn[int(1227.0)] = createStringReference("Jondal")
kommunenavn[int(1228.0)] = createStringReference("Odda")
kommunenavn[int(1231.0)] = createStringReference("Ullensvang")
kommunenavn[int(1232.0)] = createStringReference("Eidfjord")
kommunenavn[int(1233.0)] = createStringReference("Ulvik")
kommunenavn[int(1234.0)] = createStringReference("Granvin")
kommunenavn[int(1235.0)] = createStringReference("Voss")
kommunenavn[int(1238.0)] = createStringReference("Kvam")
kommunenavn[int(1241.0)] = createStringReference("Fusa")
kommunenavn[int(1242.0)] = createStringReference("Samnanger")
kommunenavn[int(1243.0)] = createStringReference("Os")
kommunenavn[int(1244.0)] = createStringReference("Austevoll")
kommunenavn[int(1245.0)] = createStringReference("Sund")
kommunenavn[int(1246.0)] = createStringReference("Fjell")
kommunenavn[int(1247.0)] = createStringReference("Ask\u00f8y")
kommunenavn[int(1251.0)] = createStringReference("Vaksdal")
kommunenavn[int(1252.0)] = createStringReference("Modalen")
kommunenavn[int(1253.0)] = createStringReference("Oster\u00f8y")
kommunenavn[int(1256.0)] = createStringReference("Meland")
kommunenavn[int(1259.0)] = createStringReference("\u00d8ygarden")
kommunenavn[int(1260.0)] = createStringReference("Rad\u00f8y")
kommunenavn[int(1263.0)] = createStringReference("Lind\u00e5s")
kommunenavn[int(1264.0)] = createStringReference("Austrheim")
kommunenavn[int(1265.0)] = createStringReference("Fedje")
kommunenavn[int(1266.0)] = createStringReference("Masfjorden")
kommunenavn[int(1401.0)] = createStringReference("Flora")
kommunenavn[int(1411.0)] = createStringReference("Gulen")
kommunenavn[int(1412.0)] = createStringReference("Solund")
kommunenavn[int(1413.0)] = createStringReference("Hyllestad")
kommunenavn[int(1416.0)] = createStringReference("H\u00f8yanger")
kommunenavn[int(1417.0)] = createStringReference("Vik")
kommunenavn[int(1418.0)] = createStringReference("Balestrand")
kommunenavn[int(1419.0)] = createStringReference("Leikanger")
kommunenavn[int(1420.0)] = createStringReference("Sogndal")
kommunenavn[int(1421.0)] = createStringReference("Aurland")
kommunenavn[int(1422.0)] = createStringReference("L\u00e6rdal")
kommunenavn[int(1424.0)] = createStringReference("\u00c5rdal")
kommunenavn[int(1426.0)] = createStringReference("Luster")
kommunenavn[int(1428.0)] = createStringReference("Askvoll")
kommunenavn[int(1429.0)] = createStringReference("Fjaler")
kommunenavn[int(1430.0)] = createStringReference("Gaular")
kommunenavn[int(1431.0)] = createStringReference("J\u00f8lster")
kommunenavn[int(1432.0)] = createStringReference("F\u00f8rde")
kommunenavn[int(1433.0)] = createStringReference("Naustdal")
kommunenavn[int(1438.0)] = createStringReference("Bremanger")
kommunenavn[int(1439.0)] = createStringReference("V\u00e5gs\u00f8y")
kommunenavn[int(1441.0)] = createStringReference("Selje")
kommunenavn[int(1443.0)] = createStringReference("Eid")
kommunenavn[int(1444.0)] = createStringReference("Hornindal")
kommunenavn[int(1445.0)] = createStringReference("Gloppen")
kommunenavn[int(1449.0)] = createStringReference("Stryn")
kommunenavn[int(1502.0)] = createStringReference("Molde")
kommunenavn[int(1504.0)] = createStringReference("\u00c5lesund")
kommunenavn[int(1505.0)] = createStringReference("Kristiansund")
kommunenavn[int(1511.0)] = createStringReference("Vanylven")
kommunenavn[int(1514.0)] = createStringReference("Sande")
kommunenavn[int(1515.0)] = createStringReference("Her\u00f8y")
kommunenavn[int(1516.0)] = createStringReference("Ulstein")
kommunenavn[int(1517.0)] = createStringReference("Hareid")
kommunenavn[int(1519.0)] = createStringReference("Volda")
kommunenavn[int(1520.0)] = createStringReference("\u00d8rsta")
kommunenavn[int(1523.0)] = createStringReference("\u00d8rskog")
kommunenavn[int(1524.0)] = createStringReference("Norddal")
kommunenavn[int(1525.0)] = createStringReference("Stranda")
kommunenavn[int(1526.0)] = createStringReference("Stordal")
kommunenavn[int(1528.0)] = createStringReference("Sykkylven")
kommunenavn[int(1529.0)] = createStringReference("Skodje")
kommunenavn[int(1531.0)] = createStringReference("Sula")
kommunenavn[int(1532.0)] = createStringReference("Giske")
kommunenavn[int(1534.0)] = createStringReference("Haram")
kommunenavn[int(1535.0)] = createStringReference("Vestnes")
kommunenavn[int(1539.0)] = createStringReference("Rauma")
kommunenavn[int(1543.0)] = createStringReference("Nesset")
kommunenavn[int(1545.0)] = createStringReference("Midsund")
kommunenavn[int(1546.0)] = createStringReference("Sand\u00f8y")
kommunenavn[int(1547.0)] = createStringReference("Aukra")
kommunenavn[int(1548.0)] = createStringReference("Fr\u00e6na")
kommunenavn[int(1551.0)] = createStringReference("Eide")
kommunenavn[int(1554.0)] = createStringReference("Aver\u00f8y")
kommunenavn[int(1557.0)] = createStringReference("Gjemnes")
kommunenavn[int(1560.0)] = createStringReference("Tingvoll")
kommunenavn[int(1563.0)] = createStringReference("Sunndal")
kommunenavn[int(1566.0)] = createStringReference("Surnadal")
kommunenavn[int(1567.0)] = createStringReference("Rindal")
kommunenavn[int(1571.0)] = createStringReference("Halsa")
kommunenavn[int(1573.0)] = createStringReference("Sm\u00f8la")
kommunenavn[int(1576.0)] = createStringReference("Aure")
kommunenavn[int(1601.0)] = createStringReference("Trondheim")
kommunenavn[int(1612.0)] = createStringReference("Hemne")
kommunenavn[int(1613.0)] = createStringReference("Snillfjord")
kommunenavn[int(1617.0)] = createStringReference("Hitra")
kommunenavn[int(1620.0)] = createStringReference("Fr\u00f8ya")
kommunenavn[int(1621.0)] = createStringReference("\u00d8rland")
kommunenavn[int(1622.0)] = createStringReference("Agdenes")
kommunenavn[int(1624.0)] = createStringReference("Rissa")
kommunenavn[int(1627.0)] = createStringReference("Bjugn")
kommunenavn[int(1630.0)] = createStringReference("\u00c5fjord")
kommunenavn[int(1632.0)] = createStringReference("Roan")
kommunenavn[int(1633.0)] = createStringReference("Osen")
kommunenavn[int(1634.0)] = createStringReference("Oppdal")
kommunenavn[int(1635.0)] = createStringReference("Rennebu")
kommunenavn[int(1636.0)] = createStringReference("Meldal")
kommunenavn[int(1638.0)] = createStringReference("Orkdal")
kommunenavn[int(1640.0)] = createStringReference("R\u00f8ros")
kommunenavn[int(1644.0)] = createStringReference("Holt\u00e5len")
    kommunenavn[int(1648.0)] = createStringReference("Midtre Gauldal")
kommunenavn[int(1653.0)] = createStringReference("Melhus")
kommunenavn[int(1657.0)] = createStringReference("Skaun")
kommunenavn[int(1662.0)] = createStringReference("Kl\u00e6bu")
kommunenavn[int(1663.0)] = createStringReference("Malvik")
kommunenavn[int(1664.0)] = createStringReference("Selbu")
kommunenavn[int(1665.0)] = createStringReference("Tydal")
kommunenavn[int(1702.0)] = createStringReference("Steinkjer")
kommunenavn[int(1703.0)] = createStringReference("Namsos")
kommunenavn[int(1711.0)] = createStringReference("Mer\u00e5ker")
kommunenavn[int(1714.0)] = createStringReference("Stj\u00f8rdal")
kommunenavn[int(1717.0)] = createStringReference("Frosta")
kommunenavn[int(1718.0)] = createStringReference("Leksvik")
kommunenavn[int(1719.0)] = createStringReference("Levanger")
kommunenavn[int(1721.0)] = createStringReference("Verdal")
kommunenavn[int(1724.0)] = createStringReference("Verran")
kommunenavn[int(1725.0)] = createStringReference("Namdalseid")
kommunenavn[int(1736.0)] = createStringReference("Sn\u00e5ase \u2013 Sn\u00e5sa")
kommunenavn[int(1738.0)] = createStringReference("Lierne")
kommunenavn[int(1739.0)] = createStringReference("Raarvihke \u2013 R\u00f8yrvik")
kommunenavn[int(1740.0)] = createStringReference("Namsskogan")
kommunenavn[int(1742.0)] = createStringReference("Grong")
kommunenavn[int(1743.0)] = createStringReference("H\u00f8ylandet")
kommunenavn[int(1744.0)] = createStringReference("Overhalla")
kommunenavn[int(1748.0)] = createStringReference("Fosnes")
kommunenavn[int(1749.0)] = createStringReference("Flatanger")
kommunenavn[int(1750.0)] = createStringReference("Vikna")
kommunenavn[int(1751.0)] = createStringReference("N\u00e6r\u00f8y")
kommunenavn[int(1755.0)] = createStringReference("Leka")
kommunenavn[int(1756.0)] = createStringReference("Inder\u00f8y")
kommunenavn[int(1804.0)] = createStringReference("Bod\u00f8")
kommunenavn[int(1805.0)] = createStringReference("Narvik")
kommunenavn[int(1811.0)] = createStringReference("Bindal")
kommunenavn[int(1812.0)] = createStringReference("S\u00f8mna")
kommunenavn[int(1813.0)] = createStringReference("Br\u00f8nn\u00f8y")
kommunenavn[int(1815.0)] = createStringReference("Vega")
kommunenavn[int(1816.0)] = createStringReference("Vevelstad")
kommunenavn[int(1818.0)] = createStringReference("Her\u00f8y")
kommunenavn[int(1820.0)] = createStringReference("Alstahaug")
kommunenavn[int(1822.0)] = createStringReference("Leirfjord")
kommunenavn[int(1824.0)] = createStringReference("Vefsn")
kommunenavn[int(1825.0)] = createStringReference("Grane")
kommunenavn[int(1826.0)] = createStringReference("Hattfjelldal")
kommunenavn[int(1827.0)] = createStringReference("D\u00f8nna")
kommunenavn[int(1828.0)] = createStringReference("Nesna")
kommunenavn[int(1832.0)] = createStringReference("Hemnes")
kommunenavn[int(1833.0)] = createStringReference("Rana")
kommunenavn[int(1834.0)] = createStringReference("Lur\u00f8y")
kommunenavn[int(1835.0)] = createStringReference("Tr\u00e6na")
kommunenavn[int(1836.0)] = createStringReference("R\u00f8d\u00f8y")
kommunenavn[int(1837.0)] = createStringReference("Mel\u00f8y")
kommunenavn[int(1838.0)] = createStringReference("Gildesk\u00e5l")
kommunenavn[int(1839.0)] = createStringReference("Beiarn")
kommunenavn[int(1840.0)] = createStringReference("Saltdal")
kommunenavn[int(1841.0)] = createStringReference("Fauske \u2013 Fuossko")
kommunenavn[int(1845.0)] = createStringReference("S\u00f8rfold")
kommunenavn[int(1848.0)] = createStringReference("Steigen")
kommunenavn[int(1849.0)] = createStringReference("Hamar\u00f8y \u2013 H\u00e1bmer")
kommunenavn[int(1850.0)] = createStringReference("Divtasvuodna \u2013 Tysfjord")
kommunenavn[int(1851.0)] = createStringReference("L\u00f8dingen")
kommunenavn[int(1852.0)] = createStringReference("Tjeldsund")
kommunenavn[int(1853.0)] = createStringReference("Evenes")
kommunenavn[int(1854.0)] = createStringReference("Ballangen")
kommunenavn[int(1856.0)] = createStringReference("R\u00f8st")
kommunenavn[int(1857.0)] = createStringReference("V\u00e6r\u00f8y")
kommunenavn[int(1859.0)] = createStringReference("Flakstad")
kommunenavn[int(1860.0)] = createStringReference("Vestv\u00e5g\u00f8y")
kommunenavn[int(1865.0)] = createStringReference("V\u00e5gan")
kommunenavn[int(1866.0)] = createStringReference("Hadsel")
kommunenavn[int(1867.0)] = createStringReference("B\u00f8")
kommunenavn[int(1868.0)] = createStringReference("\u00d8ksnes")
kommunenavn[int(1870.0)] = createStringReference("Sortland \u2013 Suort\u00e1")
kommunenavn[int(1871.0)] = createStringReference("And\u00f8y")
kommunenavn[int(1874.0)] = createStringReference("Moskenes")
kommunenavn[int(1903.0)] = createStringReference("Harstad \u2013 H\u00e1rstt\u00e1k")
kommunenavn[int(1902.0)] = createStringReference("Troms\u00f8")
kommunenavn[int(1911.0)] = createStringReference("Kv\u00e6fjord")
kommunenavn[int(1913.0)] = createStringReference("Sk\u00e5nland")
kommunenavn[int(1917.0)] = createStringReference("Ibestad")
kommunenavn[int(1919.0)] = createStringReference("Gratangen")
kommunenavn[int(1920.0)] = createStringReference("Loab\u00e1k \u2013 Lavangen")
kommunenavn[int(1922.0)] = createStringReference("Bardu")
kommunenavn[int(1923.0)] = createStringReference("Salangen")
kommunenavn[int(1924.0)] = createStringReference("M\u00e5lselv")
kommunenavn[int(1925.0)] = createStringReference("S\u00f8rreisa")
kommunenavn[int(1926.0)] = createStringReference("Dyr\u00f8y")
kommunenavn[int(1927.0)] = createStringReference("Tran\u00f8y")
kommunenavn[int(1928.0)] = createStringReference("Torsken")
kommunenavn[int(1929.0)] = createStringReference("Berg")
kommunenavn[int(1931.0)] = createStringReference("Lenvik")
kommunenavn[int(1933.0)] = createStringReference("Balsfjord")
kommunenavn[int(1936.0)] = createStringReference("Karls\u00f8y")
kommunenavn[int(1938.0)] = createStringReference("Lyngen")
kommunenavn[int(1939.0)] = createStringReference("Storfjord \u2013 Omasvuotna \u2013 Omasvuono")
kommunenavn[int(1940.0)] = createStringReference("G\u00e1ivuotna \u2013 K\u00e5fjord \u2013 Kaivuono")
kommunenavn[int(1941.0)] = createStringReference("Skjerv\u00f8y")
kommunenavn[int(1942.0)] = createStringReference("Nordreisa")
kommunenavn[int(1943.0)] = createStringReference("Kv\u00e6nangen")
kommunenavn[int(2002.0)] = createStringReference("Vard\u00f8")
kommunenavn[int(2003.0)] = createStringReference("Vads\u00f8")
kommunenavn[int(2004.0)] = createStringReference("Hammerfest")
kommunenavn[int(2011.0)] = createStringReference("Guovdageaidnu \u2013 Kautokeino")
kommunenavn[int(2012.0)] = createStringReference("Alta")
kommunenavn[int(2014.0)] = createStringReference("Loppa")
kommunenavn[int(2015.0)] = createStringReference("Hasvik")
kommunenavn[int(2017.0)] = createStringReference("Kvalsund")
kommunenavn[int(2018.0)] = createStringReference("M\u00e5s\u00f8y")
kommunenavn[int(2019.0)] = createStringReference("Nordkapp")
kommunenavn[int(2020.0)] = createStringReference("Porsanger \u2013 Pors\u00e1\u014bgu \u2013 Porsanki")
kommunenavn[int(2021.0)] = createStringReference("K\u00e1r\u00e1\u0161johka \u2013 Karasjok")
kommunenavn[int(2022.0)] = createStringReference("Lebesby")
kommunenavn[int(2023.0)] = createStringReference("Gamvik")
kommunenavn[int(2024.0)] = createStringReference("Berlev\u00e5g")
kommunenavn[int(2025.0)] = createStringReference("Deatnu \u2013 Tana")
kommunenavn[int(2027.0)] = createStringReference("Unj\u00e1rga \u2013 Nesseby")
kommunenavn[int(2028.0)] = createStringReference("B\u00e5tsfjord")
kommunenavn[int(2030.0)] = createStringReference("S\u00f8r-Varanger")
return kommunenavn
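# The 426 valid municipality numbers; erGyldigKommunenummer builds its
# reverse lookup table from this list via hentGyldigKommunetabell.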
def hentGyldigeKommunenummer():
gyldigeKommunenummer = [None]*int(425.0 + 1.0)
gyldigeKommunenummer[int(0.0)] = 101.0
gyldigeKommunenummer[int(1.0)] = 104.0
gyldigeKommunenummer[int(2.0)] = 105.0
gyldigeKommunenummer[int(3.0)] = 106.0
gyldigeKommunenummer[int(4.0)] = 111.0
gyldigeKommunenummer[int(5.0)] = 118.0
gyldigeKommunenummer[int(6.0)] = 119.0
gyldigeKommunenummer[int(7.0)] = 121.0
gyldigeKommunenummer[int(8.0)] = 122.0
gyldigeKommunenummer[int(9.0)] = 123.0
gyldigeKommunenummer[int(10.0)] = 124.0
gyldigeKommunenummer[int(11.0)] = 125.0
gyldigeKommunenummer[int(12.0)] = 127.0
gyldigeKommunenummer[int(13.0)] = 128.0
gyldigeKommunenummer[int(14.0)] = 135.0
gyldigeKommunenummer[int(15.0)] = 136.0
gyldigeKommunenummer[int(16.0)] = 137.0
gyldigeKommunenummer[int(17.0)] = 138.0
gyldigeKommunenummer[int(18.0)] = 211.0
gyldigeKommunenummer[int(19.0)] = 213.0
gyldigeKommunenummer[int(20.0)] = 214.0
gyldigeKommunenummer[int(21.0)] = 215.0
gyldigeKommunenummer[int(22.0)] = 216.0
gyldigeKommunenummer[int(23.0)] = 217.0
gyldigeKommunenummer[int(24.0)] = 219.0
gyldigeKommunenummer[int(25.0)] = 220.0
gyldigeKommunenummer[int(26.0)] = 221.0
gyldigeKommunenummer[int(27.0)] = 226.0
gyldigeKommunenummer[int(28.0)] = 227.0
gyldigeKommunenummer[int(29.0)] = 228.0
gyldigeKommunenummer[int(30.0)] = 229.0
gyldigeKommunenummer[int(31.0)] = 230.0
gyldigeKommunenummer[int(32.0)] = 231.0
gyldigeKommunenummer[int(33.0)] = 233.0
gyldigeKommunenummer[int(34.0)] = 234.0
gyldigeKommunenummer[int(35.0)] = 235.0
gyldigeKommunenummer[int(36.0)] = 236.0
gyldigeKommunenummer[int(37.0)] = 237.0
gyldigeKommunenummer[int(38.0)] = 238.0
gyldigeKommunenummer[int(39.0)] = 239.0
gyldigeKommunenummer[int(40.0)] = 301.0
gyldigeKommunenummer[int(41.0)] = 402.0
gyldigeKommunenummer[int(42.0)] = 403.0
gyldigeKommunenummer[int(43.0)] = 412.0
gyldigeKommunenummer[int(44.0)] = 415.0
gyldigeKommunenummer[int(45.0)] = 417.0
gyldigeKommunenummer[int(46.0)] = 418.0
gyldigeKommunenummer[int(47.0)] = 419.0
gyldigeKommunenummer[int(48.0)] = 420.0
gyldigeKommunenummer[int(49.0)] = 423.0
gyldigeKommunenummer[int(50.0)] = 425.0
gyldigeKommunenummer[int(51.0)] = 426.0
gyldigeKommunenummer[int(52.0)] = 427.0
gyldigeKommunenummer[int(53.0)] = 428.0
gyldigeKommunenummer[int(54.0)] = 429.0
gyldigeKommunenummer[int(55.0)] = 430.0
gyldigeKommunenummer[int(56.0)] = 432.0
gyldigeKommunenummer[int(57.0)] = 434.0
gyldigeKommunenummer[int(58.0)] = 436.0
gyldigeKommunenummer[int(59.0)] = 437.0
gyldigeKommunenummer[int(60.0)] = 438.0
gyldigeKommunenummer[int(61.0)] = 439.0
gyldigeKommunenummer[int(62.0)] = 441.0
gyldigeKommunenummer[int(63.0)] = 501.0
gyldigeKommunenummer[int(64.0)] = 502.0
gyldigeKommunenummer[int(65.0)] = 511.0
gyldigeKommunenummer[int(66.0)] = 512.0
gyldigeKommunenummer[int(67.0)] = 513.0
gyldigeKommunenummer[int(68.0)] = 514.0
gyldigeKommunenummer[int(69.0)] = 515.0
gyldigeKommunenummer[int(70.0)] = 516.0
gyldigeKommunenummer[int(71.0)] = 517.0
gyldigeKommunenummer[int(72.0)] = 519.0
gyldigeKommunenummer[int(73.0)] = 520.0
gyldigeKommunenummer[int(74.0)] = 521.0
gyldigeKommunenummer[int(75.0)] = 522.0
gyldigeKommunenummer[int(76.0)] = 528.0
gyldigeKommunenummer[int(77.0)] = 529.0
gyldigeKommunenummer[int(78.0)] = 532.0
gyldigeKommunenummer[int(79.0)] = 533.0
gyldigeKommunenummer[int(80.0)] = 534.0
gyldigeKommunenummer[int(81.0)] = 536.0
gyldigeKommunenummer[int(82.0)] = 538.0
gyldigeKommunenummer[int(83.0)] = 540.0
gyldigeKommunenummer[int(84.0)] = 541.0
gyldigeKommunenummer[int(85.0)] = 542.0
gyldigeKommunenummer[int(86.0)] = 543.0
gyldigeKommunenummer[int(87.0)] = 544.0
gyldigeKommunenummer[int(88.0)] = 545.0
gyldigeKommunenummer[int(89.0)] = 602.0
gyldigeKommunenummer[int(90.0)] = 604.0
gyldigeKommunenummer[int(91.0)] = 605.0
gyldigeKommunenummer[int(92.0)] = 612.0
gyldigeKommunenummer[int(93.0)] = 615.0
gyldigeKommunenummer[int(94.0)] = 616.0
gyldigeKommunenummer[int(95.0)] = 617.0
gyldigeKommunenummer[int(96.0)] = 618.0
gyldigeKommunenummer[int(97.0)] = 619.0
gyldigeKommunenummer[int(98.0)] = 620.0
gyldigeKommunenummer[int(99.0)] = 621.0
gyldigeKommunenummer[int(100.0)] = 622.0
gyldigeKommunenummer[int(101.0)] = 623.0
gyldigeKommunenummer[int(102.0)] = 624.0
gyldigeKommunenummer[int(103.0)] = 625.0
gyldigeKommunenummer[int(104.0)] = 626.0
gyldigeKommunenummer[int(105.0)] = 627.0
gyldigeKommunenummer[int(106.0)] = 628.0
gyldigeKommunenummer[int(107.0)] = 631.0
gyldigeKommunenummer[int(108.0)] = 632.0
gyldigeKommunenummer[int(109.0)] = 633.0
gyldigeKommunenummer[int(110.0)] = 701.0
gyldigeKommunenummer[int(111.0)] = 702.0
gyldigeKommunenummer[int(112.0)] = 704.0
gyldigeKommunenummer[int(113.0)] = 709.0
gyldigeKommunenummer[int(114.0)] = 710.0
gyldigeKommunenummer[int(115.0)] = 711.0
gyldigeKommunenummer[int(116.0)] = 713.0
gyldigeKommunenummer[int(117.0)] = 714.0
gyldigeKommunenummer[int(118.0)] = 716.0
gyldigeKommunenummer[int(119.0)] = 722.0
gyldigeKommunenummer[int(120.0)] = 723.0
gyldigeKommunenummer[int(121.0)] = 728.0
gyldigeKommunenummer[int(122.0)] = 805.0
gyldigeKommunenummer[int(123.0)] = 806.0
gyldigeKommunenummer[int(124.0)] = 807.0
gyldigeKommunenummer[int(125.0)] = 811.0
gyldigeKommunenummer[int(126.0)] = 814.0
gyldigeKommunenummer[int(127.0)] = 815.0
gyldigeKommunenummer[int(128.0)] = 817.0
gyldigeKommunenummer[int(129.0)] = 819.0
gyldigeKommunenummer[int(130.0)] = 821.0
gyldigeKommunenummer[int(131.0)] = 822.0
gyldigeKommunenummer[int(132.0)] = 826.0
gyldigeKommunenummer[int(133.0)] = 827.0
gyldigeKommunenummer[int(134.0)] = 828.0
gyldigeKommunenummer[int(135.0)] = 829.0
gyldigeKommunenummer[int(136.0)] = 830.0
gyldigeKommunenummer[int(137.0)] = 831.0
gyldigeKommunenummer[int(138.0)] = 833.0
gyldigeKommunenummer[int(139.0)] = 834.0
gyldigeKommunenummer[int(140.0)] = 901.0
gyldigeKommunenummer[int(141.0)] = 904.0
gyldigeKommunenummer[int(142.0)] = 906.0
gyldigeKommunenummer[int(143.0)] = 911.0
gyldigeKommunenummer[int(144.0)] = 912.0
gyldigeKommunenummer[int(145.0)] = 914.0
gyldigeKommunenummer[int(146.0)] = 919.0
gyldigeKommunenummer[int(147.0)] = 926.0
gyldigeKommunenummer[int(148.0)] = 928.0
gyldigeKommunenummer[int(149.0)] = 929.0
gyldigeKommunenummer[int(150.0)] = 935.0
gyldigeKommunenummer[int(151.0)] = 937.0
gyldigeKommunenummer[int(152.0)] = 938.0
gyldigeKommunenummer[int(153.0)] = 940.0
gyldigeKommunenummer[int(154.0)] = 941.0
gyldigeKommunenummer[int(155.0)] = 1001.0
gyldigeKommunenummer[int(156.0)] = 1002.0
gyldigeKommunenummer[int(157.0)] = 1003.0
gyldigeKommunenummer[int(158.0)] = 1004.0
gyldigeKommunenummer[int(159.0)] = 1014.0
gyldigeKommunenummer[int(160.0)] = 1017.0
gyldigeKommunenummer[int(161.0)] = 1018.0
gyldigeKommunenummer[int(162.0)] = 1021.0
gyldigeKommunenummer[int(163.0)] = 1026.0
gyldigeKommunenummer[int(164.0)] = 1027.0
gyldigeKommunenummer[int(165.0)] = 1029.0
gyldigeKommunenummer[int(166.0)] = 1032.0
gyldigeKommunenummer[int(167.0)] = 1034.0
gyldigeKommunenummer[int(168.0)] = 1037.0
gyldigeKommunenummer[int(169.0)] = 1046.0
gyldigeKommunenummer[int(170.0)] = 1101.0
gyldigeKommunenummer[int(171.0)] = 1102.0
gyldigeKommunenummer[int(172.0)] = 1103.0
gyldigeKommunenummer[int(173.0)] = 1106.0
gyldigeKommunenummer[int(174.0)] = 1111.0
gyldigeKommunenummer[int(175.0)] = 1112.0
gyldigeKommunenummer[int(176.0)] = 1114.0
gyldigeKommunenummer[int(177.0)] = 1119.0
gyldigeKommunenummer[int(178.0)] = 1120.0
gyldigeKommunenummer[int(179.0)] = 1121.0
gyldigeKommunenummer[int(180.0)] = 1122.0
gyldigeKommunenummer[int(181.0)] = 1124.0
gyldigeKommunenummer[int(182.0)] = 1127.0
gyldigeKommunenummer[int(183.0)] = 1129.0
gyldigeKommunenummer[int(184.0)] = 1130.0
gyldigeKommunenummer[int(185.0)] = 1133.0
gyldigeKommunenummer[int(186.0)] = 1134.0
gyldigeKommunenummer[int(187.0)] = 1135.0
gyldigeKommunenummer[int(188.0)] = 1141.0
gyldigeKommunenummer[int(189.0)] = 1142.0
gyldigeKommunenummer[int(190.0)] = 1144.0
gyldigeKommunenummer[int(191.0)] = 1145.0
gyldigeKommunenummer[int(192.0)] = 1146.0
gyldigeKommunenummer[int(193.0)] = 1149.0
gyldigeKommunenummer[int(194.0)] = 1151.0
gyldigeKommunenummer[int(195.0)] = 1160.0
gyldigeKommunenummer[int(196.0)] = 1201.0
gyldigeKommunenummer[int(197.0)] = 1211.0
gyldigeKommunenummer[int(198.0)] = 1216.0
gyldigeKommunenummer[int(199.0)] = 1219.0
gyldigeKommunenummer[int(200.0)] = 1221.0
gyldigeKommunenummer[int(201.0)] = 1222.0
gyldigeKommunenummer[int(202.0)] = 1223.0
gyldigeKommunenummer[int(203.0)] = 1224.0
gyldigeKommunenummer[int(204.0)] = 1227.0
gyldigeKommunenummer[int(205.0)] = 1228.0
gyldigeKommunenummer[int(206.0)] = 1231.0
gyldigeKommunenummer[int(207.0)] = 1232.0
gyldigeKommunenummer[int(208.0)] = 1233.0
gyldigeKommunenummer[int(209.0)] = 1234.0
gyldigeKommunenummer[int(210.0)] = 1235.0
gyldigeKommunenummer[int(211.0)] = 1238.0
gyldigeKommunenummer[int(212.0)] = 1241.0
gyldigeKommunenummer[int(213.0)] = 1242.0
gyldigeKommunenummer[int(214.0)] = 1243.0
gyldigeKommunenummer[int(215.0)] = 1244.0
gyldigeKommunenummer[int(216.0)] = 1245.0
gyldigeKommunenummer[int(217.0)] = 1246.0
gyldigeKommunenummer[int(218.0)] = 1247.0
gyldigeKommunenummer[int(219.0)] = 1251.0
gyldigeKommunenummer[int(220.0)] = 1252.0
gyldigeKommunenummer[int(221.0)] = 1253.0
gyldigeKommunenummer[int(222.0)] = 1256.0
gyldigeKommunenummer[int(223.0)] = 1259.0
gyldigeKommunenummer[int(224.0)] = 1260.0
gyldigeKommunenummer[int(225.0)] = 1263.0
gyldigeKommunenummer[int(226.0)] = 1264.0
gyldigeKommunenummer[int(227.0)] = 1265.0
gyldigeKommunenummer[int(228.0)] = 1266.0
gyldigeKommunenummer[int(229.0)] = 1401.0
gyldigeKommunenummer[int(230.0)] = 1411.0
gyldigeKommunenummer[int(231.0)] = 1412.0
gyldigeKommunenummer[int(232.0)] = 1413.0
gyldigeKommunenummer[int(233.0)] = 1416.0
gyldigeKommunenummer[int(234.0)] = 1417.0
gyldigeKommunenummer[int(235.0)] = 1418.0
gyldigeKommunenummer[int(236.0)] = 1419.0
gyldigeKommunenummer[int(237.0)] = 1420.0
gyldigeKommunenummer[int(238.0)] = 1421.0
gyldigeKommunenummer[int(239.0)] = 1422.0
gyldigeKommunenummer[int(240.0)] = 1424.0
gyldigeKommunenummer[int(241.0)] = 1426.0
gyldigeKommunenummer[int(242.0)] = 1428.0
gyldigeKommunenummer[int(243.0)] = 1429.0
gyldigeKommunenummer[int(244.0)] = 1430.0
gyldigeKommunenummer[int(245.0)] = 1431.0
gyldigeKommunenummer[int(246.0)] = 1432.0
gyldigeKommunenummer[int(247.0)] = 1433.0
gyldigeKommunenummer[int(248.0)] = 1438.0
gyldigeKommunenummer[int(249.0)] = 1439.0
gyldigeKommunenummer[int(250.0)] = 1441.0
gyldigeKommunenummer[int(251.0)] = 1443.0
gyldigeKommunenummer[int(252.0)] = 1444.0
gyldigeKommunenummer[int(253.0)] = 1445.0
gyldigeKommunenummer[int(254.0)] = 1449.0
gyldigeKommunenummer[int(255.0)] = 1502.0
gyldigeKommunenummer[int(256.0)] = 1504.0
gyldigeKommunenummer[int(257.0)] = 1505.0
gyldigeKommunenummer[int(258.0)] = 1511.0
gyldigeKommunenummer[int(259.0)] = 1514.0
gyldigeKommunenummer[int(260.0)] = 1515.0
gyldigeKommunenummer[int(261.0)] = 1516.0
gyldigeKommunenummer[int(262.0)] = 1517.0
gyldigeKommunenummer[int(263.0)] = 1519.0
gyldigeKommunenummer[int(264.0)] = 1520.0
gyldigeKommunenummer[int(265.0)] = 1523.0
gyldigeKommunenummer[int(266.0)] = 1524.0
gyldigeKommunenummer[int(267.0)] = 1525.0
gyldigeKommunenummer[int(268.0)] = 1526.0
gyldigeKommunenummer[int(269.0)] = 1528.0
gyldigeKommunenummer[int(270.0)] = 1529.0
gyldigeKommunenummer[int(271.0)] = 1531.0
gyldigeKommunenummer[int(272.0)] = 1532.0
gyldigeKommunenummer[int(273.0)] = 1534.0
gyldigeKommunenummer[int(274.0)] = 1535.0
gyldigeKommunenummer[int(275.0)] = 1539.0
gyldigeKommunenummer[int(276.0)] = 1543.0
gyldigeKommunenummer[int(277.0)] = 1545.0
gyldigeKommunenummer[int(278.0)] = 1546.0
gyldigeKommunenummer[int(279.0)] = 1547.0
gyldigeKommunenummer[int(280.0)] = 1548.0
gyldigeKommunenummer[int(281.0)] = 1551.0
gyldigeKommunenummer[int(282.0)] = 1554.0
gyldigeKommunenummer[int(283.0)] = 1557.0
gyldigeKommunenummer[int(284.0)] = 1560.0
gyldigeKommunenummer[int(285.0)] = 1563.0
gyldigeKommunenummer[int(286.0)] = 1566.0
gyldigeKommunenummer[int(287.0)] = 1567.0
gyldigeKommunenummer[int(288.0)] = 1571.0
gyldigeKommunenummer[int(289.0)] = 1573.0
gyldigeKommunenummer[int(290.0)] = 1576.0
gyldigeKommunenummer[int(291.0)] = 1601.0
gyldigeKommunenummer[int(292.0)] = 1612.0
gyldigeKommunenummer[int(293.0)] = 1613.0
gyldigeKommunenummer[int(294.0)] = 1617.0
gyldigeKommunenummer[int(295.0)] = 1620.0
gyldigeKommunenummer[int(296.0)] = 1621.0
gyldigeKommunenummer[int(297.0)] = 1622.0
gyldigeKommunenummer[int(298.0)] = 1624.0
gyldigeKommunenummer[int(299.0)] = 1627.0
gyldigeKommunenummer[int(300.0)] = 1630.0
gyldigeKommunenummer[int(301.0)] = 1632.0
gyldigeKommunenummer[int(302.0)] = 1633.0
gyldigeKommunenummer[int(303.0)] = 1634.0
gyldigeKommunenummer[int(304.0)] = 1635.0
gyldigeKommunenummer[int(305.0)] = 1636.0
gyldigeKommunenummer[int(306.0)] = 1638.0
gyldigeKommunenummer[int(307.0)] = 1640.0
gyldigeKommunenummer[int(308.0)] = 1644.0
gyldigeKommunenummer[int(309.0)] = 1648.0
gyldigeKommunenummer[int(310.0)] = 1653.0
gyldigeKommunenummer[int(311.0)] = 1657.0
gyldigeKommunenummer[int(312.0)] = 1662.0
gyldigeKommunenummer[int(313.0)] = 1663.0
gyldigeKommunenummer[int(314.0)] = 1664.0
gyldigeKommunenummer[int(315.0)] = 1665.0
gyldigeKommunenummer[int(316.0)] = 1702.0
gyldigeKommunenummer[int(317.0)] = 1703.0
gyldigeKommunenummer[int(318.0)] = 1711.0
gyldigeKommunenummer[int(319.0)] = 1714.0
gyldigeKommunenummer[int(320.0)] = 1717.0
gyldigeKommunenummer[int(321.0)] = 1718.0
gyldigeKommunenummer[int(322.0)] = 1719.0
gyldigeKommunenummer[int(323.0)] = 1721.0
gyldigeKommunenummer[int(324.0)] = 1724.0
gyldigeKommunenummer[int(325.0)] = 1725.0
gyldigeKommunenummer[int(326.0)] = 1736.0
gyldigeKommunenummer[int(327.0)] = 1738.0
gyldigeKommunenummer[int(328.0)] = 1739.0
gyldigeKommunenummer[int(329.0)] = 1740.0
gyldigeKommunenummer[int(330.0)] = 1742.0
gyldigeKommunenummer[int(331.0)] = 1743.0
gyldigeKommunenummer[int(332.0)] = 1744.0
gyldigeKommunenummer[int(333.0)] = 1748.0
gyldigeKommunenummer[int(334.0)] = 1749.0
gyldigeKommunenummer[int(335.0)] = 1750.0
gyldigeKommunenummer[int(336.0)] = 1751.0
gyldigeKommunenummer[int(337.0)] = 1755.0
gyldigeKommunenummer[int(338.0)] = 1756.0
gyldigeKommunenummer[int(339.0)] = 1804.0
gyldigeKommunenummer[int(340.0)] = 1805.0
gyldigeKommunenummer[int(341.0)] = 1811.0
gyldigeKommunenummer[int(342.0)] = 1812.0
gyldigeKommunenummer[int(343.0)] = 1813.0
gyldigeKommunenummer[int(344.0)] = 1815.0
gyldigeKommunenummer[int(345.0)] = 1816.0
gyldigeKommunenummer[int(346.0)] = 1818.0
gyldigeKommunenummer[int(347.0)] = 1820.0
gyldigeKommunenummer[int(348.0)] = 1822.0
gyldigeKommunenummer[int(349.0)] = 1824.0
gyldigeKommunenummer[int(350.0)] = 1825.0
gyldigeKommunenummer[int(351.0)] = 1826.0
gyldigeKommunenummer[int(352.0)] = 1827.0
gyldigeKommunenummer[int(353.0)] = 1828.0
gyldigeKommunenummer[int(354.0)] = 1832.0
gyldigeKommunenummer[int(355.0)] = 1833.0
gyldigeKommunenummer[int(356.0)] = 1834.0
gyldigeKommunenummer[int(357.0)] = 1835.0
gyldigeKommunenummer[int(358.0)] = 1836.0
gyldigeKommunenummer[int(359.0)] = 1837.0
gyldigeKommunenummer[int(360.0)] = 1838.0
gyldigeKommunenummer[int(361.0)] = 1839.0
gyldigeKommunenummer[int(362.0)] = 1840.0
gyldigeKommunenummer[int(363.0)] = 1841.0
gyldigeKommunenummer[int(364.0)] = 1845.0
gyldigeKommunenummer[int(365.0)] = 1848.0
gyldigeKommunenummer[int(366.0)] = 1849.0
gyldigeKommunenummer[int(367.0)] = 1850.0
gyldigeKommunenummer[int(368.0)] = 1851.0
gyldigeKommunenummer[int(369.0)] = 1852.0
gyldigeKommunenummer[int(370.0)] = 1853.0
gyldigeKommunenummer[int(371.0)] = 1854.0
gyldigeKommunenummer[int(372.0)] = 1856.0
gyldigeKommunenummer[int(373.0)] = 1857.0
gyldigeKommunenummer[int(374.0)] = 1859.0
gyldigeKommunenummer[int(375.0)] = 1860.0
gyldigeKommunenummer[int(376.0)] = 1865.0
gyldigeKommunenummer[int(377.0)] = 1866.0
gyldigeKommunenummer[int(378.0)] = 1867.0
gyldigeKommunenummer[int(379.0)] = 1868.0
gyldigeKommunenummer[int(380.0)] = 1870.0
gyldigeKommunenummer[int(381.0)] = 1871.0
gyldigeKommunenummer[int(382.0)] = 1874.0
gyldigeKommunenummer[int(383.0)] = 1903.0
gyldigeKommunenummer[int(384.0)] = 1902.0
gyldigeKommunenummer[int(385.0)] = 1911.0
gyldigeKommunenummer[int(386.0)] = 1913.0
gyldigeKommunenummer[int(387.0)] = 1917.0
gyldigeKommunenummer[int(388.0)] = 1919.0
gyldigeKommunenummer[int(389.0)] = 1920.0
gyldigeKommunenummer[int(390.0)] = 1922.0
gyldigeKommunenummer[int(391.0)] = 1923.0
gyldigeKommunenummer[int(392.0)] = 1924.0
gyldigeKommunenummer[int(393.0)] = 1925.0
gyldigeKommunenummer[int(394.0)] = 1926.0
gyldigeKommunenummer[int(395.0)] = 1927.0
gyldigeKommunenummer[int(396.0)] = 1928.0
gyldigeKommunenummer[int(397.0)] = 1929.0
gyldigeKommunenummer[int(398.0)] = 1931.0
gyldigeKommunenummer[int(399.0)] = 1933.0
gyldigeKommunenummer[int(400.0)] = 1936.0
gyldigeKommunenummer[int(401.0)] = 1938.0
gyldigeKommunenummer[int(402.0)] = 1939.0
gyldigeKommunenummer[int(403.0)] = 1940.0
gyldigeKommunenummer[int(404.0)] = 1941.0
gyldigeKommunenummer[int(405.0)] = 1942.0
gyldigeKommunenummer[int(406.0)] = 1943.0
gyldigeKommunenummer[int(407.0)] = 2002.0
gyldigeKommunenummer[int(408.0)] = 2003.0
gyldigeKommunenummer[int(409.0)] = 2004.0
gyldigeKommunenummer[int(410.0)] = 2011.0
gyldigeKommunenummer[int(411.0)] = 2012.0
gyldigeKommunenummer[int(412.0)] = 2014.0
gyldigeKommunenummer[int(413.0)] = 2015.0
gyldigeKommunenummer[int(414.0)] = 2017.0
gyldigeKommunenummer[int(415.0)] = 2018.0
gyldigeKommunenummer[int(416.0)] = 2019.0
gyldigeKommunenummer[int(417.0)] = 2020.0
gyldigeKommunenummer[int(418.0)] = 2021.0
gyldigeKommunenummer[int(419.0)] = 2022.0
gyldigeKommunenummer[int(420.0)] = 2023.0
gyldigeKommunenummer[int(421.0)] = 2024.0
gyldigeKommunenummer[int(422.0)] = 2025.0
gyldigeKommunenummer[int(423.0)] = 2027.0
gyldigeKommunenummer[int(424.0)] = 2028.0
gyldigeKommunenummer[int(425.0)] = 2030.0
return gyldigeKommunenummer
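# Self-test: municipality number 1640 must resolve to "Røros".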
def test1():
doubleReference = DoubleReference()
doubleReference.doubleValue = 0.0
success = Success()
    kommunenavn = hentKommunenavnFraNummer("1640", success)
    assertStringEquals(kommunenavn, "R\u00f8ros", doubleReference)
assertTrue(success.success, doubleReference)
return doubleReference.doubleValue
def test():
failures = 0.0
failures = failures + test1()
return failures
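# Result holder for the string-to-number parsers below: the parsed value,
# an error message (feilmelding) and a success flag.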
class StringToDecimalResult:
result = None
feilmelding = None
success = None
def decimalToString(decimal):
return numberToString(decimal, 10.0)
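# Formats a decimal number as a character array in the given base: scales
# the value to its full significand, then writes any leading zeros, the
# digits (inserting the decimal point where it belongs) and any trailing
# zeros.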
def numberToString(decimal, base):
string = [None]*int(0.0)
digits = getDigits(base)
# Find digitPosition:
digitPosition = getDigitPosition(decimal, base)
decimal = round(decimal*base**( -digitPosition + digits - 1.0))
hasPrintedPoint = False
# Print leading zeros.
if digitPosition < 0.0:
string = appendCharacter(string, '0')
string = appendCharacter(string, '.')
hasPrintedPoint = True
i = 0.0
while i < -digitPosition - 1.0:
string = appendCharacter(string, '0')
i = i + 1.0
# Print number.
i = 0.0
while i < digits:
d = floor(decimal/base**(digits - i - 1.0))
if not hasPrintedPoint and digitPosition - i + 1.0 == 0.0:
if decimal != 0.0:
string = appendCharacter(string, '.')
hasPrintedPoint = True
if decimal == 0.0 and hasPrintedPoint:
pass
else:
string = appendCharacter(string, getSingleDigitFromNumber(d, base))
decimal = decimal - d*base**(digits - i - 1.0)
i = i + 1.0
# Print trailing zeros.
i = 0.0
while i < digitPosition - digits + 1.0:
string = appendCharacter(string, '0')
i = i + 1.0
# Done
return string
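# Number of significant digits carried through the conversion:
# floor(15/log10(base)), i.e. 15 digits for base 10.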
def getDigits(base):
t = 10.0**15.0
return floor(log10(t)/log10(base))
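# Exponent of the most significant digit: the power p with
# 1 <= decimal/base**p < base, corrected for floating-point error in the
# logarithms. Assumes decimal > 0 (log10 is undefined otherwise).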
def getDigitPosition(decimal, base):
power = ceil(log10(decimal)/log10(base))
t = decimal*base**( -power )
if t < base and t >= 1.0:
pass
elif t >= base:
power = power + 1.0
elif t < 1.0:
power = power - 1.0
return power
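# Digit value to character ('0'-'9', 'A'-'F'); returns '?' when the value
# is not a valid digit in this base.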
def getSingleDigitFromNumber(c, base):
numberTable = getNumberTable()
if c > base - 1.0:
retc = '?'
else:
retc = numberTable[int(c)]
return retc
def getNumberTable():
numberTable = [None]*int(16.0)
numberTable[int(0.0)] = '0'
numberTable[int(1.0)] = '1'
numberTable[int(2.0)] = '2'
numberTable[int(3.0)] = '3'
numberTable[int(4.0)] = '4'
numberTable[int(5.0)] = '5'
numberTable[int(6.0)] = '6'
numberTable[int(7.0)] = '7'
numberTable[int(8.0)] = '8'
numberTable[int(9.0)] = '9'
numberTable[int(10.0)] = 'A'
numberTable[int(11.0)] = 'B'
numberTable[int(12.0)] = 'C'
numberTable[int(13.0)] = 'D'
numberTable[int(14.0)] = 'E'
numberTable[int(15.0)] = 'F'
return numberTable
def stringToDecimal(string):
return stringToDecimalForBase(string, 10.0)
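# Parses an optionally signed, optionally fractional number in the given
# base (2-16): first every character is validated, then the digits before
# and after the decimal point are counted, and finally the value is
# accumulated position by position.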
def stringToDecimalForBase(string, base):
stringToDecimalResult = StringToDecimalResult()
stringToDecimalResult.success = True
i = 0.0
isPositive = True
beforeDecimalPoint = 0.0
afterDecimalPoint = 0.0
n = 0.0
validCharacters = 0.0
if base >= 2.0 and base <= 16.0:
j = 0.0
while j < len(string):
c = string[int(j)]
if isNumber(c, base) or c == '.' or c == '-':
validCharacters = validCharacters + 1.0
j = j + 1.0
if validCharacters == len(string):
if len(string) > 0.0:
c = string[int(i)]
if c == '-':
isPositive = False
i = i + 1.0
if i < len(string):
c = string[int(i)]
if isNumber(c, base):
while isNumber(c, base) and (i < len(string)):
beforeDecimalPoint = beforeDecimalPoint + 1.0
i = i + 1.0
if i < len(string):
c = string[int(i)]
if i < len(string):
c = string[int(i)]
if c == '.':
i = i + 1.0
if i < len(string):
c = string[int(i)]
while isNumber(c, base) and (i < len(string)):
afterDecimalPoint = afterDecimalPoint + 1.0
i = i + 1.0
if i < len(string):
c = string[int(i)]
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "Number must have digits after the decimal point."
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "Number must start with digits (for negative numbers, after the optional negative sign)."
if stringToDecimalResult.success != False:
i = 0.0
if not isPositive :
i = 1.0
j = 0.0
while j < beforeDecimalPoint:
c = string[int(i)]
i = i + 1.0
d = getDecimalFromSingleDecimalDigit(c, base)
n = n + d*base**(beforeDecimalPoint - j - 1.0)
j = j + 1.0
if afterDecimalPoint > 0.0:
i = i + 1.0
j = 0.0
while j < afterDecimalPoint:
c = string[int(i)]
i = i + 1.0
d = getDecimalFromSingleDecimalDigit(c, base)
n = n + d*base**(0.0 - j - 1.0)
j = j + 1.0
if not isPositive :
n = -n
stringToDecimalResult.result = n
stringToDecimalResult.success = True
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "String has no content."
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "String contains invalid character."
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "Base must be from 2 to 16."
return stringToDecimalResult
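# Reverse lookups into the digit table: getDecimalFromSingleDecimalDigit
# maps a character to its numeric value (0 when absent), isNumber tests
# whether a character is a digit in the given base.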
def getDecimalFromSingleDecimalDigit(c, base):
numberTable = getNumberTable()
position = 0.0
i = 0.0
while i < base:
if numberTable[int(i)] == c:
position = i
i = i + 1.0
return position
def isNumber(c, base):
numberTable = getNumberTable()
found = False
i = 0.0
while i < base:
if numberTable[int(i)] == c:
found = True
i = i + 1.0
return found
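# Minimal assertion helpers: every failed check increments
# failures.doubleValue.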
def assertFalse(b, failures):
if b:
failures.doubleValue = failures.doubleValue + 1.0
def assertTrue(b, failures):
if not b :
failures.doubleValue = failures.doubleValue + 1.0
def assertEquals(a, b, failures):
if a != b:
failures.doubleValue = failures.doubleValue + 1.0
def assertStringEquals(a, b, failures):
if not stringsEqual(a, b) :
failures.doubleValue = failures.doubleValue + 1.0
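# Strings are handled as arrays of single characters throughout; these
# helpers convert between character arrays and arrays of character codes.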
def stringToNumberArray(string):
array = [None]*int(len(string))
i = 0.0
while i < len(string):
array[int(i)] = ord(string[int(i)])
i = i + 1.0
return array
def numberArrayToString(array):
string = [None]*int(len(array))
i = 0.0
while i < len(array):
string[int(i)] = chr(int(array[int(i)]))
i = i + 1.0
return string
def stringsEqual(data1, data2):
equal = False
if len(data1) == len(data2):
nrEqual = 0.0
i = 0.0
while i < len(data1):
if data1[int(i)] == data2[int(i)]:
nrEqual = nrEqual + 1.0
i = i + 1.0
if nrEqual == len(data1):
equal = True
else:
equal = False
return equal
def numberArraysEqual(data1, data2):
equal = False
if len(data1) == len(data2):
nrEqual = 0.0
i = 0.0
while i < len(data1):
if data1[int(i)] == data2[int(i)]:
nrEqual = nrEqual + 1.0
i = i + 1.0
if nrEqual == len(data1):
equal = True
else:
equal = False
return equal
def substring(string, fromx, to):
n = [None]*int(to - fromx)
i = fromx
while i < to:
n[int(i - fromx)] = string[int(i)]
i = i + 1.0
return n
def appendString(string, s):
newString = [None]*int(len(string) + len(s))
i = 0.0
while i < len(string):
newString[int(i)] = string[int(i)]
i = i + 1.0
i = 0.0
while i < len(s):
newString[int(len(string) + i)] = s[int(i)]
i = i + 1.0
del(string)
return newString
def appendCharacter(string, c):
newString = [None]*int(len(string) + 1.0)
i = 0.0
while i < len(string):
newString[int(i)] = string[int(i)]
i = i + 1.0
newString[int(len(string))] = c
del(string)
return newString
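# Splits a character array on every occurrence of splitBy; each piece is
# wrapped in a StringReference. A trailing empty piece is dropped.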
def split(toSplit, splitBy):
splitt = [None]*int(0.0)
next = [None]*int(0.0)
i = 0.0
while i < len(toSplit):
c = toSplit[int(i)]
if c == splitBy:
n = StringReference()
n.string = next
splitt = addString(splitt, n)
next = [None]*int(0.0)
else:
next = appendCharacter(next, c)
i = i + 1.0
if len(next) > 0.0:
n = StringReference()
n.string = next
splitt = addString(splitt, n)
return splitt
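# One-field wrapper classes that emulate pass-by-reference for primitives,
# strings and lists.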
class BooleanReference:
booleanValue = None
class DoubleReference:
doubleValue = None
class StringReference:
string = None
class DecimalListRef:
list = None
class StringListRef:
list = None
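# Array-backed list helpers in the same style: every add/remove allocates
# a fresh array and copies the surviving elements across.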
def addDecimal(list, a):
newlist = [None]*int(len(list) + 1.0)
i = 0.0
while i < len(list):
newlist[int(i)] = list[int(i)]
i = i + 1.0
newlist[int(len(list))] = a
del(list)
return newlist
def addDecimalRef(list, i):
list.list = addDecimal(list.list, i)
def removeDecimal(list, n):
newlist = [None]*int(len(list) - 1.0)
i = 0.0
while i < len(list):
if i < n:
newlist[int(i)] = list[int(i)]
if i > n:
newlist[int(i - 1.0)] = list[int(i)]
i = i + 1.0
del(list)
return newlist
def getDecimalRef(list, i):
return list.list[int(i)]
def removeDecimalRef(list, i):
list.list = removeDecimal(list.list, i)
def addString(list, a):
newlist = [None]*int(len(list) + 1.0)
i = 0.0
while i < len(list):
newlist[int(i)] = list[int(i)]
i = i + 1.0
newlist[int(len(list))] = a
del(list)
return newlist
def addStringRef(list, i):
list.list = addString(list.list, i)
def removeString(list, n):
newlist = [None]*int(len(list) - 1.0)
i = 0.0
while i < len(list):
if i < n:
newlist[int(i)] = list[int(i)]
if i > n:
newlist[int(i - 1.0)] = list[int(i)]
i = i + 1.0
del(list)
return newlist
def getStringRef(list, i):
return list.list[int(i)]
def removeStringRef(list, i):
list.list = removeString(list.list, i)
print(test()) | Python/kommunenummer.py | from math import *
class Success:
success = None
feilmelding = None
def createStringReference(string):
stringReference = StringReference()
stringReference.string = string
return stringReference
def lagGyldigReversTabell(nummerliste):
maxnummer = 0.0
i = 0.0
while i < len(nummerliste):
maxnummer = max(maxnummer,nummerliste[int(i)])
i = i + 1.0
inverse = [None]*int(maxnummer + 1.0)
i = 0.0
while i < maxnummer:
inverse[int(i)] = False
i = i + 1.0
i = 0.0
while i < len(nummerliste):
inverse[int(nummerliste[int(i)])] = True
i = i + 1.0
return inverse
def hentKommunenavnFraNummer(kommunenummer, success):
kommunenavn = [None]*int(0.0)
if erGyldigKommunenummer(kommunenummer):
kommunenavnListe = hentKommunenavn()
stringToDecimalResult = stringToDecimalForBase(kommunenummer, 10.0)
if stringToDecimalResult.success:
nr = stringToDecimalResult.result
kommunenavn = kommunenavnListe[int(nr)].string
success.success = True
else:
success.feilmelding = "Kommunenummer var ikke et gyldig tall."
success.success = False
else:
success.success = False
success.feilmelding = "Kommunenummer er ikke gyldig."
return kommunenavn
def erGyldigKommunenummer(kommunenummer):
if len(kommunenummer) == 4.0:
nr = stringToDecimalForBase(kommunenummer, 10.0).result
if nr >= 101.0 and nr <= 2030.0 and erHelttall(nr):
gyldigKommunetabell = hentGyldigKommunetabell()
gyldig = gyldigKommunetabell[int(nr)]
else:
gyldig = False
else:
gyldig = False
return gyldig
def erHelttall(nr):
return (nr % 1.0) == 0.0
def hentGyldigKommunetabell():
nummerliste = hentGyldigeKommunenummer()
return lagGyldigReversTabell(nummerliste)
def hentKommunenavn():
kommunenavn = [None]*int(2100.0)
kommunenavn[int(101.0)] = createStringReference("Halden")
kommunenavn[int(104.0)] = createStringReference("Moss")
kommunenavn[int(105.0)] = createStringReference("Sarpsborg")
kommunenavn[int(106.0)] = createStringReference("Fredrikstad")
kommunenavn[int(111.0)] = createStringReference("Hvaler")
kommunenavn[int(118.0)] = createStringReference("Aremark")
kommunenavn[int(119.0)] = createStringReference("Marker")
kommunenavn[int(121.0)] = createStringReference("R\u00f8mskog")
kommunenavn[int(122.0)] = createStringReference("Tr\u00f8gstad")
kommunenavn[int(123.0)] = createStringReference("Spydeberg")
kommunenavn[int(124.0)] = createStringReference("Askim")
kommunenavn[int(125.0)] = createStringReference("Eidsberg")
kommunenavn[int(127.0)] = createStringReference("Skiptvet")
kommunenavn[int(128.0)] = createStringReference("Rakkestad")
kommunenavn[int(135.0)] = createStringReference("R\u00e5de")
kommunenavn[int(136.0)] = createStringReference("Rygge")
kommunenavn[int(137.0)] = createStringReference("V\u00e5ler")
kommunenavn[int(138.0)] = createStringReference("Hob\u00f8l")
kommunenavn[int(211.0)] = createStringReference("Vestby")
kommunenavn[int(213.0)] = createStringReference("Ski")
kommunenavn[int(214.0)] = createStringReference("\u00c5s")
kommunenavn[int(215.0)] = createStringReference("Frogn")
kommunenavn[int(216.0)] = createStringReference("Nesodden")
kommunenavn[int(217.0)] = createStringReference("Oppeg\u00e5rd")
kommunenavn[int(219.0)] = createStringReference("B\u00e6rum")
kommunenavn[int(220.0)] = createStringReference("Asker")
kommunenavn[int(221.0)] = createStringReference("Aurskog-H\u00f8land")
kommunenavn[int(226.0)] = createStringReference("S\u00f8rum")
kommunenavn[int(227.0)] = createStringReference("Fet")
kommunenavn[int(228.0)] = createStringReference("R\u00e6lingen")
kommunenavn[int(229.0)] = createStringReference("Enebakk")
kommunenavn[int(230.0)] = createStringReference("L\u00f8renskog")
kommunenavn[int(231.0)] = createStringReference("Skedsmo")
kommunenavn[int(233.0)] = createStringReference("Nittedal")
kommunenavn[int(234.0)] = createStringReference("Gjerdrum")
kommunenavn[int(235.0)] = createStringReference("Ullensaker")
kommunenavn[int(236.0)] = createStringReference("Nes")
kommunenavn[int(237.0)] = createStringReference("Eidsvoll")
kommunenavn[int(238.0)] = createStringReference("Nannestad")
kommunenavn[int(239.0)] = createStringReference("Hurdal")
kommunenavn[int(301.0)] = createStringReference("Oslo")
kommunenavn[int(402.0)] = createStringReference("Kongsvinger")
kommunenavn[int(403.0)] = createStringReference("Hamar")
kommunenavn[int(412.0)] = createStringReference("Ringsaker")
kommunenavn[int(415.0)] = createStringReference("L\u00f8ten")
kommunenavn[int(417.0)] = createStringReference("Stange")
kommunenavn[int(418.0)] = createStringReference("Nord-Odal")
kommunenavn[int(419.0)] = createStringReference("S\u00f8r-Odal")
kommunenavn[int(420.0)] = createStringReference("Eidskog")
kommunenavn[int(423.0)] = createStringReference("Grue")
kommunenavn[int(425.0)] = createStringReference("\u00c5snes")
kommunenavn[int(426.0)] = createStringReference("V\u00e5ler")
kommunenavn[int(427.0)] = createStringReference("Elverum")
kommunenavn[int(428.0)] = createStringReference("Trysil")
kommunenavn[int(429.0)] = createStringReference("\u00c5mot")
kommunenavn[int(430.0)] = createStringReference("Stor-Elvdal")
kommunenavn[int(432.0)] = createStringReference("Rendalen")
kommunenavn[int(434.0)] = createStringReference("Engerdal")
kommunenavn[int(436.0)] = createStringReference("Tolga")
kommunenavn[int(437.0)] = createStringReference("Tynset")
kommunenavn[int(438.0)] = createStringReference("Alvdal")
kommunenavn[int(439.0)] = createStringReference("Folldal")
kommunenavn[int(441.0)] = createStringReference("Os")
kommunenavn[int(501.0)] = createStringReference("Lillehammer")
kommunenavn[int(502.0)] = createStringReference("Gj\u00f8vik")
kommunenavn[int(511.0)] = createStringReference("Dovre")
kommunenavn[int(512.0)] = createStringReference("Lesja")
kommunenavn[int(513.0)] = createStringReference("Skj\u00e5k")
kommunenavn[int(514.0)] = createStringReference("Lom")
kommunenavn[int(515.0)] = createStringReference("V\u00e5g\u00e5")
kommunenavn[int(516.0)] = createStringReference("Nord-Fron")
kommunenavn[int(517.0)] = createStringReference("Sel")
kommunenavn[int(519.0)] = createStringReference("S\u00f8r-Fron")
kommunenavn[int(520.0)] = createStringReference("Ringebu")
kommunenavn[int(521.0)] = createStringReference("\u00d8yer")
kommunenavn[int(522.0)] = createStringReference("Gausdal")
kommunenavn[int(528.0)] = createStringReference("\u00d<NAME>")
kommunenavn[int(529.0)] = createStringReference("<NAME>")
kommunenavn[int(532.0)] = createStringReference("Jevnaker")
kommunenavn[int(533.0)] = createStringReference("Lunner")
kommunenavn[int(534.0)] = createStringReference("Gran")
kommunenavn[int(536.0)] = createStringReference("S\u00f8ndre Land")
kommunenavn[int(538.0)] = createStringReference("<NAME>")
kommunenavn[int(540.0)] = createStringReference("S\u00f8r-Aurdal")
kommunenavn[int(541.0)] = createStringReference("Etnedal")
kommunenavn[int(542.0)] = createStringReference("Nord-Aurdal")
kommunenavn[int(543.0)] = createStringReference("<NAME>")
kommunenavn[int(544.0)] = createStringReference("\u00d8yst<NAME>")
kommunenavn[int(545.0)] = createStringReference("Vang")
kommunenavn[int(602.0)] = createStringReference("Drammen")
kommunenavn[int(604.0)] = createStringReference("Kongsberg")
kommunenavn[int(605.0)] = createStringReference("Ringerike")
kommunenavn[int(612.0)] = createStringReference("Hole")
kommunenavn[int(615.0)] = createStringReference("Fl\u00e5")
kommunenavn[int(616.0)] = createStringReference("Nes")
kommunenavn[int(617.0)] = createStringReference("Gol")
kommunenavn[int(618.0)] = createStringReference("Hemsedal")
kommunenavn[int(619.0)] = createStringReference("\u00c5l")
kommunenavn[int(620.0)] = createStringReference("Hol")
kommunenavn[int(621.0)] = createStringReference("Sigdal")
kommunenavn[int(622.0)] = createStringReference("Kr\u00f8dsherad")
kommunenavn[int(623.0)] = createStringReference("Modum")
kommunenavn[int(624.0)] = createStringReference("\u00d8vre Eiker")
kommunenavn[int(625.0)] = createStringReference("<NAME>")
kommunenavn[int(626.0)] = createStringReference("Lier")
kommunenavn[int(627.0)] = createStringReference("R\u00f8yken")
kommunenavn[int(628.0)] = createStringReference("Hurum")
kommunenavn[int(631.0)] = createStringReference("Flesberg")
kommunenavn[int(632.0)] = createStringReference("Rollag")
kommunenavn[int(633.0)] = createStringReference("<NAME>")
kommunenavn[int(701.0)] = createStringReference("Horten")
kommunenavn[int(702.0)] = createStringReference("Holmestrand")
kommunenavn[int(704.0)] = createStringReference("T\u00f8nsberg")
kommunenavn[int(709.0)] = createStringReference("Larvik")
kommunenavn[int(710.0)] = createStringReference("Sandefjord")
kommunenavn[int(711.0)] = createStringReference("Svelvik")
kommunenavn[int(713.0)] = createStringReference("Sande")
kommunenavn[int(714.0)] = createStringReference("Hof")
kommunenavn[int(716.0)] = createStringReference("Re")
kommunenavn[int(722.0)] = createStringReference("N\u00f8tter\u00f8y")
kommunenavn[int(723.0)] = createStringReference("Tj\u00f8me")
kommunenavn[int(728.0)] = createStringReference("Lardal")
kommunenavn[int(805.0)] = createStringReference("Porsgrunn")
kommunenavn[int(806.0)] = createStringReference("Skien")
kommunenavn[int(807.0)] = createStringReference("Notodden")
kommunenavn[int(811.0)] = createStringReference("Siljan")
kommunenavn[int(814.0)] = createStringReference("Bamble")
kommunenavn[int(815.0)] = createStringReference("Krager\u00f8")
kommunenavn[int(817.0)] = createStringReference("Drangedal")
kommunenavn[int(819.0)] = createStringReference("Nome")
kommunenavn[int(821.0)] = createStringReference("B\u00f8")
kommunenavn[int(822.0)] = createStringReference("Sauherad")
kommunenavn[int(826.0)] = createStringReference("Tinn")
kommunenavn[int(827.0)] = createStringReference("Hjartdal")
kommunenavn[int(828.0)] = createStringReference("Seljord")
kommunenavn[int(829.0)] = createStringReference("Kviteseid")
kommunenavn[int(830.0)] = createStringReference("Nissedal")
kommunenavn[int(831.0)] = createStringReference("Fyresdal")
kommunenavn[int(833.0)] = createStringReference("Tokke")
kommunenavn[int(834.0)] = createStringReference("Vinje")
kommunenavn[int(901.0)] = createStringReference("Ris\u00f8r")
kommunenavn[int(904.0)] = createStringReference("Grimstad")
kommunenavn[int(906.0)] = createStringReference("Arendal")
kommunenavn[int(911.0)] = createStringReference("Gjerstad")
kommunenavn[int(912.0)] = createStringReference("Veg\u00e5rshei")
kommunenavn[int(914.0)] = createStringReference("Tvedestrand")
kommunenavn[int(919.0)] = createStringReference("Froland")
kommunenavn[int(926.0)] = createStringReference("Lillesand")
kommunenavn[int(928.0)] = createStringReference("Birkenes")
kommunenavn[int(929.0)] = createStringReference("\u00c5mli")
kommunenavn[int(935.0)] = createStringReference("Iveland")
kommunenavn[int(937.0)] = createStringReference("Evje og Hornnes")
kommunenavn[int(938.0)] = createStringReference("Bygland")
kommunenavn[int(940.0)] = createStringReference("Valle")
kommunenavn[int(941.0)] = createStringReference("Bykle")
kommunenavn[int(1001.0)] = createStringReference("Kristiansand")
kommunenavn[int(1002.0)] = createStringReference("Mandal")
kommunenavn[int(1003.0)] = createStringReference("Farsund")
kommunenavn[int(1004.0)] = createStringReference("Flekkefjord")
kommunenavn[int(1014.0)] = createStringReference("Vennesla")
kommunenavn[int(1017.0)] = createStringReference("Songdalen")
kommunenavn[int(1018.0)] = createStringReference("S\u00f8gne")
kommunenavn[int(1021.0)] = createStringReference("Marnardal")
kommunenavn[int(1026.0)] = createStringReference("\u00c5seral")
kommunenavn[int(1027.0)] = createStringReference("Audnedal")
kommunenavn[int(1029.0)] = createStringReference("Lindesnes")
kommunenavn[int(1032.0)] = createStringReference("Lyngdal")
kommunenavn[int(1034.0)] = createStringReference("H\u00e6gebostad")
kommunenavn[int(1037.0)] = createStringReference("Kvinesdal")
kommunenavn[int(1046.0)] = createStringReference("Sirdal")
kommunenavn[int(1101.0)] = createStringReference("Eigersund")
kommunenavn[int(1102.0)] = createStringReference("Sandnes")
kommunenavn[int(1103.0)] = createStringReference("Stavanger")
kommunenavn[int(1106.0)] = createStringReference("Haugesund")
kommunenavn[int(1111.0)] = createStringReference("Sokndal")
kommunenavn[int(1112.0)] = createStringReference("Lund")
kommunenavn[int(1114.0)] = createStringReference("Bjerkreim")
kommunenavn[int(1119.0)] = createStringReference("H\u00e5")
kommunenavn[int(1120.0)] = createStringReference("Klepp")
kommunenavn[int(1121.0)] = createStringReference("Time")
kommunenavn[int(1122.0)] = createStringReference("Gjesdal")
kommunenavn[int(1124.0)] = createStringReference("Sola")
kommunenavn[int(1127.0)] = createStringReference("Randaberg")
kommunenavn[int(1129.0)] = createStringReference("Forsand")
kommunenavn[int(1130.0)] = createStringReference("Strand")
kommunenavn[int(1133.0)] = createStringReference("Hjelmeland")
kommunenavn[int(1134.0)] = createStringReference("Suldal")
kommunenavn[int(1135.0)] = createStringReference("Sauda")
kommunenavn[int(1141.0)] = createStringReference("Finn\u00f8y")
kommunenavn[int(1142.0)] = createStringReference("Rennes\u00f8y")
kommunenavn[int(1144.0)] = createStringReference("Kvits\u00f8y")
kommunenavn[int(1145.0)] = createStringReference("Bokn")
kommunenavn[int(1146.0)] = createStringReference("Tysv\u00e6r")
kommunenavn[int(1149.0)] = createStringReference("Karm\u00f8y")
kommunenavn[int(1151.0)] = createStringReference("Utsira")
kommunenavn[int(1160.0)] = createStringReference("Vindafjord")
kommunenavn[int(1201.0)] = createStringReference("Bergen")
kommunenavn[int(1211.0)] = createStringReference("Etne")
kommunenavn[int(1216.0)] = createStringReference("Sveio")
kommunenavn[int(1219.0)] = createStringReference("B\u00f8mlo")
kommunenavn[int(1221.0)] = createStringReference("Stord")
kommunenavn[int(1222.0)] = createStringReference("Fitjar")
kommunenavn[int(1223.0)] = createStringReference("Tysnes")
kommunenavn[int(1224.0)] = createStringReference("Kvinnherad")
kommunenavn[int(1227.0)] = createStringReference("Jondal")
kommunenavn[int(1228.0)] = createStringReference("Odda")
kommunenavn[int(1231.0)] = createStringReference("Ullensvang")
kommunenavn[int(1232.0)] = createStringReference("Eidfjord")
kommunenavn[int(1233.0)] = createStringReference("Ulvik")
kommunenavn[int(1234.0)] = createStringReference("Granvin")
kommunenavn[int(1235.0)] = createStringReference("Voss")
kommunenavn[int(1238.0)] = createStringReference("Kvam")
kommunenavn[int(1241.0)] = createStringReference("Fusa")
kommunenavn[int(1242.0)] = createStringReference("Samnanger")
kommunenavn[int(1243.0)] = createStringReference("Os")
kommunenavn[int(1244.0)] = createStringReference("Austevoll")
kommunenavn[int(1245.0)] = createStringReference("Sund")
kommunenavn[int(1246.0)] = createStringReference("Fjell")
kommunenavn[int(1247.0)] = createStringReference("Ask\u00f8y")
kommunenavn[int(1251.0)] = createStringReference("Vaksdal")
kommunenavn[int(1252.0)] = createStringReference("Modalen")
kommunenavn[int(1253.0)] = createStringReference("Oster\u00f8y")
kommunenavn[int(1256.0)] = createStringReference("Meland")
kommunenavn[int(1259.0)] = createStringReference("\u00d8ygarden")
kommunenavn[int(1260.0)] = createStringReference("Rad\u00f8y")
kommunenavn[int(1263.0)] = createStringReference("Lind\u00e5s")
kommunenavn[int(1264.0)] = createStringReference("Austrheim")
kommunenavn[int(1265.0)] = createStringReference("Fedje")
kommunenavn[int(1266.0)] = createStringReference("Masfjorden")
kommunenavn[int(1401.0)] = createStringReference("Flora")
kommunenavn[int(1411.0)] = createStringReference("Gulen")
kommunenavn[int(1412.0)] = createStringReference("Solund")
kommunenavn[int(1413.0)] = createStringReference("Hyllestad")
kommunenavn[int(1416.0)] = createStringReference("H\u00f8yanger")
kommunenavn[int(1417.0)] = createStringReference("Vik")
kommunenavn[int(1418.0)] = createStringReference("Balestrand")
kommunenavn[int(1419.0)] = createStringReference("Leikanger")
kommunenavn[int(1420.0)] = createStringReference("Sogndal")
kommunenavn[int(1421.0)] = createStringReference("Aurland")
kommunenavn[int(1422.0)] = createStringReference("L\u00e6rdal")
kommunenavn[int(1424.0)] = createStringReference("\u00c5rdal")
kommunenavn[int(1426.0)] = createStringReference("Luster")
kommunenavn[int(1428.0)] = createStringReference("Askvoll")
kommunenavn[int(1429.0)] = createStringReference("Fjaler")
kommunenavn[int(1430.0)] = createStringReference("Gaular")
kommunenavn[int(1431.0)] = createStringReference("J\u00f8lster")
kommunenavn[int(1432.0)] = createStringReference("F\u00f8rde")
kommunenavn[int(1433.0)] = createStringReference("Naustdal")
kommunenavn[int(1438.0)] = createStringReference("Bremanger")
kommunenavn[int(1439.0)] = createStringReference("V\u00e5gs\u00f8y")
kommunenavn[int(1441.0)] = createStringReference("Selje")
kommunenavn[int(1443.0)] = createStringReference("Eid")
kommunenavn[int(1444.0)] = createStringReference("Hornindal")
kommunenavn[int(1445.0)] = createStringReference("Gloppen")
kommunenavn[int(1449.0)] = createStringReference("Stryn")
kommunenavn[int(1502.0)] = createStringReference("Molde")
kommunenavn[int(1504.0)] = createStringReference("\u00c5lesund")
kommunenavn[int(1505.0)] = createStringReference("Kristiansund")
kommunenavn[int(1511.0)] = createStringReference("Vanylven")
kommunenavn[int(1514.0)] = createStringReference("Sande")
kommunenavn[int(1515.0)] = createStringReference("Her\u00f8y")
kommunenavn[int(1516.0)] = createStringReference("Ulstein")
kommunenavn[int(1517.0)] = createStringReference("Hareid")
kommunenavn[int(1519.0)] = createStringReference("Volda")
kommunenavn[int(1520.0)] = createStringReference("\u00d8rsta")
kommunenavn[int(1523.0)] = createStringReference("\u00d8rskog")
kommunenavn[int(1524.0)] = createStringReference("Norddal")
kommunenavn[int(1525.0)] = createStringReference("Stranda")
kommunenavn[int(1526.0)] = createStringReference("Stordal")
kommunenavn[int(1528.0)] = createStringReference("Sykkylven")
kommunenavn[int(1529.0)] = createStringReference("Skodje")
kommunenavn[int(1531.0)] = createStringReference("Sula")
kommunenavn[int(1532.0)] = createStringReference("Giske")
kommunenavn[int(1534.0)] = createStringReference("Haram")
kommunenavn[int(1535.0)] = createStringReference("Vestnes")
kommunenavn[int(1539.0)] = createStringReference("Rauma")
kommunenavn[int(1543.0)] = createStringReference("Nesset")
kommunenavn[int(1545.0)] = createStringReference("Midsund")
kommunenavn[int(1546.0)] = createStringReference("Sand\u00f8y")
kommunenavn[int(1547.0)] = createStringReference("Aukra")
kommunenavn[int(1548.0)] = createStringReference("Fr\u00e6na")
kommunenavn[int(1551.0)] = createStringReference("Eide")
kommunenavn[int(1554.0)] = createStringReference("Aver\u00f8y")
kommunenavn[int(1557.0)] = createStringReference("Gjemnes")
kommunenavn[int(1560.0)] = createStringReference("Tingvoll")
kommunenavn[int(1563.0)] = createStringReference("Sunndal")
kommunenavn[int(1566.0)] = createStringReference("Surnadal")
kommunenavn[int(1567.0)] = createStringReference("Rindal")
kommunenavn[int(1571.0)] = createStringReference("Halsa")
kommunenavn[int(1573.0)] = createStringReference("Sm\u00f8la")
kommunenavn[int(1576.0)] = createStringReference("Aure")
kommunenavn[int(1601.0)] = createStringReference("Trondheim")
kommunenavn[int(1612.0)] = createStringReference("Hemne")
kommunenavn[int(1613.0)] = createStringReference("Snillfjord")
kommunenavn[int(1617.0)] = createStringReference("Hitra")
kommunenavn[int(1620.0)] = createStringReference("Fr\u00f8ya")
kommunenavn[int(1621.0)] = createStringReference("\u00d8rland")
kommunenavn[int(1622.0)] = createStringReference("Agdenes")
kommunenavn[int(1624.0)] = createStringReference("Rissa")
kommunenavn[int(1627.0)] = createStringReference("Bjugn")
kommunenavn[int(1630.0)] = createStringReference("\u00c5fjord")
kommunenavn[int(1632.0)] = createStringReference("Roan")
kommunenavn[int(1633.0)] = createStringReference("Osen")
kommunenavn[int(1634.0)] = createStringReference("Oppdal")
kommunenavn[int(1635.0)] = createStringReference("Rennebu")
kommunenavn[int(1636.0)] = createStringReference("Meldal")
kommunenavn[int(1638.0)] = createStringReference("Orkdal")
kommunenavn[int(1640.0)] = createStringReference("R\u00f8ros")
kommunenavn[int(1644.0)] = createStringReference("Holt\u00e5len")
kommunenavn[int(1648.0)] = createStringReference("Midtre Gauldal")
kommunenavn[int(1653.0)] = createStringReference("Melhus")
kommunenavn[int(1657.0)] = createStringReference("Skaun")
kommunenavn[int(1662.0)] = createStringReference("Kl\u00e6bu")
kommunenavn[int(1663.0)] = createStringReference("Malvik")
kommunenavn[int(1664.0)] = createStringReference("Selbu")
kommunenavn[int(1665.0)] = createStringReference("Tydal")
kommunenavn[int(1702.0)] = createStringReference("Steinkjer")
kommunenavn[int(1703.0)] = createStringReference("Namsos")
kommunenavn[int(1711.0)] = createStringReference("Mer\u00e5ker")
kommunenavn[int(1714.0)] = createStringReference("Stj\u00f8rdal")
kommunenavn[int(1717.0)] = createStringReference("Frosta")
kommunenavn[int(1718.0)] = createStringReference("Leksvik")
kommunenavn[int(1719.0)] = createStringReference("Levanger")
kommunenavn[int(1721.0)] = createStringReference("Verdal")
kommunenavn[int(1724.0)] = createStringReference("Verran")
kommunenavn[int(1725.0)] = createStringReference("Namdalseid")
kommunenavn[int(1736.0)] = createStringReference("Sn\u00e5ase \u2013 Sn\u00e5sa")
kommunenavn[int(1738.0)] = createStringReference("Lierne")
kommunenavn[int(1739.0)] = createStringReference("Raarvihke \u2013 R\u00f8yrvik")
kommunenavn[int(1740.0)] = createStringReference("Namsskogan")
kommunenavn[int(1742.0)] = createStringReference("Grong")
kommunenavn[int(1743.0)] = createStringReference("H\u00f8ylandet")
kommunenavn[int(1744.0)] = createStringReference("Overhalla")
kommunenavn[int(1748.0)] = createStringReference("Fosnes")
kommunenavn[int(1749.0)] = createStringReference("Flatanger")
kommunenavn[int(1750.0)] = createStringReference("Vikna")
kommunenavn[int(1751.0)] = createStringReference("N\u00e6r\u00f8y")
kommunenavn[int(1755.0)] = createStringReference("Leka")
kommunenavn[int(1756.0)] = createStringReference("Inder\u00f8y")
kommunenavn[int(1804.0)] = createStringReference("Bod\u00f8")
kommunenavn[int(1805.0)] = createStringReference("Narvik")
kommunenavn[int(1811.0)] = createStringReference("Bindal")
kommunenavn[int(1812.0)] = createStringReference("S\u00f8mna")
kommunenavn[int(1813.0)] = createStringReference("Br\u00f8nn\u00f8y")
kommunenavn[int(1815.0)] = createStringReference("Vega")
kommunenavn[int(1816.0)] = createStringReference("Vevelstad")
kommunenavn[int(1818.0)] = createStringReference("Her\u00f8y")
kommunenavn[int(1820.0)] = createStringReference("Alstahaug")
kommunenavn[int(1822.0)] = createStringReference("Leirfjord")
kommunenavn[int(1824.0)] = createStringReference("Vefsn")
kommunenavn[int(1825.0)] = createStringReference("Grane")
kommunenavn[int(1826.0)] = createStringReference("Hattfjelldal")
kommunenavn[int(1827.0)] = createStringReference("D\u00f8nna")
kommunenavn[int(1828.0)] = createStringReference("Nesna")
kommunenavn[int(1832.0)] = createStringReference("Hemnes")
kommunenavn[int(1833.0)] = createStringReference("Rana")
kommunenavn[int(1834.0)] = createStringReference("Lur\u00f8y")
kommunenavn[int(1835.0)] = createStringReference("Tr\u00e6na")
kommunenavn[int(1836.0)] = createStringReference("R\u00f8d\u00f8y")
kommunenavn[int(1837.0)] = createStringReference("Mel\u00f8y")
kommunenavn[int(1838.0)] = createStringReference("Gildesk\u00e5l")
kommunenavn[int(1839.0)] = createStringReference("Beiarn")
kommunenavn[int(1840.0)] = createStringReference("Saltdal")
kommunenavn[int(1841.0)] = createStringReference("Fauske \u2013 Fuossko")
kommunenavn[int(1845.0)] = createStringReference("S\u00f8rfold")
kommunenavn[int(1848.0)] = createStringReference("Steigen")
kommunenavn[int(1849.0)] = createStringReference("Hamar\u00f8y \u2013 H\u00e1bmer")
kommunenavn[int(1850.0)] = createStringReference("Divtasvuodna \u2013 Tysfjord")
kommunenavn[int(1851.0)] = createStringReference("L\u00f8dingen")
kommunenavn[int(1852.0)] = createStringReference("Tjeldsund")
kommunenavn[int(1853.0)] = createStringReference("Evenes")
kommunenavn[int(1854.0)] = createStringReference("Ballangen")
kommunenavn[int(1856.0)] = createStringReference("R\u00f8st")
kommunenavn[int(1857.0)] = createStringReference("V\u00e6r\u00f8y")
kommunenavn[int(1859.0)] = createStringReference("Flakstad")
kommunenavn[int(1860.0)] = createStringReference("Vestv\u00e5g\u00f8y")
kommunenavn[int(1865.0)] = createStringReference("V\u00e5gan")
kommunenavn[int(1866.0)] = createStringReference("Hadsel")
kommunenavn[int(1867.0)] = createStringReference("B\u00f8")
kommunenavn[int(1868.0)] = createStringReference("\u00d8ksnes")
kommunenavn[int(1870.0)] = createStringReference("Sortland \u2013 Suort\u00e1")
kommunenavn[int(1871.0)] = createStringReference("And\u00f8y")
kommunenavn[int(1874.0)] = createStringReference("Moskenes")
kommunenavn[int(1903.0)] = createStringReference("Harstad \u2013 H\u00e1rstt\u00e1k")
kommunenavn[int(1902.0)] = createStringReference("Troms\u00f8")
kommunenavn[int(1911.0)] = createStringReference("Kv\u00e6fjord")
kommunenavn[int(1913.0)] = createStringReference("Sk\u00e5nland")
kommunenavn[int(1917.0)] = createStringReference("Ibestad")
kommunenavn[int(1919.0)] = createStringReference("Gratangen")
kommunenavn[int(1920.0)] = createStringReference("Loab\u00e1k \u2013 Lavangen")
kommunenavn[int(1922.0)] = createStringReference("Bardu")
kommunenavn[int(1923.0)] = createStringReference("Salangen")
kommunenavn[int(1924.0)] = createStringReference("M\u00e5lselv")
kommunenavn[int(1925.0)] = createStringReference("S\u00f8rreisa")
kommunenavn[int(1926.0)] = createStringReference("Dyr\u00f8y")
kommunenavn[int(1927.0)] = createStringReference("Tran\u00f8y")
kommunenavn[int(1928.0)] = createStringReference("Torsken")
kommunenavn[int(1929.0)] = createStringReference("Berg")
kommunenavn[int(1931.0)] = createStringReference("Lenvik")
kommunenavn[int(1933.0)] = createStringReference("Balsfjord")
kommunenavn[int(1936.0)] = createStringReference("Karls\u00f8y")
kommunenavn[int(1938.0)] = createStringReference("Lyngen")
kommunenavn[int(1939.0)] = createStringReference("Storfjord \u2013 Omasvuotna \u2013 Omasvuono")
kommunenavn[int(1940.0)] = createStringReference("G\u00e1ivuotna \u2013 K\u00e5fjord \u2013 Kaivuono")
kommunenavn[int(1941.0)] = createStringReference("Skjerv\u00f8y")
kommunenavn[int(1942.0)] = createStringReference("Nordreisa")
kommunenavn[int(1943.0)] = createStringReference("Kv\u00e6nangen")
kommunenavn[int(2002.0)] = createStringReference("Vard\u00f8")
kommunenavn[int(2003.0)] = createStringReference("Vads\u00f8")
kommunenavn[int(2004.0)] = createStringReference("Hammerfest")
kommunenavn[int(2011.0)] = createStringReference("Guovdageaidnu \u2013 Kautokeino")
kommunenavn[int(2012.0)] = createStringReference("Alta")
kommunenavn[int(2014.0)] = createStringReference("Loppa")
kommunenavn[int(2015.0)] = createStringReference("Hasvik")
kommunenavn[int(2017.0)] = createStringReference("Kvalsund")
kommunenavn[int(2018.0)] = createStringReference("M\u00e5s\u00f8y")
kommunenavn[int(2019.0)] = createStringReference("Nordkapp")
kommunenavn[int(2020.0)] = createStringReference("Porsanger \u2013 Pors\u00e1\u014bgu \u2013 Porsanki")
kommunenavn[int(2021.0)] = createStringReference("K\u00e1r\u00e1\u0161johka \u2013 Karasjok")
kommunenavn[int(2022.0)] = createStringReference("Lebesby")
kommunenavn[int(2023.0)] = createStringReference("Gamvik")
kommunenavn[int(2024.0)] = createStringReference("Berlev\u00e5g")
kommunenavn[int(2025.0)] = createStringReference("Deatnu \u2013 Tana")
kommunenavn[int(2027.0)] = createStringReference("Unj\u00e1rga \u2013 Nesseby")
kommunenavn[int(2028.0)] = createStringReference("B\u00e5tsfjord")
kommunenavn[int(2030.0)] = createStringReference("S\u00f8r-Varanger")
return kommunenavn
def hentGyldigeKommunenummer():
gyldigeKommunenummer = [None]*int(425.0 + 1.0)
gyldigeKommunenummer[int(0.0)] = 101.0
gyldigeKommunenummer[int(1.0)] = 104.0
gyldigeKommunenummer[int(2.0)] = 105.0
gyldigeKommunenummer[int(3.0)] = 106.0
gyldigeKommunenummer[int(4.0)] = 111.0
gyldigeKommunenummer[int(5.0)] = 118.0
gyldigeKommunenummer[int(6.0)] = 119.0
gyldigeKommunenummer[int(7.0)] = 121.0
gyldigeKommunenummer[int(8.0)] = 122.0
gyldigeKommunenummer[int(9.0)] = 123.0
gyldigeKommunenummer[int(10.0)] = 124.0
gyldigeKommunenummer[int(11.0)] = 125.0
gyldigeKommunenummer[int(12.0)] = 127.0
gyldigeKommunenummer[int(13.0)] = 128.0
gyldigeKommunenummer[int(14.0)] = 135.0
gyldigeKommunenummer[int(15.0)] = 136.0
gyldigeKommunenummer[int(16.0)] = 137.0
gyldigeKommunenummer[int(17.0)] = 138.0
gyldigeKommunenummer[int(18.0)] = 211.0
gyldigeKommunenummer[int(19.0)] = 213.0
gyldigeKommunenummer[int(20.0)] = 214.0
gyldigeKommunenummer[int(21.0)] = 215.0
gyldigeKommunenummer[int(22.0)] = 216.0
gyldigeKommunenummer[int(23.0)] = 217.0
gyldigeKommunenummer[int(24.0)] = 219.0
gyldigeKommunenummer[int(25.0)] = 220.0
gyldigeKommunenummer[int(26.0)] = 221.0
gyldigeKommunenummer[int(27.0)] = 226.0
gyldigeKommunenummer[int(28.0)] = 227.0
gyldigeKommunenummer[int(29.0)] = 228.0
gyldigeKommunenummer[int(30.0)] = 229.0
gyldigeKommunenummer[int(31.0)] = 230.0
gyldigeKommunenummer[int(32.0)] = 231.0
gyldigeKommunenummer[int(33.0)] = 233.0
gyldigeKommunenummer[int(34.0)] = 234.0
gyldigeKommunenummer[int(35.0)] = 235.0
gyldigeKommunenummer[int(36.0)] = 236.0
gyldigeKommunenummer[int(37.0)] = 237.0
gyldigeKommunenummer[int(38.0)] = 238.0
gyldigeKommunenummer[int(39.0)] = 239.0
gyldigeKommunenummer[int(40.0)] = 301.0
gyldigeKommunenummer[int(41.0)] = 402.0
gyldigeKommunenummer[int(42.0)] = 403.0
gyldigeKommunenummer[int(43.0)] = 412.0
gyldigeKommunenummer[int(44.0)] = 415.0
gyldigeKommunenummer[int(45.0)] = 417.0
gyldigeKommunenummer[int(46.0)] = 418.0
gyldigeKommunenummer[int(47.0)] = 419.0
gyldigeKommunenummer[int(48.0)] = 420.0
gyldigeKommunenummer[int(49.0)] = 423.0
gyldigeKommunenummer[int(50.0)] = 425.0
gyldigeKommunenummer[int(51.0)] = 426.0
gyldigeKommunenummer[int(52.0)] = 427.0
gyldigeKommunenummer[int(53.0)] = 428.0
gyldigeKommunenummer[int(54.0)] = 429.0
gyldigeKommunenummer[int(55.0)] = 430.0
gyldigeKommunenummer[int(56.0)] = 432.0
gyldigeKommunenummer[int(57.0)] = 434.0
gyldigeKommunenummer[int(58.0)] = 436.0
gyldigeKommunenummer[int(59.0)] = 437.0
gyldigeKommunenummer[int(60.0)] = 438.0
gyldigeKommunenummer[int(61.0)] = 439.0
gyldigeKommunenummer[int(62.0)] = 441.0
gyldigeKommunenummer[int(63.0)] = 501.0
gyldigeKommunenummer[int(64.0)] = 502.0
gyldigeKommunenummer[int(65.0)] = 511.0
gyldigeKommunenummer[int(66.0)] = 512.0
gyldigeKommunenummer[int(67.0)] = 513.0
gyldigeKommunenummer[int(68.0)] = 514.0
gyldigeKommunenummer[int(69.0)] = 515.0
gyldigeKommunenummer[int(70.0)] = 516.0
gyldigeKommunenummer[int(71.0)] = 517.0
gyldigeKommunenummer[int(72.0)] = 519.0
gyldigeKommunenummer[int(73.0)] = 520.0
gyldigeKommunenummer[int(74.0)] = 521.0
gyldigeKommunenummer[int(75.0)] = 522.0
gyldigeKommunenummer[int(76.0)] = 528.0
gyldigeKommunenummer[int(77.0)] = 529.0
gyldigeKommunenummer[int(78.0)] = 532.0
gyldigeKommunenummer[int(79.0)] = 533.0
gyldigeKommunenummer[int(80.0)] = 534.0
gyldigeKommunenummer[int(81.0)] = 536.0
gyldigeKommunenummer[int(82.0)] = 538.0
gyldigeKommunenummer[int(83.0)] = 540.0
gyldigeKommunenummer[int(84.0)] = 541.0
gyldigeKommunenummer[int(85.0)] = 542.0
gyldigeKommunenummer[int(86.0)] = 543.0
gyldigeKommunenummer[int(87.0)] = 544.0
gyldigeKommunenummer[int(88.0)] = 545.0
gyldigeKommunenummer[int(89.0)] = 602.0
gyldigeKommunenummer[int(90.0)] = 604.0
gyldigeKommunenummer[int(91.0)] = 605.0
gyldigeKommunenummer[int(92.0)] = 612.0
gyldigeKommunenummer[int(93.0)] = 615.0
gyldigeKommunenummer[int(94.0)] = 616.0
gyldigeKommunenummer[int(95.0)] = 617.0
gyldigeKommunenummer[int(96.0)] = 618.0
gyldigeKommunenummer[int(97.0)] = 619.0
gyldigeKommunenummer[int(98.0)] = 620.0
gyldigeKommunenummer[int(99.0)] = 621.0
gyldigeKommunenummer[int(100.0)] = 622.0
gyldigeKommunenummer[int(101.0)] = 623.0
gyldigeKommunenummer[int(102.0)] = 624.0
gyldigeKommunenummer[int(103.0)] = 625.0
gyldigeKommunenummer[int(104.0)] = 626.0
gyldigeKommunenummer[int(105.0)] = 627.0
gyldigeKommunenummer[int(106.0)] = 628.0
gyldigeKommunenummer[int(107.0)] = 631.0
gyldigeKommunenummer[int(108.0)] = 632.0
gyldigeKommunenummer[int(109.0)] = 633.0
gyldigeKommunenummer[int(110.0)] = 701.0
gyldigeKommunenummer[int(111.0)] = 702.0
gyldigeKommunenummer[int(112.0)] = 704.0
gyldigeKommunenummer[int(113.0)] = 709.0
gyldigeKommunenummer[int(114.0)] = 710.0
gyldigeKommunenummer[int(115.0)] = 711.0
gyldigeKommunenummer[int(116.0)] = 713.0
gyldigeKommunenummer[int(117.0)] = 714.0
gyldigeKommunenummer[int(118.0)] = 716.0
gyldigeKommunenummer[int(119.0)] = 722.0
gyldigeKommunenummer[int(120.0)] = 723.0
gyldigeKommunenummer[int(121.0)] = 728.0
gyldigeKommunenummer[int(122.0)] = 805.0
gyldigeKommunenummer[int(123.0)] = 806.0
gyldigeKommunenummer[int(124.0)] = 807.0
gyldigeKommunenummer[int(125.0)] = 811.0
gyldigeKommunenummer[int(126.0)] = 814.0
gyldigeKommunenummer[int(127.0)] = 815.0
gyldigeKommunenummer[int(128.0)] = 817.0
gyldigeKommunenummer[int(129.0)] = 819.0
gyldigeKommunenummer[int(130.0)] = 821.0
gyldigeKommunenummer[int(131.0)] = 822.0
gyldigeKommunenummer[int(132.0)] = 826.0
gyldigeKommunenummer[int(133.0)] = 827.0
gyldigeKommunenummer[int(134.0)] = 828.0
gyldigeKommunenummer[int(135.0)] = 829.0
gyldigeKommunenummer[int(136.0)] = 830.0
gyldigeKommunenummer[int(137.0)] = 831.0
gyldigeKommunenummer[int(138.0)] = 833.0
gyldigeKommunenummer[int(139.0)] = 834.0
gyldigeKommunenummer[int(140.0)] = 901.0
gyldigeKommunenummer[int(141.0)] = 904.0
gyldigeKommunenummer[int(142.0)] = 906.0
gyldigeKommunenummer[int(143.0)] = 911.0
gyldigeKommunenummer[int(144.0)] = 912.0
gyldigeKommunenummer[int(145.0)] = 914.0
gyldigeKommunenummer[int(146.0)] = 919.0
gyldigeKommunenummer[int(147.0)] = 926.0
gyldigeKommunenummer[int(148.0)] = 928.0
gyldigeKommunenummer[int(149.0)] = 929.0
gyldigeKommunenummer[int(150.0)] = 935.0
gyldigeKommunenummer[int(151.0)] = 937.0
gyldigeKommunenummer[int(152.0)] = 938.0
gyldigeKommunenummer[int(153.0)] = 940.0
gyldigeKommunenummer[int(154.0)] = 941.0
gyldigeKommunenummer[int(155.0)] = 1001.0
gyldigeKommunenummer[int(156.0)] = 1002.0
gyldigeKommunenummer[int(157.0)] = 1003.0
gyldigeKommunenummer[int(158.0)] = 1004.0
gyldigeKommunenummer[int(159.0)] = 1014.0
gyldigeKommunenummer[int(160.0)] = 1017.0
gyldigeKommunenummer[int(161.0)] = 1018.0
gyldigeKommunenummer[int(162.0)] = 1021.0
gyldigeKommunenummer[int(163.0)] = 1026.0
gyldigeKommunenummer[int(164.0)] = 1027.0
gyldigeKommunenummer[int(165.0)] = 1029.0
gyldigeKommunenummer[int(166.0)] = 1032.0
gyldigeKommunenummer[int(167.0)] = 1034.0
gyldigeKommunenummer[int(168.0)] = 1037.0
gyldigeKommunenummer[int(169.0)] = 1046.0
gyldigeKommunenummer[int(170.0)] = 1101.0
gyldigeKommunenummer[int(171.0)] = 1102.0
gyldigeKommunenummer[int(172.0)] = 1103.0
gyldigeKommunenummer[int(173.0)] = 1106.0
gyldigeKommunenummer[int(174.0)] = 1111.0
gyldigeKommunenummer[int(175.0)] = 1112.0
gyldigeKommunenummer[int(176.0)] = 1114.0
gyldigeKommunenummer[int(177.0)] = 1119.0
gyldigeKommunenummer[int(178.0)] = 1120.0
gyldigeKommunenummer[int(179.0)] = 1121.0
gyldigeKommunenummer[int(180.0)] = 1122.0
gyldigeKommunenummer[int(181.0)] = 1124.0
gyldigeKommunenummer[int(182.0)] = 1127.0
gyldigeKommunenummer[int(183.0)] = 1129.0
gyldigeKommunenummer[int(184.0)] = 1130.0
gyldigeKommunenummer[int(185.0)] = 1133.0
gyldigeKommunenummer[int(186.0)] = 1134.0
gyldigeKommunenummer[int(187.0)] = 1135.0
gyldigeKommunenummer[int(188.0)] = 1141.0
gyldigeKommunenummer[int(189.0)] = 1142.0
gyldigeKommunenummer[int(190.0)] = 1144.0
gyldigeKommunenummer[int(191.0)] = 1145.0
gyldigeKommunenummer[int(192.0)] = 1146.0
gyldigeKommunenummer[int(193.0)] = 1149.0
gyldigeKommunenummer[int(194.0)] = 1151.0
gyldigeKommunenummer[int(195.0)] = 1160.0
gyldigeKommunenummer[int(196.0)] = 1201.0
gyldigeKommunenummer[int(197.0)] = 1211.0
gyldigeKommunenummer[int(198.0)] = 1216.0
gyldigeKommunenummer[int(199.0)] = 1219.0
gyldigeKommunenummer[int(200.0)] = 1221.0
gyldigeKommunenummer[int(201.0)] = 1222.0
gyldigeKommunenummer[int(202.0)] = 1223.0
gyldigeKommunenummer[int(203.0)] = 1224.0
gyldigeKommunenummer[int(204.0)] = 1227.0
gyldigeKommunenummer[int(205.0)] = 1228.0
gyldigeKommunenummer[int(206.0)] = 1231.0
gyldigeKommunenummer[int(207.0)] = 1232.0
gyldigeKommunenummer[int(208.0)] = 1233.0
gyldigeKommunenummer[int(209.0)] = 1234.0
gyldigeKommunenummer[int(210.0)] = 1235.0
gyldigeKommunenummer[int(211.0)] = 1238.0
gyldigeKommunenummer[int(212.0)] = 1241.0
gyldigeKommunenummer[int(213.0)] = 1242.0
gyldigeKommunenummer[int(214.0)] = 1243.0
gyldigeKommunenummer[int(215.0)] = 1244.0
gyldigeKommunenummer[int(216.0)] = 1245.0
gyldigeKommunenummer[int(217.0)] = 1246.0
gyldigeKommunenummer[int(218.0)] = 1247.0
gyldigeKommunenummer[int(219.0)] = 1251.0
gyldigeKommunenummer[int(220.0)] = 1252.0
gyldigeKommunenummer[int(221.0)] = 1253.0
gyldigeKommunenummer[int(222.0)] = 1256.0
gyldigeKommunenummer[int(223.0)] = 1259.0
gyldigeKommunenummer[int(224.0)] = 1260.0
gyldigeKommunenummer[int(225.0)] = 1263.0
gyldigeKommunenummer[int(226.0)] = 1264.0
gyldigeKommunenummer[int(227.0)] = 1265.0
gyldigeKommunenummer[int(228.0)] = 1266.0
gyldigeKommunenummer[int(229.0)] = 1401.0
gyldigeKommunenummer[int(230.0)] = 1411.0
gyldigeKommunenummer[int(231.0)] = 1412.0
gyldigeKommunenummer[int(232.0)] = 1413.0
gyldigeKommunenummer[int(233.0)] = 1416.0
gyldigeKommunenummer[int(234.0)] = 1417.0
gyldigeKommunenummer[int(235.0)] = 1418.0
gyldigeKommunenummer[int(236.0)] = 1419.0
gyldigeKommunenummer[int(237.0)] = 1420.0
gyldigeKommunenummer[int(238.0)] = 1421.0
gyldigeKommunenummer[int(239.0)] = 1422.0
gyldigeKommunenummer[int(240.0)] = 1424.0
gyldigeKommunenummer[int(241.0)] = 1426.0
gyldigeKommunenummer[int(242.0)] = 1428.0
gyldigeKommunenummer[int(243.0)] = 1429.0
gyldigeKommunenummer[int(244.0)] = 1430.0
gyldigeKommunenummer[int(245.0)] = 1431.0
gyldigeKommunenummer[int(246.0)] = 1432.0
gyldigeKommunenummer[int(247.0)] = 1433.0
gyldigeKommunenummer[int(248.0)] = 1438.0
gyldigeKommunenummer[int(249.0)] = 1439.0
gyldigeKommunenummer[int(250.0)] = 1441.0
gyldigeKommunenummer[int(251.0)] = 1443.0
gyldigeKommunenummer[int(252.0)] = 1444.0
gyldigeKommunenummer[int(253.0)] = 1445.0
gyldigeKommunenummer[int(254.0)] = 1449.0
gyldigeKommunenummer[int(255.0)] = 1502.0
gyldigeKommunenummer[int(256.0)] = 1504.0
gyldigeKommunenummer[int(257.0)] = 1505.0
gyldigeKommunenummer[int(258.0)] = 1511.0
gyldigeKommunenummer[int(259.0)] = 1514.0
gyldigeKommunenummer[int(260.0)] = 1515.0
gyldigeKommunenummer[int(261.0)] = 1516.0
gyldigeKommunenummer[int(262.0)] = 1517.0
gyldigeKommunenummer[int(263.0)] = 1519.0
gyldigeKommunenummer[int(264.0)] = 1520.0
gyldigeKommunenummer[int(265.0)] = 1523.0
gyldigeKommunenummer[int(266.0)] = 1524.0
gyldigeKommunenummer[int(267.0)] = 1525.0
gyldigeKommunenummer[int(268.0)] = 1526.0
gyldigeKommunenummer[int(269.0)] = 1528.0
gyldigeKommunenummer[int(270.0)] = 1529.0
gyldigeKommunenummer[int(271.0)] = 1531.0
gyldigeKommunenummer[int(272.0)] = 1532.0
gyldigeKommunenummer[int(273.0)] = 1534.0
gyldigeKommunenummer[int(274.0)] = 1535.0
gyldigeKommunenummer[int(275.0)] = 1539.0
gyldigeKommunenummer[int(276.0)] = 1543.0
gyldigeKommunenummer[int(277.0)] = 1545.0
gyldigeKommunenummer[int(278.0)] = 1546.0
gyldigeKommunenummer[int(279.0)] = 1547.0
gyldigeKommunenummer[int(280.0)] = 1548.0
gyldigeKommunenummer[int(281.0)] = 1551.0
gyldigeKommunenummer[int(282.0)] = 1554.0
gyldigeKommunenummer[int(283.0)] = 1557.0
gyldigeKommunenummer[int(284.0)] = 1560.0
gyldigeKommunenummer[int(285.0)] = 1563.0
gyldigeKommunenummer[int(286.0)] = 1566.0
gyldigeKommunenummer[int(287.0)] = 1567.0
gyldigeKommunenummer[int(288.0)] = 1571.0
gyldigeKommunenummer[int(289.0)] = 1573.0
gyldigeKommunenummer[int(290.0)] = 1576.0
gyldigeKommunenummer[int(291.0)] = 1601.0
gyldigeKommunenummer[int(292.0)] = 1612.0
gyldigeKommunenummer[int(293.0)] = 1613.0
gyldigeKommunenummer[int(294.0)] = 1617.0
gyldigeKommunenummer[int(295.0)] = 1620.0
gyldigeKommunenummer[int(296.0)] = 1621.0
gyldigeKommunenummer[int(297.0)] = 1622.0
gyldigeKommunenummer[int(298.0)] = 1624.0
gyldigeKommunenummer[int(299.0)] = 1627.0
gyldigeKommunenummer[int(300.0)] = 1630.0
gyldigeKommunenummer[int(301.0)] = 1632.0
gyldigeKommunenummer[int(302.0)] = 1633.0
gyldigeKommunenummer[int(303.0)] = 1634.0
gyldigeKommunenummer[int(304.0)] = 1635.0
gyldigeKommunenummer[int(305.0)] = 1636.0
gyldigeKommunenummer[int(306.0)] = 1638.0
gyldigeKommunenummer[int(307.0)] = 1640.0
gyldigeKommunenummer[int(308.0)] = 1644.0
gyldigeKommunenummer[int(309.0)] = 1648.0
gyldigeKommunenummer[int(310.0)] = 1653.0
gyldigeKommunenummer[int(311.0)] = 1657.0
gyldigeKommunenummer[int(312.0)] = 1662.0
gyldigeKommunenummer[int(313.0)] = 1663.0
gyldigeKommunenummer[int(314.0)] = 1664.0
gyldigeKommunenummer[int(315.0)] = 1665.0
gyldigeKommunenummer[int(316.0)] = 1702.0
gyldigeKommunenummer[int(317.0)] = 1703.0
gyldigeKommunenummer[int(318.0)] = 1711.0
gyldigeKommunenummer[int(319.0)] = 1714.0
gyldigeKommunenummer[int(320.0)] = 1717.0
gyldigeKommunenummer[int(321.0)] = 1718.0
gyldigeKommunenummer[int(322.0)] = 1719.0
gyldigeKommunenummer[int(323.0)] = 1721.0
gyldigeKommunenummer[int(324.0)] = 1724.0
gyldigeKommunenummer[int(325.0)] = 1725.0
gyldigeKommunenummer[int(326.0)] = 1736.0
gyldigeKommunenummer[int(327.0)] = 1738.0
gyldigeKommunenummer[int(328.0)] = 1739.0
gyldigeKommunenummer[int(329.0)] = 1740.0
gyldigeKommunenummer[int(330.0)] = 1742.0
gyldigeKommunenummer[int(331.0)] = 1743.0
gyldigeKommunenummer[int(332.0)] = 1744.0
gyldigeKommunenummer[int(333.0)] = 1748.0
gyldigeKommunenummer[int(334.0)] = 1749.0
gyldigeKommunenummer[int(335.0)] = 1750.0
gyldigeKommunenummer[int(336.0)] = 1751.0
gyldigeKommunenummer[int(337.0)] = 1755.0
gyldigeKommunenummer[int(338.0)] = 1756.0
gyldigeKommunenummer[int(339.0)] = 1804.0
gyldigeKommunenummer[int(340.0)] = 1805.0
gyldigeKommunenummer[int(341.0)] = 1811.0
gyldigeKommunenummer[int(342.0)] = 1812.0
gyldigeKommunenummer[int(343.0)] = 1813.0
gyldigeKommunenummer[int(344.0)] = 1815.0
gyldigeKommunenummer[int(345.0)] = 1816.0
gyldigeKommunenummer[int(346.0)] = 1818.0
gyldigeKommunenummer[int(347.0)] = 1820.0
gyldigeKommunenummer[int(348.0)] = 1822.0
gyldigeKommunenummer[int(349.0)] = 1824.0
gyldigeKommunenummer[int(350.0)] = 1825.0
gyldigeKommunenummer[int(351.0)] = 1826.0
gyldigeKommunenummer[int(352.0)] = 1827.0
gyldigeKommunenummer[int(353.0)] = 1828.0
gyldigeKommunenummer[int(354.0)] = 1832.0
gyldigeKommunenummer[int(355.0)] = 1833.0
gyldigeKommunenummer[int(356.0)] = 1834.0
gyldigeKommunenummer[int(357.0)] = 1835.0
gyldigeKommunenummer[int(358.0)] = 1836.0
gyldigeKommunenummer[int(359.0)] = 1837.0
gyldigeKommunenummer[int(360.0)] = 1838.0
gyldigeKommunenummer[int(361.0)] = 1839.0
gyldigeKommunenummer[int(362.0)] = 1840.0
gyldigeKommunenummer[int(363.0)] = 1841.0
gyldigeKommunenummer[int(364.0)] = 1845.0
gyldigeKommunenummer[int(365.0)] = 1848.0
gyldigeKommunenummer[int(366.0)] = 1849.0
gyldigeKommunenummer[int(367.0)] = 1850.0
gyldigeKommunenummer[int(368.0)] = 1851.0
gyldigeKommunenummer[int(369.0)] = 1852.0
gyldigeKommunenummer[int(370.0)] = 1853.0
gyldigeKommunenummer[int(371.0)] = 1854.0
gyldigeKommunenummer[int(372.0)] = 1856.0
gyldigeKommunenummer[int(373.0)] = 1857.0
gyldigeKommunenummer[int(374.0)] = 1859.0
gyldigeKommunenummer[int(375.0)] = 1860.0
gyldigeKommunenummer[int(376.0)] = 1865.0
gyldigeKommunenummer[int(377.0)] = 1866.0
gyldigeKommunenummer[int(378.0)] = 1867.0
gyldigeKommunenummer[int(379.0)] = 1868.0
gyldigeKommunenummer[int(380.0)] = 1870.0
gyldigeKommunenummer[int(381.0)] = 1871.0
gyldigeKommunenummer[int(382.0)] = 1874.0
gyldigeKommunenummer[int(383.0)] = 1903.0
gyldigeKommunenummer[int(384.0)] = 1902.0
gyldigeKommunenummer[int(385.0)] = 1911.0
gyldigeKommunenummer[int(386.0)] = 1913.0
gyldigeKommunenummer[int(387.0)] = 1917.0
gyldigeKommunenummer[int(388.0)] = 1919.0
gyldigeKommunenummer[int(389.0)] = 1920.0
gyldigeKommunenummer[int(390.0)] = 1922.0
gyldigeKommunenummer[int(391.0)] = 1923.0
gyldigeKommunenummer[int(392.0)] = 1924.0
gyldigeKommunenummer[int(393.0)] = 1925.0
gyldigeKommunenummer[int(394.0)] = 1926.0
gyldigeKommunenummer[int(395.0)] = 1927.0
gyldigeKommunenummer[int(396.0)] = 1928.0
gyldigeKommunenummer[int(397.0)] = 1929.0
gyldigeKommunenummer[int(398.0)] = 1931.0
gyldigeKommunenummer[int(399.0)] = 1933.0
gyldigeKommunenummer[int(400.0)] = 1936.0
gyldigeKommunenummer[int(401.0)] = 1938.0
gyldigeKommunenummer[int(402.0)] = 1939.0
gyldigeKommunenummer[int(403.0)] = 1940.0
gyldigeKommunenummer[int(404.0)] = 1941.0
gyldigeKommunenummer[int(405.0)] = 1942.0
gyldigeKommunenummer[int(406.0)] = 1943.0
gyldigeKommunenummer[int(407.0)] = 2002.0
gyldigeKommunenummer[int(408.0)] = 2003.0
gyldigeKommunenummer[int(409.0)] = 2004.0
gyldigeKommunenummer[int(410.0)] = 2011.0
gyldigeKommunenummer[int(411.0)] = 2012.0
gyldigeKommunenummer[int(412.0)] = 2014.0
gyldigeKommunenummer[int(413.0)] = 2015.0
gyldigeKommunenummer[int(414.0)] = 2017.0
gyldigeKommunenummer[int(415.0)] = 2018.0
gyldigeKommunenummer[int(416.0)] = 2019.0
gyldigeKommunenummer[int(417.0)] = 2020.0
gyldigeKommunenummer[int(418.0)] = 2021.0
gyldigeKommunenummer[int(419.0)] = 2022.0
gyldigeKommunenummer[int(420.0)] = 2023.0
gyldigeKommunenummer[int(421.0)] = 2024.0
gyldigeKommunenummer[int(422.0)] = 2025.0
gyldigeKommunenummer[int(423.0)] = 2027.0
gyldigeKommunenummer[int(424.0)] = 2028.0
gyldigeKommunenummer[int(425.0)] = 2030.0
return gyldigeKommunenummer
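# --- Illustrative sketch (not part of the original file) ---
# The two builders above fill fixed-size arrays in the transpiled style used
# throughout this file. An idiomatic Python version would use a dict, with
# validity falling out of key membership. KOMMUNENAVN and er_gyldig below are
# hypothetical names, and only a few entries from the tables are repeated:
KOMMUNENAVN = {602: "Drammen", 1640: "R\u00f8ros", 2030: "S\u00f8r-Varanger"}
def er_gyldig(nummer): return nummer in KOMMUNENAVN  # valid iff it has a name
# --- End sketch ---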
def test1():
doubleReference = DoubleReference()
doubleReference.doubleValue = 0.0
success = Success()
kommunenavn = hentKommunenavnFraNummer("1640", success)
assertStringEquals(kommunenavn, "R\u00f8ros", doubleReference)
assertTrue(success.success, doubleReference)
return doubleReference.doubleValue
def test():
failures = 0.0
failures = failures + test1()
return failures
class StringToDecimalResult:
result = None
feilmelding = None
success = None
def decimalToString(decimal):
return numberToString(decimal, 10.0)
def numberToString(decimal, base):
string = [None]*int(0.0)
digits = getDigits(base)
# Find digitPosition:
digitPosition = getDigitPosition(decimal, base)
decimal = round(decimal*base**( -digitPosition + digits - 1.0))
hasPrintedPoint = False
# Print leading zeros.
if digitPosition < 0.0:
string = appendCharacter(string, '0')
string = appendCharacter(string, '.')
hasPrintedPoint = True
i = 0.0
while i < -digitPosition - 1.0:
string = appendCharacter(string, '0')
i = i + 1.0
# Print number.
i = 0.0
while i < digits:
d = floor(decimal/base**(digits - i - 1.0))
if not hasPrintedPoint and digitPosition - i + 1.0 == 0.0:
if decimal != 0.0:
string = appendCharacter(string, '.')
hasPrintedPoint = True
if decimal == 0.0 and hasPrintedPoint:
pass
else:
string = appendCharacter(string, getSingleDigitFromNumber(d, base))
decimal = decimal - d*base**(digits - i - 1.0)
i = i + 1.0
# Print trailing zeros.
i = 0.0
while i < digitPosition - digits + 1.0:
string = appendCharacter(string, '0')
i = i + 1.0
# Done
return string
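# --- Worked example (not part of the original file) ---
# Tracing numberToString(12.5, 10): getDigits(10) == 15 significant digits and
# getDigitPosition(12.5, 10) == 1, so the input is scaled to
# round(12.5 * 10**13) == 125000000000000. The loop emits '1' and '2', inserts
# the point when digitPosition - i + 1.0 hits 0 (at i == 2), emits '5', and
# suppresses the rest once the remainder reaches zero. The return value is a
# character list, so ''.join(numberToString(12.5, 10)) == "12.5".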
def getDigits(base):
t = 10.0**15.0
return floor(log10(t)/log10(base))
def getDigitPosition(decimal, base):
if decimal == 0.0: return 0.0  # guard: log10(0) below would raise a math domain error
power = ceil(log10(decimal)/log10(base))
t = decimal*base**( -power )
if t < base and t >= 1.0:
pass
elif t >= base:
power = power + 1.0
elif t < 1.0:
power = power - 1.0
return power
def getSingleDigitFromNumber(c, base):
numberTable = getNumberTable()
if c > base - 1.0:
retc = '?'
else:
retc = numberTable[int(c)]
return retc
def getNumberTable():
numberTable = [None]*int(16.0)
numberTable[int(0.0)] = '0'
numberTable[int(1.0)] = '1'
numberTable[int(2.0)] = '2'
numberTable[int(3.0)] = '3'
numberTable[int(4.0)] = '4'
numberTable[int(5.0)] = '5'
numberTable[int(6.0)] = '6'
numberTable[int(7.0)] = '7'
numberTable[int(8.0)] = '8'
numberTable[int(9.0)] = '9'
numberTable[int(10.0)] = 'A'
numberTable[int(11.0)] = 'B'
numberTable[int(12.0)] = 'C'
numberTable[int(13.0)] = 'D'
numberTable[int(14.0)] = 'E'
numberTable[int(15.0)] = 'F'
return numberTable
def stringToDecimal(string):
return stringToDecimalForBase(string, 10.0)
def stringToDecimalForBase(string, base):
stringToDecimalResult = StringToDecimalResult()
stringToDecimalResult.success = True
i = 0.0
isPositive = True
beforeDecimalPoint = 0.0
afterDecimalPoint = 0.0
n = 0.0
validCharacters = 0.0
if base >= 2.0 and base <= 16.0:
j = 0.0
while j < len(string):
c = string[int(j)]
if isNumber(c, base) or c == '.' or c == '-':
validCharacters = validCharacters + 1.0
j = j + 1.0
if validCharacters == len(string):
if len(string) > 0.0:
c = string[int(i)]
if c == '-':
isPositive = False
i = i + 1.0
if i < len(string):
c = string[int(i)]
if isNumber(c, base):
while isNumber(c, base) and (i < len(string)):
beforeDecimalPoint = beforeDecimalPoint + 1.0
i = i + 1.0
if i < len(string):
c = string[int(i)]
if i < len(string):
c = string[int(i)]
if c == '.':
i = i + 1.0
if i < len(string):
c = string[int(i)]
while isNumber(c, base) and (i < len(string)):
afterDecimalPoint = afterDecimalPoint + 1.0
i = i + 1.0
if i < len(string):
c = string[int(i)]
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "Number must have digits after the decimal point."
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "Number must start with digits (for negative numbers, after the optional negative sign)."
if stringToDecimalResult.success != False:
i = 0.0
if not isPositive :
i = 1.0
j = 0.0
while j < beforeDecimalPoint:
c = string[int(i)]
i = i + 1.0
d = getDecimalFromSingleDecimalDigit(c, base)
n = n + d*base**(beforeDecimalPoint - j - 1.0)
j = j + 1.0
if afterDecimalPoint > 0.0:
i = i + 1.0
j = 0.0
while j < afterDecimalPoint:
c = string[int(i)]
i = i + 1.0
d = getDecimalFromSingleDecimalDigit(c, base)
n = n + d*base**(0.0 - j - 1.0)
j = j + 1.0
if not isPositive :
n = -n
stringToDecimalResult.result = n
stringToDecimalResult.success = True
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "String has no content."
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "String contains invalid character."
else:
stringToDecimalResult.success = False
stringToDecimalResult.feilmelding = "Base must be from 2 to 16."
return stringToDecimalResult
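# --- Illustrative sketch (not part of the original file) ---
# stringToDecimalForBase validates every character before parsing, so failures
# come back on the result object instead of as exceptions:
#   res = stringToDecimal("-12.5")  # res.success == True, res.result == -12.5
#   res = stringToDecimal("12x")    # res.success == False, and
#                                   # res.feilmelding reports an invalid character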
def getDecimalFromSingleDecimalDigit(c, base):
numberTable = getNumberTable()
position = 0.0
i = 0.0
while i < base:
if numberTable[int(i)] == c:
position = i
i = i + 1.0
return position
def isNumber(c, base):
numberTable = getNumberTable()
found = False
i = 0.0
while i < base:
if numberTable[int(i)] == c:
found = True
i = i + 1.0
return found
def assertFalse(b, failures):
if b:
failures.doubleValue = failures.doubleValue + 1.0
def assertTrue(b, failures):
if not b :
failures.doubleValue = failures.doubleValue + 1.0
def assertEquals(a, b, failures):
if a != b:
failures.doubleValue = failures.doubleValue + 1.0
def assertStringEquals(a, b, failures):
if not stringsEqual(a, b) :
failures.doubleValue = failures.doubleValue + 1.0
def stringToNumberArray(string):
array = [None]*int(len(string))
i = 0.0
while i < len(string):
array[int(i)] = ord(string[int(i)])
i = i + 1.0
return array
def numberArrayToString(array):
string = [None]*int(len(array))
i = 0.0
while i < len(array):
string[int(i)] = chr(int(array[int(i)]))
i = i + 1.0
return string
def stringsEqual(data1, data2):
equal = False
if len(data1) == len(data2):
nrEqual = 0.0
i = 0.0
while i < len(data1):
if data1[int(i)] == data2[int(i)]:
nrEqual = nrEqual + 1.0
i = i + 1.0
if nrEqual == len(data1):
equal = True
else:
equal = False
return equal
def numberArraysEqual(data1, data2):
equal = False
if len(data1) == len(data2):
nrEqual = 0.0
i = 0.0
while i < len(data1):
if data1[int(i)] == data2[int(i)]:
nrEqual = nrEqual + 1.0
i = i + 1.0
if nrEqual == len(data1):
equal = True
else:
equal = False
return equal
def substring(string, fromx, to):
n = [None]*int(to - fromx)
i = fromx
while i < to:
n[int(i - fromx)] = string[int(i)]
i = i + 1.0
return n
def appendString(string, s):
newString = [None]*int(len(string) + len(s))
i = 0.0
while i < len(string):
newString[int(i)] = string[int(i)]
i = i + 1.0
i = 0.0
while i < len(s):
newString[int(len(string) + i)] = s[int(i)]
i = i + 1.0
del(string)
return newString
def appendCharacter(string, c):
newString = [None]*int(len(string) + 1.0)
i = 0.0
while i < len(string):
newString[int(i)] = string[int(i)]
i = i + 1.0
newString[int(len(string))] = c
del(string)
return newString
def split(toSplit, splitBy):
splitt = [None]*int(0.0)
next = [None]*int(0.0)
i = 0.0
while i < len(toSplit):
c = toSplit[int(i)]
if c == splitBy:
n = StringReference()
n.string = next
splitt = addString(splitt, n)
next = [None]*int(0.0)
else:
next = appendCharacter(next, c)
i = i + 1.0
if len(next) > 0.0:
n = StringReference()
n.string = next
splitt = addString(splitt, n)
return splitt
class BooleanReference:
booleanValue = None
class DoubleReference:
doubleValue = None
class StringReference:
string = None
class DecimalListRef:
list = None
class StringListRef:
list = None
def addDecimal(list, a):
newlist = [None]*int(len(list) + 1.0)
i = 0.0
while i < len(list):
newlist[int(i)] = list[int(i)]
i = i + 1.0
newlist[int(len(list))] = a
del(list)
return newlist
def addDecimalRef(list, i):
list.list = addDecimal(list.list, i)
def removeDecimal(list, n):
newlist = [None]*int(len(list) - 1.0)
i = 0.0
while i < len(list):
if i < n:
newlist[int(i)] = list[int(i)]
if i > n:
newlist[int(i - 1.0)] = list[int(i)]
i = i + 1.0
del(list)
return newlist
def getDecimalRef(list, i):
return list.list[int(i)]
def removeDecimalRef(list, i):
list.list = removeDecimal(list.list, i)
def addString(list, a):
newlist = [None]*int(len(list) + 1.0)
i = 0.0
while i < len(list):
newlist[int(i)] = list[int(i)]
i = i + 1.0
newlist[int(len(list))] = a
del(list)
return newlist
def addStringRef(list, i):
list.list = addString(list.list, i)
def removeString(list, n):
newlist = [None]*int(len(list) - 1.0)
i = 0.0
while i < len(list):
if i < n:
newlist[int(i)] = list[int(i)]
if i > n:
newlist[int(i - 1.0)] = list[int(i)]
i = i + 1.0
del(list)
return newlist
def getStringRef(list, i):
return list.list[int(i)]
def removeStringRef(list, i):
list.list = removeString(list.list, i)
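# --- Illustrative note (not part of the original file) ---
# The string/list helpers above re-implement operations Python provides
# natively, as is typical of machine-translated code:
#   stringsEqual(a, b)     ~  a == b
#   substring(s, i, j)     ~  s[int(i):int(j)]
#   appendString(a, b)     ~  a + b
#   appendCharacter(s, c)  ~  s + [c]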
print(test())
import base64
from collections import namedtuple
import random
from datetime import datetime
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from django.db.models import Count, Q
from django.http import HttpResponseRedirect, HttpResponse
from django.shortcuts import redirect, render
from django.views.generic import ListView
from guests import csv_import
from guests.invitation import INVITATION_TEMPLATE, guess_party_by_invite_id_or_404
from guests.models import Guest, MEALS, Party
from guests.save_the_date import get_save_the_date_context, send_save_the_date_email, SAVE_THE_DATE_TEMPLATE, \
SAVE_THE_DATE_CONTEXT_MAP
class GuestListView(ListView):
model = Guest
@login_required
def export_guests(request):
export = csv_import.export_guests()
response = HttpResponse(export.getvalue(), content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=all-guests.csv'
return response
@login_required
def dashboard(request):
parties_with_pending_invites = Party.objects.filter(is_attending=None).order_by('category', 'name')
attending_guests = Guest.objects.filter(is_attending=True)
guests_without_meals = attending_guests.filter(
Q(meal__isnull=True) | Q(meal='')
).order_by('first_name')
meal_breakdown = attending_guests.exclude(meal=None).values('meal').annotate(count=Count('*'))
return render(request, 'guests/dashboard.html', context={
'couple_name': settings.BRIDE_AND_GROOM,
'guests': Guest.objects.filter(is_attending=True).count(),
'not_coming_guests': Guest.objects.filter(is_attending=False).count(),
'guests_without_meals': guests_without_meals,
'meal_breakdown': meal_breakdown,
})
def rsvp(request):
context = {}
if request.method == 'GET':
return render(request, 'guests/rsvp.html', context)
elif request.method == 'POST':
template_name = 'guests/rsvp.html'
submitted_full_name = request.POST['party'].lower()
if ' ' not in submitted_full_name:
context['error'] = True
try:
first_name = submitted_full_name.split(' ')[0]
last_name = submitted_full_name.split(' ')[1]
except Exception:
context['error'] = True
if 'error' not in context:
guest = list(Guest.objects.filter(first_name__iexact=first_name, last_name__iexact=last_name))
if len(guest) > 0:
context['party'] = guest[0].party
context['meals'] = MEALS
template_name = 'guests/invitation.html'
else:
context['error'] = True
return render(request, template_name=template_name, context=context)
return HttpResponse(status=405)
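# --- Illustrative sketch (not part of the original file) ---
# The manual method dispatch in rsvp() could also use Django's stock
# decorator, which answers other verbs with HttpResponseNotAllowed (405)
# automatically:
#   from django.views.decorators.http import require_http_methods
#
#   @require_http_methods(["GET", "POST"])
#   def rsvp(request):
#       ...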
def invitation(request):
if request.method == 'POST':
for response in _parse_invite_params(request.POST):
guest = Guest.objects.get(pk=response.guest_pk)
party = guest.party
guest.is_attending = response.is_attending
guest.meal = response.meal
guest.save()
if request.POST.get('comments'):
comments = request.POST.get('comments')
party.comments = comments if not party.comments else '{}; {}'.format(party.comments, comments)
party.is_attending = party.any_guests_attending
party.save()
context = {
'party': party
}
return render(request, template_name='guests/rsvp_confirm.html', context=context)
return HttpResponse(status=404)
InviteResponse = namedtuple('InviteResponse', ['guest_pk', 'is_attending', 'meal'])
def _parse_invite_params(params):
responses = {}
for param, value in params.items():
if param.startswith('attending'):
pk = int(param.split('-')[-1])
response = responses.get(pk, {})
response['attending'] = True if value == 'yes' else False
responses[pk] = response
elif param.startswith('meal'):
pk = int(param.split('-')[-1])
response = responses.get(pk, {})
response['meal'] = value
responses[pk] = response
for pk, response in responses.items():
yield InviteResponse(pk, response['attending'], response.get('meal', None))
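# --- Illustrative sketch (not part of the original file) ---
# _parse_invite_params expects field names ending in "-<guest pk>"; meals are
# optional. With hypothetical pks and meal values:
#   list(_parse_invite_params({'attending-7': 'yes', 'meal-7': 'fish',
#                              'attending-9': 'no'}))
#   == [InviteResponse(guest_pk=7, is_attending=True, meal='fish'),
#       InviteResponse(guest_pk=9, is_attending=False, meal=None)]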
def rsvp_confirm(request, invite_id=None):
party = guess_party_by_invite_id_or_404(invite_id)
return render(request, template_name='guests/rsvp_confirmation.html', context={
'party': party,
'support_email': settings.DEFAULT_WEDDING_REPLY_EMAIL,
})
@login_required
def test_email(request, template_id):
context = get_save_the_date_context(template_id)
send_save_the_date_email(context, [settings.DEFAULT_WEDDING_TEST_EMAIL])
return HttpResponse('sent!')
def _base64_encode(filepath):
with open(filepath, "rb") as image_file:
return base64.b64encode(image_file.read())
@login_required
def guest_importer(request):
context = {}
guests = list(Guest.objects.all())
parties = list(Party.objects.all())
if request.method == 'GET':
context['guests'] = guests
context['parties'] = parties
return render(request, 'guests/importer.html', context=context)
if request.method == 'POST':
fguests = []
fparties = []
try:
csv_file = request.FILES["csv_file"]
if not csv_file.name.endswith('.csv'):
print('File is not CSV type')
return HttpResponse('Uploaded file must be a .csv file.', status=400)
if csv_file.multiple_chunks():
print("Uploaded file is too big (%.2f MB)." % (csv_file.size/(1000*1000),))
return HttpResponse('Uploaded file is too big.', status=400)
file_data = csv_file.read().decode("utf-8")
lines = file_data.split("\n")
for line in filter(str.strip, lines):  # skip blank lines (e.g. trailing newline)
fields = line.split(',')
party_name, first_name, last_name, party_type = fields[:4]
party_t = party_type.split('\r')[0]
fguests.append(f'{first_name} {last_name}')
fparties.append(f'{party_name} {party_t}')
party = Party.objects.get_or_create(name=party_name)[0]
party.type = party_t
party.save()
guest = Guest.objects.get_or_create(party=party, first_name=first_name, last_name=last_name)[0]
guest.save()
except Exception as e:
context['error'] = f'top: {str(e)}'
try:
context['guests'] = fguests
context['parties'] = fparties
except Exception as e:
context['error'] = f'bottom: {str(e)}'
return render(request, 'guests/importer.html', context=context)

# (source file: guests/views.py)
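# --- Illustrative sketch (not part of the original file) ---
# guest_importer above parses the upload by hand (split on '\n' and ',', trim
# '\r'). A standalone sketch with the standard csv module, which also copes
# with quoted fields and mixed line endings; parse_guest_rows is a
# hypothetical helper, not part of the view:
import csv
import io

def parse_guest_rows(file_data):
    """Yield (party_name, first_name, last_name, party_type) per CSV row."""
    for row in csv.reader(io.StringIO(file_data)):
        if len(row) >= 4:  # skip blank or short rows instead of raising
            yield row[0], row[1], row[2], row[3]
# --- End sketch ---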
import discord
import asyncio
import aiohttp
import traceback
import datetime
import random
import asyncpg
import json
import inspect
import re
from io import BytesIO
import util
from emoji import clean_emoji
from event import Event
from options import Options
from configs import Configs
from util import encode, decode
intents = discord.Intents.default()
intents.members = True
bot = discord.Client(intents=intents)
bot.session = aiohttp.ClientSession()
bot.timestamp = 0
bot.last_check_in = 0
bot._guild_check_queue = []
bot._guild_prefix_cache = {}
with open("config.json") as w:
cfg = json.loads(w.read())
@bot.event
async def on_ready():
print("Watching...")
if not bot.timestamp:
credentials = {
"user": "watchbot",
"password": cfg["db_pass"],
"database": "watchdata",
"host": "localhost",
}
db = await asyncpg.create_pool(**credentials)
# await db.execute("CREATE TABLE IF NOT EXISTS guild_configs(guild_id bigint PRIMARY KEY, post_channel bigint, prefix text DEFAULT '!', options integer DEFAULT 0, latest_event_count integer, special_roles bigint[], recent_events bigint[], _offset integer DEFAULT 0);")
# await db.execute("CREATE TYPE event_t AS enum('kick', 'ban', 'unban', 'role_add', 'role_remove');")
# await db.execute("CREATE TABLE IF NOT EXISTS events(event_id integer, guild_id bigint REFERENCES guild_configs(guild_id), event_type event_t, reason text, timestamp TIMESTAMP, message_id bigint, target_id bigint, target_name text, actor bigint, role_id bigint, role_name text, PRIMARY KEY (event_id, guild_id));")
# Looks like CREATE TYPE IF NOT EXISTS isn't a thing, so just run those in the db once before starting the bot
bot.db = db
bot._guild_check_queue = list(bot.guilds)
bot.dispatch("run_check_loop")
bot.timestamp = datetime.datetime.utcnow().timestamp()
watching_choices = ["you.", "carefully", "closely"]
while True:
await bot.change_presence(
activity=discord.Activity(
type=discord.ActivityType.watching,
name=random.choice(watching_choices),
)
)
await asyncio.sleep(3600)
event_t = [
discord.AuditLogAction.kick,
discord.AuditLogAction.ban,
discord.AuditLogAction.unban,
discord.AuditLogAction.member_role_update,
]
event_t_str = ["kick", "ban", "unban", "role_update", "role_add", "role_remove"]
event_t_display = [
"Kick",
"Ban",
"Unban",
"Special Role Modified",
"Special Role Added",
"Special Role Removed",
]
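# The three lists above are index-aligned, e.g. event_t_str.index("ban") == 1
# and event_t_display[1] == "Ban". The last two string entries ("role_add" /
# "role_remove") have no audit-log action of their own; they are derived from
# member_role_update in check_guild_logs below.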
async def send_webhook(url=cfg.get("webhook_url"), **kwargs):
if url:
webhook = discord.Webhook.from_url(
url, adapter=discord.AsyncWebhookAdapter(bot.session)
)
return await webhook.send(**kwargs)
@bot.event
async def on_run_check_loop():
while True:
to_check = set(bot._guild_check_queue)
# inb4 another value is added here before I clear it haha
bot._guild_check_queue = []
for guild in to_check:
try:
# Check if still in guild
if not (guild and bot.get_guild(guild.id)):
continue
# Check if guild can be posted to
if not guild.me.guild_permissions.view_audit_log:
continue
guild_config = await get_guild_configs(guild.id)
if not guild_config.guild_id:
continue
channel = guild_config.post_channel
channel = guild.get_channel(channel)
if not channel or not channel.permissions_for(guild.me).send_messages:
continue
# Get entries
entries = await check_guild_logs(guild, guild_config)
await post_entries(entries, channel, guild_config)
except Exception as e:
text = "".join(traceback.TracebackException.from_exception(e).format())
embed_limit = 10
size = 2048 - 10
print(f"Error in guild {guild.id}")
print(text)
# Report errors to webhook
await send_webhook(
content=f"<@{cfg['owner_id']}> Error in guild {guild.id} ({guild.name})",
embeds=[
discord.Embed(
color=discord.Color(value=0xC62828),
description=f"```py\n{text[i:i+size]}\n```",
)
for i in range(0, len(text), size)[-embed_limit:]
],
)
# Check in every hour
now = datetime.datetime.utcnow()
if now.timestamp() - bot.last_check_in > 3660:
await send_webhook(
content=f"Hourly check-in successful. `{now.strftime('%Y-%m-%d %H:%M')}`"
)
bot.last_check_in = now.timestamp()
await asyncio.sleep(2)
@bot.event
async def on_member_ban(guild, user):
bot._guild_check_queue += [guild]
@bot.event
async def on_member_unban(guild, user):
bot._guild_check_queue += [guild]
@bot.event
async def on_member_remove(member):
bot._guild_check_queue += [member.guild]
@bot.event
async def on_member_update(before, after):
if before.roles != after.roles:
bot._guild_check_queue += [before.guild]
async def get_guild_configs(guild_id):
ret = await bot.db.fetchrow(
"SELECT * FROM guild_configs WHERE guild_id = $1;", guild_id
)
ret = ret if ret else {}
return Configs.from_row(ret)
async def check_guild_logs(guild, guild_config):
recent_events = guild_config.recent_events
if not recent_events:
recent_events = [discord.utils.time_snowflake(datetime.datetime.utcnow())]
events = []
special_roles = guild_config.roles
break_signal = False
oldest = None
while not break_signal:
raw_events = await guild.audit_logs(
limit=100, before=discord.Object(id=oldest) if oldest else None
).flatten()
if oldest is None:
new_recent_events = [e.id for e in raw_events[:3]]
if not raw_events:
break
oldest = raw_events[-1].id
for e in raw_events:
if e.id <= max(recent_events):
break_signal = True
break
if e.id in recent_events:
continue
if e.action not in event_t:
continue
reason = e.reason.strip() if e.reason else None
event_type = event_t_str[event_t.index(e.action)]
role = None
if e.action == discord.AuditLogAction.member_role_update:
for r in e.changes.before.roles:
if r.id in special_roles:
event_type = "role_remove"
role = r
events += [
Event(
guild.id,
event_type,
e.target.id,
str(e.target),
e.user,
reason,
e.created_at,
role.id,
role.name,
)
]
for r in e.changes.after.roles:
if r.id in special_roles:
event_type = "role_add"
role = r
events += [
Event(
guild.id,
event_type,
e.target.id,
str(e.target),
e.user,
reason,
e.created_at,
role.id,
role.name,
)
]
continue
events += [
Event(
guild.id,
event_type,
e.target.id,
str(e.target),
e.user,
reason,
e.created_at,
None,
None,
)
]
continue
events = events[::-1]
async with bot.db.acquire() as conn:
async with conn.transaction():
await conn.execute(
"SELECT FROM guild_configs WHERE guild_id = $1 FOR UPDATE;", guild.id
) # That's how you're supposed to lock it right?
latest_event_count = guild_config.latest_event_count
for e in events:
latest_event_count += 1
e.set_count(latest_event_count)
await conn.execute(
"""INSERT INTO events(
guild_id, event_type, target_id, target_name, actor, reason, timestamp, role_id, role_name, event_id
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10);""",
*e.db_insert(),
)
await conn.execute(
"""
UPDATE guild_configs
SET recent_events = $1,
latest_event_count = $2
WHERE guild_id = $3;
""",
new_recent_events,
latest_event_count,
guild.id,
)
return events
async def post_entries(entries, channel, guild_config):
ret = []
for e in entries:
print(f"Posting case {e.count} to {channel.guild.id}")
msg = await channel.send(generate_entry(e, guild_config))
await bot.db.execute(
"""
UPDATE events
SET message_id = $1
WHERE guild_id = $2
AND event_id = $3;
""",
msg.id,
channel.guild.id,
e.count,
)
ret += [msg]
return ret
invite_reg = re.compile(
r"((?:https?://)?discord(?:\.gg|app\.com/invite)/(?:#/)?)([a-zA-Z0-9-]*)"
)
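# Illustrative match (input is an assumed example):
#   invite_reg.sub(r"\g<1>[INVITE REDACTED]", "discord.gg/abc123")
#   -> "discord.gg/[INVITE REDACTED]"
# Group 1 keeps the host part; group 2 (the invite code) is dropped.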
def generate_entry(
event,
config,
default_reason="_Responsible moderator, please do `reason {} <reason>`_",
):
case_num = event.count + config.offset
ret = "**{}** | Case {}\n".format(
event_t_display[event_t_str.index(event.event_type)], case_num
)
name = event.target_name
if not config.options.reveal_invites:
name = invite_reg.sub(r"\g<1>[INVITE REDACTED]", name)
name = clean_emoji(name)
ret += "**User**: {} ({})".format(name, event.target_id)
if config.options.ping_target:
ret += " (<@{}>)".format(event.target_id)
ret += "\n"
if event.role_id:
ret += "**Role**: {} ({})\n".format(event.role_name, event.role_id)
ret += "**Reason**: {}\n".format(
event.reason if event.reason else default_reason.format(case_num)
)
ret += "**Responsible moderator**: "
if isinstance(event.actor, int):
ret += f"{event.actor}"
else:
ret += "{}#{}".format(clean_emoji(event.actor.name), event.actor.discriminator)
ret = ret.replace("@everyone", "@\u200beveryone").replace("@here", "@\u200bhere")
return ret
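# Sketch of the text generate_entry produces (all values below are hypothetical):
#   **Ban** | Case 5
#   **User**: SomeUser#1234 (123456789012345678)
#   **Reason**: _Responsible moderator, please do `reason 5 <reason>`_
#   **Responsible moderator**: SomeMod#0001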
async def update_entry(message, event, configs=None):
if not configs:
configs = await get_guild_configs(message.guild.id)
new_text = generate_entry(event, configs)
if message.content != new_text:
print(f"Updating case {event.count} in {message.guild.id}")
try:
await message.edit(content=new_text)
except discord.errors.NotFound:
print(f"Warning: {message.guild.id}:{event.count} not found!")
else:
print(f"Updating already equal: case {event.count} in {message.guild.id} ")
prefixes = [f"<@{cfg['bot_id']}>", f"<@!{cfg['bot_id']}>", "w!", "watch!", "⌚", "\⌚"]
@bot.event
async def on_message(message):
if (
not bot.timestamp
or message.author.bot
or not message.content
or not isinstance(message.channel, discord.abc.GuildChannel)
or not message.channel.permissions_for(message.guild.me).send_messages
):
return
msg = None
if message.guild.id not in bot._guild_prefix_cache:
configs = await get_guild_configs(message.guild.id)
if not configs.guild_id:
guild_prefix = "!"
else:
guild_prefix = configs.prefix
if guild_prefix:
guild_prefix = guild_prefix.strip().lower()
bot._guild_prefix_cache[message.guild.id] = guild_prefix
custom_prefix = [bot._guild_prefix_cache[message.guild.id]]
if not custom_prefix[0]:
custom_prefix = []
prefix = ""
for p in prefixes + custom_prefix:
if message.content.lower().startswith(p):
msg = message.content[len(p) :].strip()
prefix = p
break
if not msg:
return
split = msg.split(None, 1)
if len(split) == 0:
return
cmd = split[0].lower()
if cmd in cmds:
if isinstance(message.channel, discord.abc.GuildChannel):
print(
"{0.created_at} - {0.guild.name}#{0.channel.name} - {0.author.name}: {0.content}".format(
message
)
)
else:
print("{0.created_at} - DM - {0.author.name}: {0.content}".format(message))
args = None
if len(split) > 1:
args = split[1]
kwargs = {"message": message, "cmd": cmd, "args": args, "prefix": prefix}
func = await cmds[cmd](**kwargs)
async def time(message, args, **kwargs):
now = datetime.datetime.utcnow()
await message.channel.send(f"\⌚ The time is now `{now.strftime('%H:%M')}` UTC.")
_ = None
async def evaluate(message, args, **kwargs):
if message.author.id == cfg["owner_id"] and args:
global _
ctx = message
if args.split(" ", 1)[0] == "await":
try:
_ = await eval(args.split(" ", 1)[1])
await message.channel.send(str(_))
except Exception as e:
await message.channel.send("```\n" + str(e) + "\n```")
else:
try:
_ = eval(args)
await message.channel.send(str(_))
except Exception as e:
await message.channel.send("```\n" + str(e) + "\n```")
return True
async def sudo(message, args, **kwargs):
if message.author.id == cfg["owner_id"]:
sudo_funcs = {
"reset": (_reset, (message, None)),
"forcecheckall": (bot._guild_check_queue.extend, (bot.guilds,)),
"forcecheckthis": (bot._guild_check_queue.append, (message.guild,)),
}
if args:
a = args.split(" ", 1)
cmd = a[0].lower()
arg = a[1] if len(a) > 1 else None
if cmd in sudo_funcs:
try:
if inspect.iscoroutinefunction(sudo_funcs[cmd][0]):
ret = await sudo_funcs[cmd][0](*sudo_funcs[cmd][1])
else:
ret = sudo_funcs[cmd][0](*sudo_funcs[cmd][1])
except Exception as e:
ret = str(e)
if ret is None:
ret = "no u"
await message.channel.send(f"```\n{ret}\n```")
return True
else:
await message.channel.send(
f"All sudo commands:\n```\n{', '.join(sudo_funcs.keys())}\n```"
)
return True
async def close(message, **kwargs):
if message.author.id == cfg["owner_id"]:
msg = await message.channel.send("Shutting down...")
await bot.db.close()
await bot.close()  # logout() is just a deprecated alias of close()
exit()
def get_case_number(num, max_num, offset=0, allow_case_range=False):
ret = []
num = str(num).lower()
rng = [num]
if allow_case_range:
rng = num.split("..")
if len(rng) > 2:
raise ValueError("Invalid case number")
for end in rng:
if end == "":
end = "l"
pc = end.split("~")
if pc[0] in ("i", "|"):
raise ValueError(
"You realise that `L` is supposed to stand for `latest`, right?"
)
if len(pc) > 2 or (len(pc) == 2 and not pc[0] in ("l", "latest")):
raise ValueError("Invalid case number")
add = max_num
try:
if not pc[0] in ("l", "latest"):
add = int(pc[0])
add -= offset
if len(pc) == 2:
add -= int(pc[1])
except ValueError:
raise ValueError("Invalid case number.")
if add > max_num:
raise ValueError("Invalid case number.")
if add <= 0:
raise ValueError("Invalid case number.")
ret += [add]
if not allow_case_range:
ret = ret[0]
else:
ret = sorted(ret)
return ret
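# Illustrative behaviour of get_case_number (max_num=10, offset=0; inputs assumed):
#   get_case_number("latest", 10)                            -> 10
#   get_case_number("latest~2", 10)                          -> 8
#   get_case_number("2..5", 10, allow_case_range=True)       -> [2, 5]
#   get_case_number("latest~2..", 10, allow_case_range=True) -> [8, 10]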
def is_mod(member):
perms = member.guild_permissions
return any((perms.ban_members, perms.kick_members, perms.manage_roles))
async def reason(message, args, **kwargs):
if not args:
return
perms = message.author.guild_permissions
if not is_mod(message.author):
return
configs = await get_guild_configs(message.guild.id)
channel = message.guild.get_channel(configs.post_channel)
if not (
configs.guild_id
and channel
and channel.permissions_for(message.guild.me).send_messages
):
await message.channel.send(
"This guild has not been (or is improperly) set up. Please use the `setup` command to get started."
)
return
num = configs.latest_event_count
arg = args.split(None, 1)
offset = configs.offset
try:
num = get_case_number(arg[0], num, offset, allow_case_range=True)
except ValueError as e:
await message.channel.send(str(e))
return
if len(arg) < 2:
await message.channel.send("No reason was given!")
return
reason = arg[1]
events = await bot.db.fetch(
"SELECT * FROM events WHERE guild_id = $1 AND event_id BETWEEN $2 AND $3;",
message.guild.id,
num[0],
num[-1],
)
if not events:
await message.channel.send(
"!!! That event doesn't exist. You shouldn't be seeing this. Please contact the bot maintainer."
)
return
events = [Event.from_row(e, message.author, reason) for e in events]
event_perms = set()
if perms.ban_members:
event_perms.update({"ban", "unban"})
if perms.kick_members:
event_perms.update({"kick"})
if perms.manage_roles:
event_perms.update({"role_add", "role_remove"})
for e in events:
if e.event_type not in event_perms:
msg = "You have insufficient permissions to update that reason."
if len(events) > 1:
msg = f"You have insufficient permissions to update at least one of those reasons. (Check halted at case {e.count+offset})"
await message.channel.send(msg)
return
if len(events) > 3:
await message.channel.send(
f"This will update cases **{num[0]+offset}** to **{num[-1]+offset}**.\nAre you sure you want to update **{len(events)}** cases? (Say `{len(events)}` to confirm)"
)
def check(m):
return (
m.author.id == message.author.id and m.channel.id == message.channel.id
)
try:
msg = await bot.wait_for("message", check=check)
except asyncio.TimeoutError:
return
if (not msg.content) or msg.content.lower() != str(len(events)):
await message.channel.send("Reason aborted.")
return
msgs = []
for e in events:
msg = e.message_id
if msg:
msg = await util.get_message(bot, channel, msg)
if msg:
msgs += [(msg, e)]
await bot.db.execute(
f"""
UPDATE events
SET reason = $1,
actor = $2
WHERE guild_id = $3
AND event_id BETWEEN $4 AND $5;
""",
reason,
message.author.id,
message.guild.id,
num[0],
num[-1],
)
ret = "👌"
if len(events) > 1:
ret += f"\nUpdated **{len(events)}** cases."
# async with message.channel.typing():
for m in msgs:
await update_entry(m[0], m[1], configs)
if len(events) != len(msgs):
msg = f"\nUnfortunately, the message tied to this case cannot be found. Please `recall` this case to resend it. (Case {num[0]+offset})"
if len(events) > 1:
msg = f"\n\nUnfortunately, at least one message tied to these cases cannot be found. Please `recall` the missing cases to resend it. (Check cases {num[0]+offset} to {num[-1]+offset})"
ret += msg
await message.channel.send(ret)
return True
async def recall(message, args, **kwargs):
if not args:
return
configs = await get_guild_configs(message.guild.id)
channel = message.guild.get_channel(configs.post_channel)
if not (
configs.guild_id
and channel
and channel.permissions_for(message.guild.me).send_messages
):
return
num = configs.latest_event_count
try:
num = get_case_number(
args, num, configs.offset
) # , allow_case_range=is_mod(message.author))
except ValueError as e:
await message.channel.send(str(e))
return
# if not is_mod(message.author):
num = [num]
resp = {}
embed = None
for n in num:
event = await bot.db.fetchrow(
"SELECT * FROM events WHERE guild_id = $1 AND event_id = $2;",
message.guild.id,
n,
)
if not event:
resp[n] = "!!! That event doesn't exist. You shouldn't be seeing this. Please contact the bot maintainer."
continue
new_entry = Event.from_row(event)
entry_text = generate_entry(new_entry, configs)
msg = event.get("message_id")
if msg:
msg = await util.get_message(bot, channel, msg)
ret = None
if not msg:
ret = "This entry has been deleted. Please ask a mod to run this command to reinstate it."
if is_mod(message.author):
ret = "This entry has been reinstated."
actor = await util.get_member(bot, event.get("actor"))
new_entry.set_actor(actor)
msg = await post_entries([new_entry], channel, configs)
msg = msg[0]
if msg:
if (not is_mod(message.author)) or entry_text == msg.content:
if len(num) == 1:
embed = discord.Embed(
title=ret,
color=util.get_color(message.guild.me),
description="\n".join(
[
e
if i != 0
else " | ".join(
[
v
if u != e.count(" | ")
else f"[{v}]({msg.jump_url})"
for u, v in enumerate(e.split(" | "))
]
)
for i, e in enumerate(msg.content.split("\n"))
]
), # this is so bad aaaaaaaaaaa
timestamp=new_entry.timestamp,
)
await message.channel.send(embed=embed)
return
elif is_mod(message.author):
actor = await util.get_member(bot, event.get("actor"))
new_entry.set_actor(actor)
await update_entry(msg, new_entry, configs)
ret = "This entry has been updated"
if ret:
resp[n] = ret
if embed:
await message.channel.send(embed=embed)
else:
await message.channel.send(resp)
return True
async def setup(message, args, **kwargs):
if not message.author.guild_permissions.manage_guild:
await message.channel.send(
"You require the `MANAGE_GUILD` permission to use this command!"
)
return
configs = await get_guild_configs(message.guild.id)
if not args:
if not (
message.channel.permissions_for(message.guild.me).embed_links
and message.channel.permissions_for(message.guild.me).attach_files
):
await message.channel.send(
"I require the `EMBED_LINKS` and `ATTACH_FILES` permissions to use this command!"
)
return
embed = discord.Embed(color=util.get_color(message.guild.me))
config_export = "None generated."
files = []
if configs.guild_id:
config_export = configs.export()
if len(config_export) > 1024:
b = BytesIO()
b.write(config_export.encode("utf-8"))
b.seek(0)
config_export = (
"This string was too long to send. Please check the uploaded file."
)
files += [discord.File(b, "config_export.txt")]
embed.add_field(name="Config Export", value=config_export)
guild_file = None
guild_export = {
"roles": [
[i.name, str(i.id), str(i.color)]
for i in sorted(
message.guild.roles, key=lambda x: x.position, reverse=True
)
if i.id != message.guild.id
],
"channels": [
[i.name, str(i.id)]
for i in message.guild.text_channels
if i.permissions_for(message.guild.me).send_messages
],
}
guild_export = encode(json.dumps(guild_export))
full_guild_export = guild_export
if len(guild_export) > 2048:
b = BytesIO()
b.write(guild_export.encode("utf-8"))
b.seek(0)
guild_export = (
"This string was too long to send. Please check the uploaded file."
)
files += [discord.File(b, "guild_data_export.txt")]
elif len(guild_export) > 1024:
embed.title = "Guild Data Export (Full code)"
embed.description = guild_export
guild_export = "This string was too long to put in here. Please check the long bit of text above."
embed.add_field(name="Guild Data Export", value=guild_export)
ret = "Welcome to the ⌚ setup!\nPlease go to https://sink.discord.bot/⌚ to generate an import code!\nRun this command with the Import config to set up the bot on this guild."
if len(full_guild_export) <= 2000 and message.author.is_on_mobile():
ret += "\n\nI am detecting that you are currently on a mobile device. React to this message with ☎ (`telephone`) to receive a DM with the data that can easily be copied."
msg = await message.channel.send(ret, embed=embed, files=files)
if len(full_guild_export) <= 2000:
def check(reaction, user):
return (
reaction.message.id == msg.id
and reaction.emoji.strip("\uFE0F\uFE0E") == "☎"
and user.id == message.author.id
)
try:
reaction, user = await bot.wait_for("reaction_add", check=check, timeout=60)  # timeout (value assumed) so the TimeoutError handler below can actually fire
except asyncio.TimeoutError:
return
if reaction:
try:
await message.author.send(full_guild_export)
except discord.HTTPException:
await message.channel.send(
"DM failed. Please ensure your DMs are enabled and run the command again."
)
return True
else:
if not (
message.channel.permissions_for(message.guild.me).embed_links
and message.channel.permissions_for(message.guild.me).add_reactions
):
await message.channel.send(
"I require the `EMBED_LINKS` and `ADD_REACTIONS` permissions to use this command!"
)
return
channel = None
try:
args = json.loads(decode(args))
args["guild_id"] = message.guild.id
args["post_channel"] = configs.post_channel
args["special_roles"] = [int(r) for r in args["roles"]]
args["prefix"] = args["prefix"].strip()[:32] if args["prefix"] else None
args["options"] = int(args["options"])
offset = 0 if not args["offset"] else args["offset"]
args["_offset"] = max(0, min(2147483647, int(offset)) - 1)
if not configs.guild_id:
args["post_channel"] = int(args["channel"])
channel = message.guild.get_channel(args["post_channel"])
if not channel:
raise ValueError
except Exception:
await message.channel.send("Invalid input!")
return
if configs:
args["offset"] = configs.offset
emotes = ["✅", "❎"]
args = Configs.from_row(args)
msg = await message.channel.send(
"Here are your imported settings! Please react with ✅ to confirm them. (You can check then again later with the `settings` command)",
embed=args.as_embed(message.guild),
)
for e in emotes:
await msg.add_reaction(e)
def check(reaction, user):
return (
reaction.message.id == msg.id
and reaction.emoji in emotes
and user.id == message.author.id
)
try:
reaction, user = await bot.wait_for("reaction_add", check=check, timeout=60)  # timeout (value assumed) so the TimeoutError handler below can actually fire
except asyncio.TimeoutError:
return
if reaction.emoji == "✅":
await bot.db.execute(
"""
INSERT INTO guild_configs (
guild_id, post_channel, prefix, options, latest_event_count, special_roles, recent_events, _offset
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
ON CONFLICT (guild_id)
DO UPDATE SET
prefix = EXCLUDED.prefix,
options = EXCLUDED.options,
special_roles = EXCLUDED.special_roles
;""",
*args.db_insert(),
)
bot._guild_prefix_cache[message.guild.id] = args.prefix
await message.channel.send("Your settings have been updated.")
else:
await message.channel.send("Process aborted.")
return True
async def settings(message, **kwargs):
if not is_mod(message.author):
return
configs = await get_guild_configs(message.guild.id)
if not configs:
return
if not message.channel.permissions_for(message.guild.me).embed_links:
await message.channel.send(
"I require the `EMBED_LINKS` permission to use this command!"
)
return
await message.channel.send(
f"Settings for **{message.guild.name}**: (You can use the `setup` command to change them)",
embed=configs.as_embed(message.guild),
)
return True
async def reset(message, **kwargs):
if is_mod(message.author):
if message.author.id != message.guild.owner.id:
await message.channel.send("Only the server owner can run this command!")
return
else:
return
configs = await get_guild_configs(message.guild.id)
if not configs.guild_id:
await message.channel.send("You have nothing to reset.")
return
return await _reset(message, configs)
async def _reset(message, configs):
if not configs:
configs = await get_guild_configs(message.guild.id)
await message.channel.send(
"**!! WARNING !!**\nDANGER ZONE\n**!! WARNING !!**\n\nThis command will delete all bot configs and events related to this guild. All already-logged messages will be dissociated and uneditable.\n\n**Are you sure you want to do this?**\nEnter `Yes, please wipe everything` to confirm."
)
def check(m):
return m.author.id == message.author.id and m.channel.id == message.channel.id
try:
msg = await bot.wait_for("message", check=check)
except asyncio.TimeoutError:
return
if (not msg.content) or msg.content.lower() != "yes, please wipe everything":
await message.channel.send("Reset aborted.")
return
channel = message.guild.get_channel(configs.post_channel)
if channel and channel.permissions_for(message.guild.me).send_messages:
await channel.send(
"**==================**\nGood night, sweet prince\n**==================**"
)
await bot.db.execute("DELETE FROM events WHERE guild_id = $1;", message.guild.id)
await bot.db.execute(
"DELETE FROM guild_configs WHERE guild_id = $1;", message.guild.id
)
await message.channel.send(
f"Data deleted. For postierity, your guild settings were:\n```\n{configs.export()}\n```"
)
return True
async def invite(message, **kwargs):
await message.channel.send(
f"<https://discordapp.com/oauth2/authorize?client_id={cfg['bot_id']}&scope=bot&permissions=128>"
)
return True
async def information(message, args, prefix, **kwargs):
if prefix == "!":
return
msg = """For first time users, read this to learn how to set up: https://gist.github.com/Roadcrosser/04837764051b35fa10acad650281106d
Prefixes: Mentions, `w!`, `⌚`
Commands:
```
help - This message
invite - Display bot invite
setup - Setup the bot for the server (Mod only)
settings - View your server settings (Mod only)
reason - Set the reason for a case (Mod only)
(Run "help reason" for further information)
recall - Recall a previously posted case
reset - Reset all settings/cases (Owner only)
```"""
if args and args.lower() == "reason":
msg = """```
reason <case number> <reason>
Mod only
Sets the reason and responsible moderator for the given case number.
Case number can be:
2 Case #2
latest The latest case
latest~2 The third-latest case
2..5 Case #2 to #5
2.. Case #2 to the latest case
latest~2.. The last 3 cases
Ex: reason latest Keeps spamming
Ex: reason 22..50 Lots of raiders
```"""
await message.channel.send(msg)
return True
cmds = {
"time": time,
"eval": evaluate,
"sudo": sudo,
"help": information,
"quit": close,
"reason": reason,
"recall": recall,
"setup": setup,
"settings": settings,
"reset": reset,
"invite": invite,
}
bot.run(cfg["token"]) | watch.py | help - This message
invite - Display bot invite
setup - Setup the bot for the server (Mod only)
settings - View your server settings (Mod only)
reason - Set the reason for a case (Mod only)
(Run "help reason" for further information)
recall - Recall a previously posted case
reset - Reset all settings/cases (Owner only)
reason <case number> <reason>
Mod only
Sets the reason and responsible moderator for the given case number.
Case number can be:
2 Case #2
latest The latest case
latest~2 The third-latest case
2..5 Case #2 to #5
2.. Case #2 to the latest case
latest~2.. The last 3 cases
Ex: reason latest Keeps spamming
Ex: reason 22..50 Lots of riaders | 0.404743 | 0.111024 |
import argparse
import sqlite3
import sys
import time
import ttystatus
from common import *
# If similar placements are closer in time than this number of seconds, consider
# them to be duplicates. /r/place always gave at least a 5 minute cooldown,
# so we'll use that as our window.
DUPLICATE_WINDOW = 300
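# e.g. two reports of the same (x, y, color, author) at t=100.0 and t=350.0
# collapse into one placement, since 350.0 - 100.0 = 250 < 300, while a third
# report at t=450.0 would count as a fresh placement.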
parser = argparse.ArgumentParser()
parser.add_argument('-w', '--working-database', default='working.sqlite', help='Intermediate SQLite database to use. Default: %(default)s.')
parser.add_argument('-p', '--pixel', nargs=2, metavar=('X', 'Y'), type=int, help='Instead of merging data, print the merged timeline for the given pixel.')
args = parser.parse_args()
source_db = sqlite3.connect(args.working_database)
source_db.row_factory = sqlite3.Row
def offset_timestamp(timestamp, source):
before_row = source_db.execute('SELECT timestamp, offset FROM offsets WHERE source = ? and timestamp <= ? ORDER BY timestamp DESC LIMIT 1', (source, timestamp)).fetchone()
after_row = source_db.execute('SELECT timestamp, offset FROM offsets WHERE source = ? and timestamp >= ? ORDER BY timestamp LIMIT 1', (source, timestamp)).fetchone()
if before_row is None:
return timestamp + after_row['offset']
elif after_row is None:
return timestamp + before_row['offset']
elif before_row['timestamp'] == after_row['timestamp']:
return timestamp + after_row['offset']
else:
ts_0, o_0 = (before_row['timestamp'], before_row['offset'])
ts_1, o_1 = (after_row['timestamp'], after_row['offset'])
slope = (o_1 - o_0) / (ts_1 - ts_0)
intercept = o_0 - slope * ts_0
return timestamp + (slope * timestamp + intercept)
source_db.create_function('offset_timestamp', 2, offset_timestamp)
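# Worked example of the interpolation above (offset values are made up): with
# stored rows (timestamp=100, offset=2.0) and (timestamp=200, offset=4.0) for a
# source, a raw timestamp of 150 gives slope 0.02 and intercept 0.0, so it is
# corrected to 150 + (0.02 * 150 + 0.0) = 153.0.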
def new_pixel(canvas, row, offsets, dest_cur, st):
x = row['x']
y = row['y']
timestamp = row['timestamp']
# This pixel is a duplicate if:
# * It's the same color as the current pixel,
# * It's the same author as the current pixel, AND
# * It's within the window of duplication.
if canvas[y, x]['color'] == row['color'] and \
canvas[y, x]['author'] == row['author'] and \
timestamp - canvas[y, x]['timestamp'] < DUPLICATE_WINDOW:
duplicate_pixel(timestamp, x, y, row['color'], row['author'], row['source'], dest_cur, st)
else:
update_pixel(timestamp, x, y, row['color'], row['author'], row['source'], dest_cur, st)
update_canvas(canvas, x, y, row['color'], timestamp, row['author'])
def new_board(canvas, row, dest_cur, st):
bitmap = board_bitmap(row['board'])
if args.pixel is None:
for y, x in np.array(np.where(canvas['color'] != bitmap)).T:
update_pixel(row['timestamp'], x, y, bitmap[y, x], None, row['source'], dest_cur, st)
else:
x, y = args.pixel
if canvas[y, x]['color'] != bitmap[y, x]:
update_pixel(row['timestamp'], x, y, bitmap[y, x], None, row['source'], dest_cur, st)
np.copyto(canvas['color'], bitmap)
def update_pixel(timestamp, x, y, color, author, source, dest_cur, st):
if args.pixel is None:
if author is None:
# Have to cast color to an int because sqlite3 doesn't understand numpy.uint8.
dest_cur.execute('INSERT INTO placements (timestamp, x, y, color) VALUES (?, ?, ?, ?)', (timestamp, x, y, int(color)))
else:
dest_cur.execute('INSERT INTO placements (timestamp, x, y, color, author) VALUES (?, ?, ?, ?, ?)', (timestamp, x, y, color, author))
else:
st.notify('{} {:3} {:3} {:3} {:16} {}'.format(time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(timestamp)), x, y, color, '' if author is None else author, source))
def duplicate_pixel(timestamp, x, y, color, author, source, dest_cur, st):
if args.pixel is not None:
st.notify(' {} + {}'.format(time.strftime('%H:%M:%S', time.gmtime(timestamp)), source))
def update_canvas(canvas, x, y, color, timestamp, author):
canvas['color'][y, x] = color
canvas['timestamp'][y, x] = timestamp
canvas['author'][y, x] = author
print('loading offsets...', end=' ')
offsets = {}
for row in source_db.execute('SELECT * FROM offsets'):
if row['source'] not in offsets:
offsets[row['source']] = []
offsets[row['source']].append((row['timestamp'], row['offset']))
for key in offsets:
offsets[key].sort()
print('done.')
st = ttystatus.TerminalStatus(period=0.1)
st.format('%ElapsedTime() %PercentDone(done,total) [%ProgressBar(done,total)] ETA: %RemainingTime(done,total)')
st['done'] = 0
board_count = source_db.execute('SELECT COUNT(*) FROM raw_boards').fetchone()[0]
if args.pixel is None:
raw_placement_count = source_db.execute('SELECT COUNT(*) FROM raw_placements').fetchone()[0]
known_placement_count = source_db.execute('SELECT COUNT(*) FROM known_placements').fetchone()[0]
else:
raw_placement_count = source_db.execute('SELECT COUNT(*) FROM raw_placements WHERE x = ? and y = ?', args.pixel).fetchone()[0]
known_placement_count = source_db.execute('SELECT COUNT(*) FROM known_placements WHERE x = ? and y = ?', args.pixel).fetchone()[0]
st['total'] = raw_placement_count + known_placement_count + board_count
st.flush()
if args.pixel is None:
dest_db = sqlite3.connect('merged.sqlite')
dest_cur = dest_db.cursor()
dest_cur.execute('DROP TABLE IF EXISTS placements')
dest_cur.execute("""
CREATE TABLE placements (
timestamp REAL,
x INTEGER,
y INTEGER,
color INTEGER,
author TEXT
)""")
dest_cur.execute('CREATE INDEX placements_timestamp_idx ON placements(timestamp)')
dest_cur.execute('CREATE INDEX placements_color_idx ON placements(color)')
dest_cur.execute('CREATE INDEX placements_author_idx ON placements(author)')
dest_cur.execute('CREATE INDEX placements_position_idx ON placements(x, y)')
else:
dest_cur = None
ALL_PLACEMENT_QUERY = """
SELECT offset_timestamp(timestamp, source) AS timestamp, x, y, color, author, source
FROM raw_placements
UNION
SELECT timestamp, x, y, color, author, '_known' AS source
FROM known_placements
ORDER BY timestamp, x, y, source"""
PIXEL_PLACEMENT_QUERY = """
SELECT offset_timestamp(timestamp, source) AS timestamp, x, y, color, author, source
FROM raw_placements
WHERE x = :x AND y = :y
UNION
SELECT timestamp, x, y, color, author, '_known' AS source
FROM known_placements
WHERE x = :x AND y = :y
ORDER BY timestamp, x, y, source"""
if args.pixel is None:
placement_cur = source_db.execute(ALL_PLACEMENT_QUERY)
else:
placement_cur = source_db.execute(PIXEL_PLACEMENT_QUERY, {'x':args.pixel[0], 'y':args.pixel[1]})
board_cur = source_db.execute('SELECT * FROM raw_boards ORDER BY timestamp, source')
canvas = np.empty((1000, 1000), dtype=[('color', 'u1'), ('timestamp', 'f4'), ('author', 'O')])
canvas['color'] = 0
canvas['timestamp'] = 1490979600  # 2017-03-31 17:00:00 UTC, presumably the canvas start time
canvas['author'] = ''
placement_row = placement_cur.fetchone()
board_row = board_cur.fetchone()
while placement_row is not None or board_row is not None:
if placement_row is not None and (board_row is None or placement_row['timestamp'] < board_row['timestamp']):
new_pixel(canvas, placement_row, offsets, dest_cur, st)
placement_row = placement_cur.fetchone()
else:
new_board(canvas, board_row, dest_cur, st)
board_row = board_cur.fetchone()
st['done'] += 1
if args.pixel is None:
dest_db.commit() | merge.py | 0.345436 | 0.282141 |
import numpy as np
from keras.engine.sequential import Sequential
from keras.layers.core import Dense, Dropout
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM
from keras.preprocessing.text import Tokenizer
from keras.utils.np_utils import to_categorical
from pymystem3 import Mystem
from utils import (create_dataset, get_callbacks, preprocess, read_text,
save_labels, save_tokenizer)
mystem = Mystem()
FILE_NAME = 'data/1.txt'
FILE_NAME2 = 'data/2.txt'
EMBEDDING_N_DIM = 32
def build_model(vocab_size, window=3):
model = Sequential()
model.add(Embedding(vocab_size, EMBEDDING_N_DIM, input_length=window))
model.add(Dropout(0.1))
model.add(LSTM(256,
dropout=0.1,
recurrent_dropout=0.1))
model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.3))
model.add(Dense(vocab_size, activation='softmax'))
model.compile(
loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy', ],
)
return model
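# Shape sketch for build_model with window=4 (as used below) and vocab size V:
#   input:  (batch, 4) integer word indices
#   Embedding         -> (batch, 4, 32)
#   LSTM(256)         -> (batch, 256)   # only the final hidden state is returned
#   Dense(1024, relu) -> (batch, 1024)
#   Dense(V, softmax) -> (batch, V) distribution over the next word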
if __name__ == '__main__':
text = read_text([FILE_NAME, FILE_NAME2])
text = preprocess(text, force=True)
max_words = 100000000
WINDOW = 4
tokenizer = Tokenizer(
num_words=max_words,
filters='"#$%&()*+-/:;<=>@[\]^_`{|}~'
)
tokenizer.fit_on_texts(text)
X_train = tokenizer.texts_to_sequences(text)
print('Train shape:', np.array(X_train).shape)
X_train_time, Y_train_time = create_dataset(np.array(X_train), WINDOW)
vocab_size = len(tokenizer.word_index) + 1
y = to_categorical(Y_train_time, num_classes=vocab_size)
print('Vocabulary size: ', vocab_size)
save_labels('models/labels.dump', Y_train_time, y)
save_tokenizer(FILE_NAME)
model = build_model(vocab_size, WINDOW)
model.fit(X_train_time, y,
epochs=50,
batch_size=16,
validation_split=0.1,
verbose=1,
callbacks=get_callbacks(),
shuffle=False) | lstm/main.py | 0.82559 | 0.162314 |
import re
import argparse
from pathlib import Path
number_mappings = {
"0" : "null",
"1" : "ein",
"2" : "zwei",
"3" : "drei",
"4" : "vier",
"5" : "fünf",
"6" : "sechs",
"7" : "sieben",
"8" : "acht",
"9" : "neun",
"10" : "zehn",
"11" : "elf",
"12" : "zwölf",
"13" : "dreizehn",
"14" : "vierzehn",
"15" : "fünfzehn",
"16" : "sechzehn",
"17" : "siebzehn",
"18" : "achtzehn",
"19" : "neunzehn",
"20" : "zwanzig",
"30" : "dreißig",
"60" : "sechzig",
"70" : "siebzig",
"100" : "einhundert"
}
ordinal_mappings = {
"1" : "erste",
"3" : "dritte",
"7" : "siebte",
"8" : "achte",
}
customs_mappings = {
"¼" : "ein viertel",
"½" : "einhalb",
"¾" : "drei viertel",
}
'''
ordinal_genders = {
["diese"] : "te",
["als"] : "ter",
[""] : "tes",
["am", "zum", "en", "im", "die", "dieser", "diese", "em"] : "ten",
}
'''
def number_literal(number):
x_str = str(number)
if x_str in number_mappings:
return number_mappings[x_str]
x_str_left = x_str[0]
x_str_right = x_str[1:].lstrip("0")
if len(x_str) == 8:
x_str_left = x_str[0:2]
x_str_right = x_str[2:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"millionen"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"millionen"
if len(x_str) == 7:
x_str_left = x_str[0]
x_str_right = x_str[1:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"millionen"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"millionen"
if len(x_str) == 6:
x_str_left = x_str[0:3]
x_str_right = x_str[3:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"tausend"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"tausend"
if len(x_str) == 5:
x_str_left = x_str[0:2]
x_str_right = x_str[2:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"tausend"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"tausend"
if len(x_str) == 4:
if x_str_right != "":
if int(number) >= 1200 and int(number) < 2000:
decade = x_str[2:].lstrip("0")
if decade != "":
return number_literal(x_str[0:2])+"hundert"+number_literal(x_str[2:].lstrip("0"))
else:
return number_literal(x_str[0:2])+"hundert"
else:
return number_literal(x_str_left)+"tausend"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"tausend"
if len(x_str) == 3:
if x_str_right != "":
return number_literal(x_str_left)+"hundert"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"hundert"
if len(x_str) == 2:
if x_str_right != "":
return number_literal(x_str_right)+"und"+number_literal(x_str_left+"0")
else:
return number_literal(x_str_left) + "zig"
class TextNormalizer:
def __init__(self) -> None:
pass
def normalize_rationals(self, input_sentence:str):
rationals = re.findall(r"(\d+[\. ']*\d*,\d+)",input_sentence)
for rational in rationals:
number, decimals = rational.split(",")
normalized_number = self.normalize_integer(number)
if number == "1":
normalized_number = normalized_number + "s"
decimals_list = []
for decimal in decimals:
normalized_decimal = self.normalize_integer(decimal)
if decimal == "1":
normalized_decimal = normalized_decimal + "s"
decimals_list.append(normalized_decimal)
normalized_rational = normalized_number + " komma " + " ".join(decimals_list)
input_sentence = re.sub(re.escape(rational), normalized_rational, input_sentence)
return input_sentence
def normalize_time(self, input_sentence:str):
times = re.findall(r"(\d{1,2}[\.:]\d{1,2}(?:( Uhr)?))(?!\d)",input_sentence)
if not len(times) > 0:
return input_sentence
if type(times[0]) is tuple:
temp_times = []
for t, _ in times:
temp_times.append(t)
times = temp_times
for time in times:
hour, minute = time.split()[0].replace(".",":").split(":")
if len(hour) > 2 or len(minute) > 2:
print("TOO LONG")
continue
if len(hour) == 2 and hour.startswith("0"):
hour = hour[1]
hour = self.normalize_integer(hour).capitalize()
if len(minute) == 2 and minute.startswith("0"):
minute = minute[1]
if minute == "0":
minute = ""
else:
minute = " "+self.normalize_integer(minute).capitalize()
normalized_time = hour + " Uhr" + minute
input_sentence = re.sub(re.escape(time), normalized_time, input_sentence)
return input_sentence
def normalize_date(self, input_sentence:str):
dates = re.findall(r"(\d{1,2}\.\d{1,2}\.\d{2,4})",input_sentence)
for date in dates:
day, month, year = date.split(".")
day = self.normalize_ordinal(day.lstrip("0")+".")
month = self.normalize_ordinal(month.lstrip("0")+".")
year = self.normalize_integer(year.lstrip("0"))
normalized_date = " ".join([day, month, year])
input_sentence = re.sub(re.escape(date), normalized_date, input_sentence)
return input_sentence
def normalize_ordinal(self, input_sentence:str):
ordinals = re.findall(r"([\.]*\d+[\. ']*\d*)\.(?!\d)",input_sentence)
for number in ordinals:
normalized_number = number
if len(normalized_number) > 2:
# compare against the string "0"; comparing a character to the int 0 was always False
if normalized_number[-2] == "0" and normalized_number[-1] in ordinal_mappings:
temp_number = self.normalize_integer(normalized_number[:-2]+"00")
normalized_number = temp_number + "ste"
else:
normalized_number = self.normalize_integer(normalized_number)+"te"
elif len(normalized_number) == 2:
normalized_number = self.normalize_integer(number)
normalized_number+="sten"
else:
if normalized_number in ordinal_mappings:
normalized_number = ordinal_mappings[normalized_number]
else:
normalized_number = self.normalize_integer(normalized_number)+"te"
# escape the matched number so "." is replaced literally, not as a regex wildcard
input_sentence = re.sub(re.escape(number)+r"\.", normalized_number, input_sentence)
return input_sentence
def normalize_integer(self, input_sentence:str):
numbers = re.findall(r"(\d+[\. ']*\d*)",input_sentence)
for number in numbers:
number_cleaned = number.replace(" ","").replace(".", "").replace("'","")
number = number.strip()
normalized_number = number_literal(number_cleaned)
input_sentence = re.sub(re.escape(number), normalized_number, input_sentence)
return input_sentence
def normalize_customs(self, input_sentence:str):
for custom_character in customs_mappings:
if custom_character in input_sentence:
input_sentence = input_sentence.replace(" "+custom_character, " "+customs_mappings[custom_character])
input_sentence = input_sentence.replace(custom_character, " "+customs_mappings[custom_character])
return input_sentence
def normalize_percent(self, input_sentence:str):
numbers = re.findall(r"(\d+%)",input_sentence)
for number in numbers:
number_cleaned = number.replace(" ","").replace(".", "").replace("'","")
number = number.strip()
normalized_number = number_literal(number_cleaned[:-1]) + " prozent"
input_sentence=re.sub(number, normalized_number, input_sentence)
return input_sentence
def normalize(self, input_sentence:str):
input_sentence = self.normalize_percent(input_sentence)
input_sentence = self.normalize_rationals(input_sentence)
input_sentence = self.normalize_time(input_sentence)
input_sentence = self.normalize_date(input_sentence)
input_sentence = self.normalize_ordinal(input_sentence)
input_sentence = self.normalize_integer(input_sentence)
input_sentence = self.normalize_customs(input_sentence)
return input_sentence
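# Minimal usage sketch (input is a made-up example):
#   normalizer = TextNormalizer()
#   normalizer.normalize("50% und ½")  # -> "fünfzig prozent und einhalb"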
def main():
parser = argparse.ArgumentParser(description="Normalizer control")
parser.add_argument("--files", required=True,action="append")
parser.add_argument("--save_path", required=True)
args = parser.parse_args()
normalizer = TextNormalizer()
normalized_sentences = []
for text_file in args.files:
with open(text_file, encoding="UTF-8") as file:
lines = file.readlines()
for line in lines:
normalized_line = normalizer.normalize(line)
normalized_sentences.append(normalized_line)
text_file_name = Path(text_file).name
with open(args.save_path+text_file_name+"_normalized.txt", "w", encoding="UTF-8") as file:
file.writelines(normalized_sentences)
normalized_sentences = []
if __name__ == "__main__":
main() | huiAudioCorpus/calculator/TextNormalizer.py | import re
import argparse
from pathlib import Path
number_mappings = {
"0" : "null",
"1" : "ein",
"2" : "zwei",
"3" : "drei",
"4" : "vier",
"5" : "fünf",
"6" : "sechs",
"7" : "sieben",
"8" : "acht",
"9" : "neun",
"10" : "zehn",
"11" : "elf",
"12" : "zwölf",
"13" : "dreizehn",
"14" : "vierzehn",
"15" : "fünfzehn",
"16" : "sechzehn",
"17" : "siebzehn",
"18" : "achtzehn",
"19" : "neunzehn",
"20" : "zwanzig",
"30" : "dreißig",
"60" : "sechzig",
"70" : "siebzig",
"100" : "einhundert"
}
ordinal_mappings = {
"1" : "erste",
"3" : "dritte",
"7" : "siebte",
"8" : "achte",
}
customs_mappings = {
"¼" : "ein viertel",
"½" : "einhalb",
"¾" : "drei viertel",
}
'''
ordinal_genders = {
["diese"] : "te",
["als"] : "ter",
[""] : "tes",
["am", "zum", "en", "im", "die", "dieser", "diese", "em"] : "ten",
}
'''
def number_literal(number):
x_str = str(number)
if x_str in number_mappings:
return number_mappings[x_str]
x_str_left = x_str[0]
x_str_right = x_str[1:].lstrip("0")
if len(x_str) == 8:
x_str_left = x_str[0:2]
x_str_right = x_str[2:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"millionen"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"millionen"
if len(x_str) == 7:
x_str_left = x_str[0]
x_str_right = x_str[1:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"millionen"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"millionen"
if len(x_str) == 6:
x_str_left = x_str[0:3]
x_str_right = x_str[3:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"tausend"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"tausend"
if len(x_str) == 5:
x_str_left = x_str[0:2]
x_str_right = x_str[2:].lstrip("0")
if x_str_right != "":
return number_literal(x_str_left)+"tausend"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"tausend"
if len(x_str) == 4:
if x_str_right != "":
if int(number) >= 1200 and int(number) < 2000:
decade = x_str[2:].lstrip("0")
if decade != "":
return number_literal(x_str[0:2])+"hundert"+number_literal(x_str[2:].lstrip("0"))
else:
return number_literal(x_str[0:2])+"hundert"
else:
return number_literal(x_str_left)+"tausend"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"tausend"
if len(x_str) == 3:
if x_str_right != "":
return number_literal(x_str_left)+"hundert"+number_literal(x_str_right)
else:
return number_literal(x_str_left)+"hundert"
if len(x_str) == 2:
if x_str_right != "":
return number_literal(x_str_right)+"und"+number_literal(x_str_left+"0")
else:
return number_literal(x_str_left) + "zig"
class TextNormalizer:
def __init__(self) -> None:
pass
def normalize_rationals(self, input_sentence:str):
rationals = re.findall(r"(\d+[\. ']*\d*,\d+)",input_sentence)
for rational in rationals:
number, decimals = rational.split(",")
normalized_number = self.normalize_integer(number)
if number == "1":
normalized_number = normalized_number + "s"
decimals_list = []
for decimal in decimals:
normalized_decimal = self.normalize_integer(decimal)
if decimal == "1":
normalized_decimal = normalized_decimal + "s"
decimals_list.append(normalized_decimal)
normalized_rational = normalized_number + " komma " + " ".join(decimals_list)
input_sentence = re.sub(rational, normalized_rational, input_sentence)
return input_sentence
def normalize_time(self, input_sentence:str):
times = re.findall(r"(\d{1,2}[\.:]\d{1,2}(?:( Uhr)?))(?!\d)",input_sentence)
if not len(times) > 0:
return input_sentence
if type(times[0]) is tuple:
temp_times = []
for t, _ in times:
temp_times.append(t)
times = temp_times
for time in times:
hour, minute = time.split()[0].replace(".",":").split(":")
if len(hour) > 2 or len(minute) > 2:
print("TOO LONG")
continue
if len(hour) == 2 and hour.startswith("0"):
hour = hour[1]
hour = self.normalize_integer(hour).capitalize()
if len(minute) == 2 and minute.startswith("0"):
minute = minute[1]
if minute == "0":
minute = ""
else:
minute = " "+self.normalize_integer(minute).capitalize()
normalized_time = hour + " Uhr" + minute
input_sentence = re.sub(time, normalized_time, input_sentence)
return input_sentence
def normalize_date(self, input_sentence:str):
dates = re.findall(r"(\d{1,2}\.\d{1,2}\.\d{2,4})",input_sentence)
for date in dates:
day, month, year = date.split(".")
day = self.normalize_ordinal(day.lstrip("0")+".")
month = self.normalize_ordinal(month.lstrip("0")+".")
year = self.normalize_integer(year.lstrip("0"))
normalized_date = " ".join([day, month, year])
input_sentence = re.sub(date, normalized_date, input_sentence)
return input_sentence
def normalize_ordinal(self, input_sentence:str):
ordinals = re.findall(r"([\.]*\d+[\. ']*\d*)\.(?!\d)",input_sentence)
for number in ordinals:
normalized_number = number
if len(normalized_number) > 2:
if normalized_number[-2] == 0 and normalized_number[-1] in ordinal_mappings:
temp_number = self.normalize_integer(normalized_number[:-2]+"00")
normalized_number = temp_number + "ste"
else:
normalized_number = self.normalize_integer(normalized_number)+"te"
elif len(normalized_number) == 2:
normalized_number = self.normalize_integer(number)
normalized_number+="sten"
else:
if normalized_number in ordinal_mappings:
normalized_number = ordinal_mappings[normalized_number]
else:
normalized_number = self.normalize_integer(normalized_number)+"te"
input_sentence = re.sub(number+".", normalized_number, input_sentence)
return input_sentence
def normalize_integer(self, input_sentence:str):
numbers = re.findall(r"(\d+[\. ']*\d*)",input_sentence)
for number in numbers:
number_cleaned = number.replace(" ","").replace(".", "").replace("'","")
number = number.strip()
normalized_number = number_literal(number_cleaned)
input_sentence=re.sub(number, normalized_number, input_sentence)
return input_sentence
def normalize_customs(self, input_sentence:str):
for custom_character in customs_mappings:
if custom_character in input_sentence:
input_sentence = input_sentence.replace(" "+custom_character, " "+customs_mappings[custom_character])
input_sentence = input_sentence.replace(custom_character, " "+customs_mappings[custom_character])
return input_sentence
def normalize_percent(self, input_sentence:str):
numbers = re.findall(r"(\d+%)",input_sentence)
for number in numbers:
number_cleaned = number.replace(" ","").replace(".", "").replace("'","")
number = number.strip()
normalized_number = number_literal(number_cleaned[:-1]) + " prozent"
            input_sentence = re.sub(re.escape(number), normalized_number, input_sentence)
return input_sentence
def normalize(self, input_sentence:str):
input_sentence = self.normalize_percent(input_sentence)
input_sentence = self.normalize_rationals(input_sentence)
input_sentence = self.normalize_time(input_sentence)
input_sentence = self.normalize_date(input_sentence)
input_sentence = self.normalize_ordinal(input_sentence)
input_sentence = self.normalize_integer(input_sentence)
input_sentence = self.normalize_customs(input_sentence)
return input_sentence
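# Usage sketch (the exact outputs depend on number_literal and the mapping
# tables defined above, so the strings here are illustrative, not verified):
#   normalizer = TextNormalizer()
#   normalizer.normalize("Termin am 3.10.2021 um 14:30 Uhr, 25% Rabatt")
#   # -> ordinal words for the date, "Vierzehn Uhr Dreissig" for the time,
#   # and "fuenfundzwanzig prozent" for the percentage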
def main():
parser = argparse.ArgumentParser(description="Normalizer control")
parser.add_argument("--files", required=True,action="append")
parser.add_argument("--save_path", required=True)
args = parser.parse_args()
normalizer = TextNormalizer()
normalized_sentences = []
for text_file in args.files:
with open(text_file, encoding="UTF-8") as file:
lines = file.readlines()
for line in lines:
normalized_line = normalizer.normalize(line)
normalized_sentences.append(normalized_line)
text_file_name = Path(text_file).name
        with open(Path(args.save_path) / (text_file_name + "_normalized.txt"), "w", encoding="UTF-8") as file:
file.writelines(normalized_sentences)
normalized_sentences = []
if __name__ == "__main__":
    main()
import os
import re
import sys
from os.path import expanduser
import ipfshttpclient
from broker import cfg
from broker._utils._log import log
from broker._utils.tools import get_ip, is_byte_str_zero, print_tb
from broker._utils.web3_tools import get_tx_status
from broker._utils.yaml import Yaml
from broker.config import env
from broker.errors import QuietExit
from broker.utils import start_ipfs_daemon
Ebb = cfg.Ebb
ipfs = cfg.ipfs
def _register_provider(self, *args, **kwargs):
"""Register provider."""
if is_byte_str_zero(env.PROVIDER_ID):
log(f"E: PROVIDER_ID={env.PROVIDER_ID} is not valid, change it in [m]~/.ebloc-broker/.env")
raise QuietExit
if self.does_provider_exist(env.PROVIDER_ID):
log(
f"warning: Provider {env.PROVIDER_ID} is already registered.\n"
"Please call the [blue]update_provider_info.py[/blue] or "
"[blue]update_provider_prices.py[/blue] script for an update."
)
raise QuietExit
if kwargs["commitment_blk"] < cfg.ONE_HOUR_BLOCK_DURATION:
raise Exception(f"Commitment block number should be greater than {cfg.ONE_HOUR_BLOCK_DURATION}")
if len(kwargs["f_id"]) >= 128:
raise Exception("f_id hould be lesser than 128")
if len(kwargs["gmail"]) >= 128:
raise Exception("e-mail should be less than 128")
    tx = self.register_provider(*args)
    return self.tx_id(tx)
def get_ipfs_id() -> str:
start_ipfs_daemon()
if ipfs.client:
return ipfs.get_ipfs_id()
else:
try:
            # may raise a ConnectionError if the daemon is not reachable
client = ipfshttpclient.connect("/ip4/127.0.0.1/tcp/5001/http")
except ipfshttpclient.exceptions.ConnectionError:
log(
"E: Failed to establish a new connection to IPFS, please run it on the background.\n"
"Please run [magenta]~/ebloc-broker/broker/_daemons/ipfs.py"
)
sys.exit(1)
except Exception as e:
print_tb(e)
log(
"E: Failed to establish a new connection to IPFS, please run it on the background.\n"
"Please run [magenta]~/ebloc-broker/broker/_daemons/ipfs.py"
)
sys.exit(1)
try:
return ipfs.get_ipfs_id(client)
except Exception as e:
print_tb(str(e))
sys.exit(1)
def error_msg(key, yaml_fn):
log(f"E: [blue]{key}[/blue] is empty in [magenta]{yaml_fn}")
def register_provider_wrapper(yaml_fn):
"""Register provider."""
yaml_fn = expanduser(yaml_fn)
if not os.path.exists(yaml_fn):
log(f"E: yaml_fn({yaml_fn}) does not exist")
raise QuietExit
args = Yaml(yaml_fn, auto_dump=False)
f_id = args["cfg"]["oc_username"].replace("@b2drop.eudat.eu", "")
gmail = args["cfg"]["gmail"]
_args = args["cfg"]["provider"]
available_core = _args["available_core"]
commitment_blk = _args["prices"]["commitment_blk"]
price_core_min = _args["prices"]["price_core_min"]
price_data_transfer = _args["prices"]["price_data_transfer"]
price_storage = _args["prices"]["price_storage"]
price_cache = _args["prices"]["price_cache"]
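    # A minimal cfg.yaml sketch matching the keys read above (all values are
    # illustrative assumptions, not recommended defaults):
    #
    # cfg:
    #   oc_username: alice@b2drop.eudat.eu
    #   gmail: alice@gmail.com
    #   provider:
    #     available_core: 4
    #     prices:
    #       commitment_blk: 240
    #       price_core_min: 1
    #       price_data_transfer: 1
    #       price_storage: 1
    #       price_cache: 1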
exit_flag = False
if env.PROVIDER_ID == Ebb.get_owner():
log("E: Address cannot be same as owner's")
exit_flag = True
if not f_id:
error_msg("f_id", yaml_fn)
exit_flag = True
if not available_core:
error_msg("available_core", yaml_fn)
exit_flag = True
if not commitment_blk:
error_msg("commitment_blk", yaml_fn)
exit_flag = True
if not price_core_min:
error_msg("price_core_min", yaml_fn)
exit_flag = True
if not price_data_transfer:
error_msg("price_data_transfer", yaml_fn)
exit_flag = True
if not price_storage:
error_msg("price_storage", yaml_fn)
exit_flag = True
if not price_cache:
error_msg("price_cache", yaml_fn)
exit_flag = True
if not gmail:
error_msg("gmail", yaml_fn)
exit_flag = True
if exit_flag:
sys.exit(1)
ipfs_id = get_ipfs_id()
ip_address = get_ip()
if ip_address not in ipfs_id:
        # the public IP should appear in the ipfs multiaddress
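        # e.g. /ip4/127.0.0.1/tcp/4001/p2p/<peer_id>
        #   -> /ip4/<public_ip>/tcp/4001/p2p/<peer_id>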
ipfs_address = re.sub("ip4.*?tcp", f"ip4/{ip_address}/tcp", ipfs_id, flags=re.DOTALL)
log(f"==> ipfs_address={ipfs_address}")
else:
ipfs_address = ipfs_id
    gmail = env.GMAIL
    gpg_fingerprint = ipfs.get_gpg_fingerprint(gmail)
    ipfs.is_gpg_published(gpg_fingerprint)
    ipfs.publish_gpg(gpg_fingerprint)
if not gmail:
log("E: Please provide a valid e-mail")
sys.exit(1)
prices = [price_core_min, price_data_transfer, price_storage, price_cache]
args = (gpg_fingerprint, gmail, f_id, ipfs_address, available_core, prices, commitment_blk)
kwargs = {
"gmail": gmail,
"f_id": f_id,
"commitment_blk": commitment_blk,
}
try:
tx_hash = Ebb._register_provider(*args, **kwargs)
if tx_hash:
get_tx_status(tx_hash)
else:
log()
except QuietExit:
pass
def main():
try:
# yaml_fn = expanduser("~/ebloc-broker/broker/yaml_files/register_provider.yaml")
yaml_fn = expanduser("~/.ebloc-broker/cfg.yaml") # setup for the provider
register_provider_wrapper(yaml_fn)
except Exception as e:
print_tb(e)
if __name__ == "__main__":
    main()
| broker/eblocbroker_scripts/register_provider.py |
__author__ = 'Charlie'
import numpy as np
import os, sys, inspect
import tensorflow as tf
utils_path = os.path.realpath(
os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0], "..")))
if utils_path not in sys.path:
sys.path.insert(0, utils_path)
import TensorflowUtils as utils
import read_notMNIST
BATCH_SIZE = 128
TRAIN_DATA_URL = 'http://commondatastorage.googleapis.com/books1000/notMNIST_large.tar.gz'
TEST_DATA_URL = 'http://commondatastorage.googleapis.com/books1000/notMNIST_small.tar.gz'
FLAGS = tf.flags.FLAGS
tf.flags.DEFINE_string("mode", "train", "Mode train/ test")
tf.flags.DEFINE_string("logs_dir", "logs/notMNIST_logs/", "Path to save log and train checkpoints")
tf.flags.DEFINE_string("data_dir", "Data_zoo/notMNIST/", "Path to save/ load notMNIST data")
NUMBER_OF_CLASSES = 10
IMAGE_SIZE = 28
MAX_ITERATIONS = int(1 + 1e4)
LEARNING_RATE = 1e-3
def inference_fully_convolutional(dataset):
'''
Fully convolutional inference on notMNIST dataset
    :param dataset: [batch_size, 28*28*1] tensor
:return: logits
'''
dataset_reshaped = tf.reshape(dataset, [-1, 28, 28, 1])
with tf.name_scope("conv1") as scope:
W_conv1 = utils.weight_variable_xavier_initialized([3, 3, 1, 32], name="W_conv1")
b_conv1 = utils.bias_variable([32], name="b_conv1")
h_conv1 = tf.nn.relu(utils.conv2d_strided(dataset_reshaped, W_conv1, b_conv1))
with tf.name_scope("conv2") as scope:
W_conv2 = utils.weight_variable_xavier_initialized([3, 3, 32, 64], name="W_conv2")
b_conv2 = utils.bias_variable([64], name="b_conv2")
h_conv2 = tf.nn.relu(utils.conv2d_strided(h_conv1, W_conv2, b_conv2))
with tf.name_scope("conv3") as scope:
W_conv3 = utils.weight_variable_xavier_initialized([3, 3, 64, 128], name="W_conv3")
b_conv3 = utils.bias_variable([128], name="b_conv3")
h_conv3 = tf.nn.relu(utils.conv2d_strided(h_conv2, W_conv3, b_conv3))
with tf.name_scope("conv4") as scope:
W_conv4 = utils.weight_variable_xavier_initialized([3, 3, 128, 256], name="W_conv4")
b_conv4 = utils.bias_variable([256], name="b_conv4")
h_conv4 = tf.nn.relu(utils.conv2d_strided(h_conv3, W_conv4, b_conv4))
with tf.name_scope("conv5") as scope:
# W_conv5 = utils.weight_variable_xavier_initialized([2, 2, 256, 512], name="W_conv5")
# b_conv5 = utils.bias_variable([512], name="b_conv5")
# h_conv5 = tf.nn.relu(utils.conv2d_strided(h_conv4, W_conv5, b_conv5))
h_conv5 = utils.avg_pool_2x2(h_conv4)
with tf.name_scope("conv6") as scope:
W_conv6 = utils.weight_variable_xavier_initialized([1, 1, 256, 10], name="W_conv6")
b_conv6 = utils.bias_variable([10], name="b_conv6")
        # keep raw logits here - softmax cross entropy expects unactivated outputs
        logits = utils.conv2d_basic(h_conv5, W_conv6, b_conv6)
print logits.get_shape()
logits = tf.reshape(logits, [-1, 10])
return logits
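# Spatial-size trace for the network above, assuming each conv2d_strided
# halves the feature map (stride 2, SAME padding): 28 -> 14 -> 7 -> 4 -> 2,
# then avg_pool_2x2 -> 1x1, so the 1x1 conv in "conv6" emits one 10-way
# logit vector per image.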
def loss(logits, labels):
cross_entropy = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, labels))
tf.scalar_summary("Entropy", cross_entropy)
return cross_entropy
def train(loss_val, step):
return tf.train.AdamOptimizer(LEARNING_RATE).minimize(loss_val, global_step=step)
def main(argv=None):
print "Reading notMNIST data..."
train_dataset, train_labels, valid_dataset, valid_labels, test_dataset, test_labels = \
read_notMNIST.get_notMNISTData(FLAGS.data_dir)
print "Setting up tf model..."
dataset = tf.placeholder(tf.float32, shape=(None, IMAGE_SIZE * IMAGE_SIZE))
labels = tf.placeholder(tf.float32, shape=(None, NUMBER_OF_CLASSES))
global_step = tf.Variable(0, trainable=False)
logits = inference_fully_convolutional(dataset)
for var in tf.trainable_variables():
utils.add_to_regularization_and_summary(var)
loss_val = loss(logits, labels)
train_op = train(loss_val, global_step)
summary_op = tf.merge_all_summaries()
with tf.Session() as sess:
print "Setting up summary and saver..."
sess.run(tf.initialize_all_variables())
summary_writer = tf.train.SummaryWriter(FLAGS.logs_dir, sess.graph)
saver = tf.train.Saver()
ckpt = tf.train.get_checkpoint_state(FLAGS.logs_dir)
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
print "Model restored!"
if FLAGS.mode == "train":
for step in xrange(MAX_ITERATIONS):
offset = (step * BATCH_SIZE) % (train_labels.shape[0] - BATCH_SIZE)
batch_data = train_dataset[offset:(offset + BATCH_SIZE), :]
batch_labels = train_labels[offset:(offset + BATCH_SIZE), :]
feed_dict = {dataset: batch_data, labels: batch_labels}
if step % 100 == 0:
l, summary_str = sess.run([loss_val, summary_op], feed_dict=feed_dict)
print "Step: %d Mini batch loss: %g"%(step, l)
summary_writer.add_summary(summary_str, step)
if step % 1000 == 0:
valid_loss = sess.run(loss_val, feed_dict={dataset:valid_dataset, labels:valid_labels})
print "-- Validation loss %g" % valid_loss
saver.save(sess, FLAGS.logs_dir +"model.ckpt", global_step=step)
sess.run(train_op, feed_dict=feed_dict)
test_loss = sess.run(loss_val, feed_dict={dataset:test_dataset, labels:test_labels})
print "Test loss: %g" % test_loss
if __name__ == "__main__":
    tf.app.run()
| notMNIST/notMNISTFullyConvultional.py |
import os
from pathlib import Path
import pytest
from nozomi import api
from nozomi.data import Post
@pytest.mark.integration
@pytest.mark.parametrize('url', [
'https://nozomi.la/post/26905532.html#veigar',
"https://nozomi.la/post/26932594.html#cho'gath",
'https://nozomi.la/post/25802243.html#nautilus'
])
def test_get_post_single_img(url: str):
post = api.get_post(url)
assert isinstance(post, Post)
@pytest.mark.integration
@pytest.mark.parametrize('url', [
'https://nozomi.la/post/25937459.html#pixiv_id_31112502'
])
def test_get_post_multi_img(url: str):
post = api.get_post(url)
assert isinstance(post, Post)
assert len(post.imageurls) > 1
@pytest.mark.integration
@pytest.mark.parametrize('positive_tags', [
(['akali', 'sakimichan']),
(['veigar'])
])
def test_retrieval_positive_tags(positive_tags):
for post in api.get_posts(positive_tags=positive_tags, negative_tags=[]):
assert isinstance(post, Post)
@pytest.mark.integration
@pytest.mark.parametrize('positive_tags, negative_tags', [
(['akali', 'sakimichan'], ['nudity']),
(['veigar'], ['nudity'])
])
def test_retrieval_negative_tags(positive_tags, negative_tags):
for post in api.get_posts(positive_tags=positive_tags, negative_tags=negative_tags):
assert isinstance(post, Post)
@pytest.mark.integration
@pytest.mark.parametrize('positive_tags, negative_tags', [
(['akali', 'sakimichan'], [])
])
def test_download_single_img(positive_tags, negative_tags):
repeat = 10
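    # stop after ten downloaded posts so the integration run stays quick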
for post in api.get_posts(positive_tags=positive_tags, negative_tags=negative_tags):
[image_name] = api.download_media(post, Path.cwd())
os.remove(Path.cwd().joinpath(image_name))
repeat -= 1
if repeat == 0:
break
@pytest.mark.integration
@pytest.mark.parametrize('url', [
'https://nozomi.la/post/25937459.html#pixiv_id_31112502'
])
def test_download_multi_img(url):
post = api.get_post(url)
assert len(post.imageurls) > 0
image_names = api.download_media(post, Path.cwd())
assert len(image_names) == len(post.imageurls)
for image_name in image_names:
        os.remove(Path.cwd().joinpath(image_name))
| tests/integration/test_api.py |
from unittest import mock
import pytest
from werkzeug.exceptions import HTTPException
from secure_scaffold.contrib.appengine import users
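# NOTE: a fixture named "request" shadows pytest's built-in request fixture,
# which newer pytest versions reject outright; renaming it (e.g. to
# mock_request) would be the safer long-term fix.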
@pytest.fixture
def request():
with mock.patch("flask.request") as mock_request:
mock_request.headers = {
users.USER_EMAIL_HEADER: "<EMAIL>",
users.USER_ID_HEADER: "1",
users.USER_AUTH_DOMAIN_HEADER: "gmail.com",
}
yield mock_request
def test_requires_auth_decorator_success(request):
func = mock.MagicMock()
decorated_req = users.requires_auth(func)
decorated_req(request)
assert func.called
def test_requires_auth_decorator_fail(request):
request.headers.pop(users.USER_EMAIL_HEADER)
request.headers.pop(users.USER_ID_HEADER)
func = mock.MagicMock()
decorated_req = users.requires_auth(func)
with pytest.raises(HTTPException):
try:
decorated_req(request)
except HTTPException as e:
assert e.code == 401
raise
assert not func.called
def test_requires_admin_decorator_success(request):
request.headers[users.USER_ADMIN_HEADER] = "1"
func = mock.MagicMock()
decorated_req = users.requires_admin(func)
decorated_req(request)
assert func.called
def test_requires_admin_decorator_fail(request):
request.headers[users.USER_ADMIN_HEADER] = "2"
func = mock.MagicMock()
decorated_req = users.requires_admin(func)
with pytest.raises(HTTPException):
try:
decorated_req(request)
except HTTPException as e:
assert e.code == 401
raise
assert not func.called
def test_get_header(request):
request.headers["test-header"] = "test"
assert users.get_header("test-header") == "test"
assert not users.get_header("no-header")
def test_is_current_user_admin(request):
request.headers[users.USER_ADMIN_HEADER] = "1"
assert users.is_current_user_admin()
request.headers[users.USER_ADMIN_HEADER] = "2"
assert not users.is_current_user_admin()
def test_gets_user_gets_data_from_headers(request):
user = users.get_current_user()
assert user.email() == "<EMAIL>"
assert user.user_id() == "1"
assert user.auth_domain() == "gmail.com"
def test_user_with_kwargs():
user = users.User(
email="<EMAIL>", _user_id="2", _auth_domain="gmail.com"
)
assert user.email() == "<EMAIL>"
assert user.user_id() == "2"
assert user.auth_domain() == "gmail.com"
def test_user_strict(request):
users.User()
request.headers[users.USER_EMAIL_HEADER] = None
with pytest.raises(users.UserNotFoundError):
users.User()
users.User(_strict_mode=False)
def test_user_nickname(request):
user = users.User()
assert user.nickname() == "<EMAIL>"
request.headers[users.USER_EMAIL_HEADER] = "<EMAIL>"
user = users.User()
assert user.nickname() == "test"
def test_user_hash(request):
user = users.User()
    assert hash(user) == hash((user.email(), user.auth_domain()))
| secure_scaffold/contrib/appengine/tests/test_users.py |
from .base_node import BaseNode
from .data_holder_node import DataHolderNode
from ..utils import view_full
from ..utils import view_summary
import warnings
import copy
class DataNode(BaseNode):
def __init__(self, graph_uid, graph_alias, node_uid, value="__specialPFV__NoneData", persist=False, verbose=False, alias=None, graph_dict=None):
super(DataNode, self).__init__(graph_uid, graph_alias, node_uid, 'data', verbose, alias or 'data')
self.value_holder = DataHolderNode(graph_uid, graph_alias, self.node_uid, value, self.verbose)
self.data_persist = persist
self.graph_uid = graph_uid
self.graph_alias = graph_alias
self.graph_dict = graph_dict
self.shallow_persist = False
self.is_active = False
def set_value(self, value):
self.value_holder.set_value(value)
def has_value(self):
return self.value_holder.has_value()
def persist(self):
self.data_persist = True
def shallowly_persist(self):
self.shallow_persist = True
def is_persisted(self):
return self.data_persist
def is_shallowly_persisted(self):
return self.shallow_persist
def get_persisted_data_dim_as_str(self):
if self.has_value():
return self.value_holder.get_persisted_data_dim_as_str()
else:
return ''
def view_activated(self, summary):
dependency_ancestor_node_weak_refs = self.get_all_dependency_ancestor_node_weak_refs()
dependency_ancestor_node_uids = [elem().node_uid for elem in dependency_ancestor_node_weak_refs]
dependency_ancestor_node_uids += [self.node_uid]
graph_dict_copied = copy.deepcopy(self.graph_dict)
for dependency_ancestor_node_uid in dependency_ancestor_node_uids:
graph_dict_copied[dependency_ancestor_node_uid]['is_activated'] = True
_graph_attributes = {'data_node_fontsize': '10',
'data_node_shape': 'box',
'data_node_color': 'None',
'op_node_fontsize': '12',
'op_node_shape': 'box',
'op_node_color': 'white',
'graph_ranksep': '0.415',
'graph_node_fontsize': '12.85',
'graph_node_shape': 'box3d',
'graph_node_color': 'white',
'graph_node_shapesize': '0.574',
'persist_record_shape': 'True'}
        if summary:
return view_summary(graph_dict_copied, _graph_attributes, verbose=self.verbose, current_graph_uid=self.graph_uid)
else:
return view_full(graph_dict_copied, _graph_attributes, verbose=self.verbose, current_graph_uid=self.graph_uid)
def get(self, view_dependency=False, summary=True):
# this is to support the multi-graph paradigm
self.remove_dead_child_nodes()
if self.value_holder.has_value():
if view_dependency:
self.view_activated(summary)
if self.verbose:
if self.is_shallowly_persisted():
print('{} has been shallowly persisted'.format(self.node_uid))
elif self.is_persisted():
print('{} has been persisted'.format(self.node_uid))
elif self.has_value():
print('{} has been computed'.format(self.node_uid))
# update graph_dict
# during a computation execution, the value_holder can hold transient data
# so we need to check that this data node is indeed persisted
# as well as actually has data
if self.is_persisted():
data_dim = self.get_persisted_data_dim_as_str()
self.graph_dict[self.node_uid]['data_dim'] = data_dim
return self.value_holder.get()
else:
self.activate_dependency_op_nodes()
if view_dependency:
self.view_activated(summary)
if self.verbose:
print('computing for {}'.format(self.node_uid))
self.parent_node_weak_refs[0]().run() # a data node can only have 1 op parent node
# update graph_dict
if self.is_persisted():
data_dim = self.get_persisted_data_dim_as_str()
self.graph_dict[self.node_uid]['data_dim'] = data_dim
return self.value_holder.get()
def activate_dependency_op_nodes(self):
dependency_ancestor_node_weak_refs = self.get_dependency_ancestor_node_weak_refs()
dependency_op_nodes_weak_refs = [elem for elem in dependency_ancestor_node_weak_refs
if elem().node_type == 'operation']
for dependency_op_node_weak_ref in dependency_op_nodes_weak_refs:
dependency_op_node_weak_ref().activate()
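    # Typical lazy-evaluation flow, as a sketch (graph/node construction
    # helpers live elsewhere in this package; the lines below are illustrative):
    #   data_node.persist()                          # keep the value after computation
    #   value = data_node.get(view_dependency=True)  # runs the parent op node on demand
    #   data_node.release_memory()                   # drop the held value again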
def release_memory(self):
if self.is_persisted():
warnings.warn("You are releasing a DataNode that was persisted!", RuntimeWarning)
self.graph_dict[self.node_uid]['data_dim'] = ''
del self.value_holder
        self.value_holder = DataHolderNode(self.graph_uid, self.graph_alias, self.node_uid, "__specialPFV__NoneData", self.verbose)
| src/pyflow/node/data_node.py |
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rest_framework import decorators, exceptions, response, status, serializers as rf_serializers
from waldur_core.core import validators as core_validators, exceptions as core_exceptions
from waldur_core.structure import (views as structure_views, filters as structure_filters,
permissions as structure_permissions)
from . import models, filters, serializers, executors
class OpenStackServiceViewSet(structure_views.BaseServiceViewSet):
queryset = models.OpenStackService.objects.all().order_by('id')
serializer_class = serializers.ServiceSerializer
def list(self, request, *args, **kwargs):
"""
To create a service, issue a **POST** to */api/openstack/* as a customer owner.
You can create service based on shared service settings. Example:
.. code-block:: http
POST /api/openstack/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Authorization: Token <PASSWORD>
Host: example.com
{
"name": "Common OpenStack",
"customer": "http://example.com/api/customers/1040561ca9e046d2b74268600c7e1105/",
"settings": "http://example.com/api/service-settings/93ba615d6111466ebe3f792669059cb4/"
}
Or provide your own credentials. Example:
.. code-block:: http
POST /api/openstack/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Authorization: Token <PASSWORD>
Host: example.com
{
"name": "My OpenStack",
"customer": "http://example.com/api/customers/1040561ca9e046d<PASSWORD>/",
"backend_url": "http://keystone.example.com:5000/v2.0",
"username": "admin",
"password": "<PASSWORD>"
}
"""
return super(OpenStackServiceViewSet, self).list(request, *args, **kwargs)
def retrieve(self, request, *args, **kwargs):
"""
To update OpenStack service issue **PUT** or **PATCH** against */api/openstack/<service_uuid>/*
as a customer owner. You can update service's `name` and `available_for_all` fields.
Example of a request:
.. code-block:: http
PUT /api/openstack/c6526bac12b343a9a65c4cd6710666ee/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Authorization: Token <PASSWORD>
Host: example.com
{
"name": "My OpenStack2"
}
To remove OpenStack service, issue **DELETE** against */api/openstack/<service_uuid>/* as
staff user or customer owner.
"""
return super(OpenStackServiceViewSet, self).retrieve(request, *args, **kwargs)
class OpenStackServiceProjectLinkViewSet(structure_views.BaseServiceProjectLinkViewSet):
queryset = models.OpenStackServiceProjectLink.objects.all()
serializer_class = serializers.ServiceProjectLinkSerializer
filter_class = filters.OpenStackServiceProjectLinkFilter
def list(self, request, *args, **kwargs):
"""
        In order to be able to provision OpenStack resources, a service must first be linked to a project.
        To do that, **POST** a connection between a project and a service to */api/openstack-service-project-link/*
        as a staff user or customer owner.
Example of a request:
.. code-block:: http
POST /api/openstack-service-project-link/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Authorization: Token <PASSWORD>653b9ec92c6cbac41c706593e66f567a7fa4
Host: example.com
{
"project": "http://example.com/api/projects/e5f973af2eb14d2d8c38d62bcbaccb33/",
"service": "http://example.com/api/openstack/b0e8a4cbd47c4f9ca01642b7ec033db4/"
}
        To remove a link, issue **DELETE** to the URL of the corresponding connection as a staff user or customer owner.
"""
return super(OpenStackServiceProjectLinkViewSet, self).list(request, *args, **kwargs)
class FlavorViewSet(structure_views.BaseServicePropertyViewSet):
"""
VM instance flavor is a pre-defined set of virtual hardware parameters that the instance will use:
CPU, memory, disk size etc. VM instance flavor is not to be confused with VM template -- flavor is a set of virtual
hardware parameters whereas template is a definition of a system to be installed on this instance.
"""
queryset = models.Flavor.objects.all().order_by('settings', 'cores', 'ram', 'disk')
serializer_class = serializers.FlavorSerializer
lookup_field = 'uuid'
filter_class = filters.FlavorFilter
class ImageViewSet(structure_views.BaseServicePropertyViewSet):
queryset = models.Image.objects.all()
serializer_class = serializers.ImageSerializer
lookup_field = 'uuid'
filter_class = filters.ImageFilter
class SecurityGroupViewSet(structure_views.BaseResourceViewSet):
queryset = models.SecurityGroup.objects.all()
serializer_class = serializers.SecurityGroupSerializer
filter_class = filters.SecurityGroupFilter
disabled_actions = ['create', 'pull'] # pull operation should be implemented in WAL-323
def default_security_group_validator(security_group):
if security_group.name == 'default':
raise exceptions.ValidationError({
'name': _('Default security group is managed by OpenStack itself.')
})
update_validators = partial_update_validators = structure_views.ResourceViewSet.update_validators + [
default_security_group_validator
]
update_executor = executors.SecurityGroupUpdateExecutor
destroy_validators = structure_views.ResourceViewSet.destroy_validators + [
default_security_group_validator
]
delete_executor = executors.SecurityGroupDeleteExecutor
@decorators.detail_route(methods=['POST'])
def set_rules(self, request, uuid=None):
""" WARNING! Auto-generated HTML form is wrong for this endpoint. List should be defined as input.
Example:
[
{
"protocol": "tcp",
"from_port": 1,
"to_port": 10,
"cidr": "10.1.1.0/24"
}
]
"""
        # XXX: DRF does not support form generation for list serializers.
        # That's why we use a different serializer in the view.
serializer = serializers.SecurityGroupRuleListUpdateSerializer(
data=request.data, context=self.get_serializer_context())
serializer.is_valid(raise_exception=True)
serializer.save()
executors.PushSecurityGroupRulesExecutor().execute(self.get_object())
return response.Response(
{'status': _('Rules update was successfully scheduled.')}, status=status.HTTP_202_ACCEPTED)
set_rules_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
set_rules_serializer_class = serializers.SecurityGroupRuleUpdateSerializer
class FloatingIPViewSet(structure_views.BaseResourceViewSet):
queryset = models.FloatingIP.objects.all().order_by('address')
serializer_class = serializers.FloatingIPSerializer
filter_class = filters.FloatingIPFilter
disabled_actions = ['update', 'partial_update', 'create']
delete_executor = executors.FloatingIPDeleteExecutor
pull_executor = executors.FloatingIPPullExecutor
def list(self, request, *args, **kwargs):
"""
To get a list of all available floating IPs, issue **GET** against */api/floating-ips/*.
Floating IPs are read only. Each floating IP has fields: 'address', 'status'.
Status *DOWN* means that floating IP is not linked to a VM, status *ACTIVE* means that it is in use.
"""
return super(FloatingIPViewSet, self).list(request, *args, **kwargs)
class TenantViewSet(structure_views.ImportableResourceViewSet):
queryset = models.Tenant.objects.all()
serializer_class = serializers.TenantSerializer
filter_class = structure_filters.BaseResourceFilter
create_executor = executors.TenantCreateExecutor
update_executor = executors.TenantUpdateExecutor
pull_executor = executors.TenantPullExecutor
importable_resources_backend_method = 'get_tenants_for_import'
importable_resources_serializer_class = serializers.TenantImportableSerializer
importable_resources_permissions = [structure_permissions.is_staff]
import_resource_serializer_class = serializers.TenantImportSerializer
import_resource_permissions = [structure_permissions.is_staff]
import_resource_executor = executors.TenantImportExecutor
def delete_permission_check(request, view, obj=None):
if not obj:
return
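        # shared service settings: managers may delete a tenant only when the
        # MANAGER_CAN_MANAGE_TENANTS flag allows it, otherwise an owner is
        # required; private settings only need a project administrator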
if obj.service_project_link.service.settings.shared:
if settings.WALDUR_OPENSTACK['MANAGER_CAN_MANAGE_TENANTS']:
structure_permissions.is_manager(request, view, obj)
else:
structure_permissions.is_owner(request, view, obj)
else:
structure_permissions.is_administrator(request, view, obj)
delete_executor = executors.TenantDeleteExecutor
destroy_permissions = [
delete_permission_check,
structure_permissions.check_access_to_services_management,
]
create_permissions = update_permissions = partial_update_permissions = [
structure_permissions.check_access_to_services_management,
]
@decorators.detail_route(methods=['post'])
def set_quotas(self, request, uuid=None):
"""
A quota can be set for a particular tenant. Only staff users can do that.
In order to set quota submit **POST** request to */api/openstack-tenants/<uuid>/set_quotas/*.
The quota values are propagated to the backend.
The following quotas are supported. All values are expected to be integers:
- instances - maximal number of created instances.
- ram - maximal size of ram for allocation. In MiB_.
- storage - maximal size of storage for allocation. In MiB_.
- vcpu - maximal number of virtual cores for allocation.
- security_group_count - maximal number of created security groups.
- security_group_rule_count - maximal number of created security groups rules.
- volumes - maximal number of created volumes.
- snapshots - maximal number of created snapshots.
It is possible to update quotas by one or by submitting all the fields in one request.
Waldur will attempt to update the provided quotas. Please note, that if provided quotas are
conflicting with the backend (e.g. requested number of instances is below of the already existing ones),
some quotas might not be applied.
.. _MiB: http://en.wikipedia.org/wiki/Mebibyte
Example of a valid request (token is user specific):
.. code-block:: http
POST /api/openstack-tenants/c84d653b9ec92c6cbac41c706593e66f567a7fa4/set_quotas/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Host: example.com
{
"instances": 30,
"ram": 100000,
"storage": 1000000,
"vcpu": 30,
"security_group_count": 100,
"security_group_rule_count": 100,
"volumes": 10,
"snapshots": 20
}
Response code of a successful request is **202 ACCEPTED**.
In case tenant is in a non-stable status, the response would be **409 CONFLICT**.
In this case REST client is advised to repeat the request after some time.
On successful completion the task will synchronize quotas with the backend.
"""
tenant = self.get_object()
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
quotas = dict(serializer.validated_data)
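        # store the new limits locally first; the executor then pushes them to
        # the OpenStack backend asynchronously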
for quota_name, limit in quotas.items():
tenant.set_quota_limit(quota_name, limit)
executors.TenantPushQuotasExecutor.execute(tenant, quotas=quotas)
return response.Response(
{'detail': _('Quota update has been scheduled')}, status=status.HTTP_202_ACCEPTED)
set_quotas_permissions = [structure_permissions.is_staff]
set_quotas_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
set_quotas_serializer_class = serializers.TenantQuotaSerializer
@decorators.detail_route(methods=['post'])
def create_network(self, request, uuid=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
network = serializer.save()
executors.NetworkCreateExecutor().execute(network)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_network_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
create_network_serializer_class = serializers.NetworkSerializer
def external_network_is_defined(tenant):
if not tenant.external_network_id:
raise core_exceptions.IncorrectStateException(
_('Cannot create floating IP if tenant external network is not defined.'))
@decorators.detail_route(methods=['post'])
def create_floating_ip(self, request, uuid=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
floating_ip = serializer.save()
executors.FloatingIPCreateExecutor.execute(floating_ip)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_floating_ip_validators = [core_validators.StateValidator(models.Tenant.States.OK),
external_network_is_defined]
create_floating_ip_serializer_class = serializers.FloatingIPSerializer
@decorators.detail_route(methods=['post'])
def pull_floating_ips(self, request, uuid=None):
tenant = self.get_object()
executors.TenantPullFloatingIPsExecutor.execute(tenant)
return response.Response(status=status.HTTP_202_ACCEPTED)
pull_floating_ips_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
pull_floating_ips_serializer_class = rf_serializers.Serializer
@decorators.detail_route(methods=['post'])
def create_security_group(self, request, uuid=None):
"""
Example of a request:
.. code-block:: http
{
"name": "Security group name",
"description": "description",
"rules": [
{
"protocol": "tcp",
"from_port": 1,
"to_port": 10,
"cidr": "10.1.1.0/24"
},
{
"protocol": "udp",
"from_port": 10,
"to_port": 8000,
"cidr": "10.1.1.0/24"
}
]
}
"""
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
security_group = serializer.save()
executors.SecurityGroupCreateExecutor().execute(security_group)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_security_group_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
create_security_group_serializer_class = serializers.SecurityGroupSerializer
@decorators.detail_route(methods=['post'])
def pull_security_groups(self, request, uuid=None):
executors.TenantPullSecurityGroupsExecutor.execute(self.get_object())
return response.Response(
{'status': _('Security groups pull has been scheduled.')}, status=status.HTTP_202_ACCEPTED)
pull_security_groups_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
@decorators.detail_route(methods=['post'])
def change_password(self, request, uuid=None):
serializer = self.get_serializer(instance=self.get_object(), data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
executors.TenantChangeUserPasswordExecutor.execute(self.get_object())
return response.Response({'status': _('Password update has been scheduled.')}, status=status.HTTP_202_ACCEPTED)
change_password_serializer_class = serializers.TenantChangePasswordSerializer
change_password_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
@decorators.detail_route(methods=['post'])
def pull_quotas(self, request, uuid=None):
executors.TenantPullQuotasExecutor.execute(self.get_object())
return response.Response({'status': _('Quotas pull has been scheduled.')}, status=status.HTTP_202_ACCEPTED)
pull_quotas_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
class NetworkViewSet(structure_views.BaseResourceViewSet):
queryset = models.Network.objects.all()
serializer_class = serializers.NetworkSerializer
filter_class = filters.NetworkFilter
disabled_actions = ['create']
update_executor = executors.NetworkUpdateExecutor
delete_executor = executors.NetworkDeleteExecutor
pull_executor = executors.NetworkPullExecutor
@decorators.detail_route(methods=['post'])
def create_subnet(self, request, uuid=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
subnet = serializer.save()
executors.SubNetCreateExecutor.execute(subnet)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_subnet_validators = [core_validators.StateValidator(models.Network.States.OK)]
create_subnet_serializer_class = serializers.SubNetSerializer
class SubNetViewSet(structure_views.BaseResourceViewSet):
queryset = models.SubNet.objects.all()
serializer_class = serializers.SubNetSerializer
filter_class = filters.SubNetFilter
disabled_actions = ['create']
update_executor = executors.SubNetUpdateExecutor
delete_executor = executors.SubNetDeleteExecutor
    pull_executor = executors.SubNetPullExecutor
| src/waldur_openstack/openstack/views.py |
class FloatingIPViewSet(structure_views.BaseResourceViewSet):
queryset = models.FloatingIP.objects.all().order_by('address')
serializer_class = serializers.FloatingIPSerializer
filter_class = filters.FloatingIPFilter
disabled_actions = ['update', 'partial_update', 'create']
delete_executor = executors.FloatingIPDeleteExecutor
pull_executor = executors.FloatingIPPullExecutor
def list(self, request, *args, **kwargs):
"""
To get a list of all available floating IPs, issue **GET** against */api/floating-ips/*.
Floating IPs are read only. Each floating IP has fields: 'address', 'status'.
Status *DOWN* means that floating IP is not linked to a VM, status *ACTIVE* means that it is in use.
"""
return super(FloatingIPViewSet, self).list(request, *args, **kwargs)
class TenantViewSet(structure_views.ImportableResourceViewSet):
queryset = models.Tenant.objects.all()
serializer_class = serializers.TenantSerializer
filter_class = structure_filters.BaseResourceFilter
create_executor = executors.TenantCreateExecutor
update_executor = executors.TenantUpdateExecutor
pull_executor = executors.TenantPullExecutor
importable_resources_backend_method = 'get_tenants_for_import'
importable_resources_serializer_class = serializers.TenantImportableSerializer
importable_resources_permissions = [structure_permissions.is_staff]
import_resource_serializer_class = serializers.TenantImportSerializer
import_resource_permissions = [structure_permissions.is_staff]
import_resource_executor = executors.TenantImportExecutor
def delete_permission_check(request, view, obj=None):
if not obj:
return
if obj.service_project_link.service.settings.shared:
if settings.WALDUR_OPENSTACK['MANAGER_CAN_MANAGE_TENANTS']:
structure_permissions.is_manager(request, view, obj)
else:
structure_permissions.is_owner(request, view, obj)
else:
structure_permissions.is_administrator(request, view, obj)
delete_executor = executors.TenantDeleteExecutor
destroy_permissions = [
delete_permission_check,
structure_permissions.check_access_to_services_management,
]
create_permissions = update_permissions = partial_update_permissions = [
structure_permissions.check_access_to_services_management,
]
@decorators.detail_route(methods=['post'])
def set_quotas(self, request, uuid=None):
"""
A quota can be set for a particular tenant. Only staff users can do that.
In order to set quota submit **POST** request to */api/openstack-tenants/<uuid>/set_quotas/*.
The quota values are propagated to the backend.
The following quotas are supported. All values are expected to be integers:
- instances - maximal number of created instances.
- ram - maximal size of ram for allocation. In MiB_.
- storage - maximal size of storage for allocation. In MiB_.
- vcpu - maximal number of virtual cores for allocation.
- security_group_count - maximal number of created security groups.
- security_group_rule_count - maximal number of created security groups rules.
- volumes - maximal number of created volumes.
- snapshots - maximal number of created snapshots.
It is possible to update quotas by one or by submitting all the fields in one request.
Waldur will attempt to update the provided quotas. Please note, that if provided quotas are
conflicting with the backend (e.g. requested number of instances is below of the already existing ones),
some quotas might not be applied.
.. _MiB: http://en.wikipedia.org/wiki/Mebibyte
Example of a valid request (token is user specific):
.. code-block:: http
POST /api/openstack-tenants/c84d653b9ec92c6cbac41c706593e66f567a7fa4/set_quotas/ HTTP/1.1
Content-Type: application/json
Accept: application/json
Host: example.com
{
"instances": 30,
"ram": 100000,
"storage": 1000000,
"vcpu": 30,
"security_group_count": 100,
"security_group_rule_count": 100,
"volumes": 10,
"snapshots": 20
}
Response code of a successful request is **202 ACCEPTED**.
In case tenant is in a non-stable status, the response would be **409 CONFLICT**.
In this case REST client is advised to repeat the request after some time.
On successful completion the task will synchronize quotas with the backend.
"""
tenant = self.get_object()
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
quotas = dict(serializer.validated_data)
for quota_name, limit in quotas.items():
tenant.set_quota_limit(quota_name, limit)
executors.TenantPushQuotasExecutor.execute(tenant, quotas=quotas)
return response.Response(
{'detail': _('Quota update has been scheduled')}, status=status.HTTP_202_ACCEPTED)
set_quotas_permissions = [structure_permissions.is_staff]
set_quotas_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
set_quotas_serializer_class = serializers.TenantQuotaSerializer
@decorators.detail_route(methods=['post'])
def create_network(self, request, uuid=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
network = serializer.save()
executors.NetworkCreateExecutor().execute(network)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_network_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
create_network_serializer_class = serializers.NetworkSerializer
def external_network_is_defined(tenant):
if not tenant.external_network_id:
raise core_exceptions.IncorrectStateException(
_('Cannot create floating IP if tenant external network is not defined.'))
@decorators.detail_route(methods=['post'])
def create_floating_ip(self, request, uuid=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
floating_ip = serializer.save()
executors.FloatingIPCreateExecutor.execute(floating_ip)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_floating_ip_validators = [core_validators.StateValidator(models.Tenant.States.OK),
external_network_is_defined]
create_floating_ip_serializer_class = serializers.FloatingIPSerializer
@decorators.detail_route(methods=['post'])
def pull_floating_ips(self, request, uuid=None):
tenant = self.get_object()
executors.TenantPullFloatingIPsExecutor.execute(tenant)
return response.Response(status=status.HTTP_202_ACCEPTED)
pull_floating_ips_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
pull_floating_ips_serializer_class = rf_serializers.Serializer
@decorators.detail_route(methods=['post'])
def create_security_group(self, request, uuid=None):
"""
Example of a request:
.. code-block:: http
{
"name": "Security group name",
"description": "description",
"rules": [
{
"protocol": "tcp",
"from_port": 1,
"to_port": 10,
"cidr": "10.1.1.0/24"
},
{
"protocol": "udp",
"from_port": 10,
"to_port": 8000,
"cidr": "10.1.1.0/24"
}
]
}
"""
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
security_group = serializer.save()
executors.SecurityGroupCreateExecutor().execute(security_group)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_security_group_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
create_security_group_serializer_class = serializers.SecurityGroupSerializer
@decorators.detail_route(methods=['post'])
def pull_security_groups(self, request, uuid=None):
executors.TenantPullSecurityGroupsExecutor.execute(self.get_object())
return response.Response(
{'status': _('Security groups pull has been scheduled.')}, status=status.HTTP_202_ACCEPTED)
pull_security_groups_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
@decorators.detail_route(methods=['post'])
def change_password(self, request, uuid=None):
serializer = self.get_serializer(instance=self.get_object(), data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save()
executors.TenantChangeUserPasswordExecutor.execute(self.get_object())
return response.Response({'status': _('Password update has been scheduled.')}, status=status.HTTP_202_ACCEPTED)
change_password_serializer_class = serializers.TenantChangePasswordSerializer
change_password_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
@decorators.detail_route(methods=['post'])
def pull_quotas(self, request, uuid=None):
executors.TenantPullQuotasExecutor.execute(self.get_object())
return response.Response({'status': _('Quotas pull has been scheduled.')}, status=status.HTTP_202_ACCEPTED)
pull_quotas_validators = [core_validators.StateValidator(models.Tenant.States.OK)]
class NetworkViewSet(structure_views.BaseResourceViewSet):
queryset = models.Network.objects.all()
serializer_class = serializers.NetworkSerializer
filter_class = filters.NetworkFilter
disabled_actions = ['create']
update_executor = executors.NetworkUpdateExecutor
delete_executor = executors.NetworkDeleteExecutor
pull_executor = executors.NetworkPullExecutor
@decorators.detail_route(methods=['post'])
def create_subnet(self, request, uuid=None):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
subnet = serializer.save()
executors.SubNetCreateExecutor.execute(subnet)
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
create_subnet_validators = [core_validators.StateValidator(models.Network.States.OK)]
create_subnet_serializer_class = serializers.SubNetSerializer
class SubNetViewSet(structure_views.BaseResourceViewSet):
queryset = models.SubNet.objects.all()
serializer_class = serializers.SubNetSerializer
filter_class = filters.SubNetFilter
disabled_actions = ['create']
update_executor = executors.SubNetUpdateExecutor
delete_executor = executors.SubNetDeleteExecutor
pull_executor = executors.SubNetPullExecutor | 0.749821 | 0.14777 |
import sys
__version__ = '0.99'
header = '''\
__ _ __ ___ _ _ __ __ ___ _ _ _ ______
|_)|_|(_ (_ |_|| ||_)| \(_ |_||_||_/|_ |_)
| | |__)__)||||_|| \|_/__)| || || \|__| \_v{}
'''.format(__version__)
charsets = {
'alphanum': '0..9A..Za..z',
'extended': '0..9A..Za..z!@#$%',
'ascii': '!..~',
}
def expand(s):
'''Create a string of characters by expanding dots
Given an input string of the form "a..e" expands the dots to form "abcde"
following the order of the ascii table. The full printable ascii table is
generated from "!..~". The input string can contain multiple sets of dots, as
in "0..9A..Za..z" which generates the full set of alphanumerical symbols. To
begin a range with a dot it should be escaped as "./..".'''
chars, *parts = s.split('..')
for part in parts:
chars += ''.join(chr(i) for i in range(ord(chars[-1])+1,ord(part[0]))) + part
return chars
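# A few worked expansions of the dot syntax above; each result follows
# directly from the function, including the escaped-dot form described in
# the docstring:
#
#   expand('a..e')      # -> 'abcde'
#   expand('0..9A..F')  # -> '0123456789ABCDEF'
#   expand('./..5')     # -> './012345' (leading dot escaped as "./..")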
def get_config_path(service):
'''Create a path object to the config file for service
Tests for existence of a ~/.config/passwordshaker or ~/.passwordshaker
directory, in that order, and returns a pathlib.Path instance to the
(possibly nonexistent) configuration file within, or None if no directory was
found to exist.'''
import pathlib
home = pathlib.Path.home()
for path in home/'.config'/'passwordshaker', home/'.passwordshaker':
if path.is_dir():
return path / service
def load_options(service, **args):
'''Load options from config file and optionally amend them
Returns a dictionary with the keys 'modifier', 'length' and 'charset', and
possibly other data. Values are obtained from keyword arguments, a config
file (if existing), or default values, in decreasing order of priority. An
empty modifier results in automatic generation of a new, previously unused
modifier. For this a configuration path must be active.'''
path = get_config_path(service)
path_items = [line.rstrip().partition(' ')[::2] for line in path.open()] if path and path.is_file() else []
conf = {'modifier': service, 'length': '32', 'charset': 'ascii'} # default values
conf.update(path_items) # stored values
conf.update(args) # newly specified values
if not conf['modifier']:
print('automatically selecting new modifier', file=sys.stderr)
        assert path, 'automatic modifier selection requires a config path'
i = 1
used_suffices = [value[len(service):] for key, value in path_items if key=='modifier' and value.startswith(service)]
while str(i) in used_suffices:
i += 1
conf['modifier'] = service+str(i)
# parse options
options = {key: value for key, value in conf.items() if value} # clear erased values
assert options['charset'] in charsets, 'invalid character set {!r}; choose from {}'.format(options['charset'], ', '.join(charsets))
assert options['length'].isdigit(), 'invalid length {!r}'.format(options['length'])
options['length'] = int(conf['length'])
return options
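# The config file read above is a plain "key value" store, one pair per line
# (split via str.partition(' ')). A hypothetical
# ~/.config/passwordshaker/example.com containing:
#
#   modifier example.com2
#   length 24
#   charset alphanum
#
# combined with load_options('example.com', length='16') yields
# {'modifier': 'example.com2', 'length': 16, 'charset': 'alphanum'},
# since keyword arguments take priority over stored values.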
def save_options(service, options):
'''Store options in config file
Takes an options dictionary as returned by load_options and stores it in the
configuration directory, appending to previously existing data in case values
have changed. Silently returns if no configuration directory exists.'''
path = get_config_path(service)
if not path:
return
conf = {'modifier': service}
if path.is_file():
conf.update(line.rstrip().partition(' ')[::2] for line in path.open())
changes = {key: str(value) for key, value in options.items() if str(value) != conf.pop(key, '')}
changes.update((key, '') for key, value in conf.items() if value) # clear removed items
if not changes:
return
print('updating', ', '.join(changes), file=sys.stderr)
with path.open('a') as f:
for key, value in changes.items():
print(key, value, file=f)
def generate(key, chars, length):
'''Generate character string by long division of shake_256 hash
The core functionality of the passwordshaker module. Hashes the `key` string
using the shake_256 algorithm, and composes from that a string of speficied
length and characters.'''
import hashlib, math
nchars = len(chars)
nbytes = math.ceil(length * math.log2(nchars) / 8)
v = 0
for b in hashlib.shake_256(key.encode()).digest(nbytes):
v <<= 8
v += b
pw = ''
for _ in range(length):
v, n = divmod(v, nchars)
pw += chars[n]
assert v < 256
return pw
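# Worked sizing example for generate() above, using only its own definitions.
# For length=4 over the 62-character alphanum set, nbytes =
# ceil(4 * log2(62) / 8) = 3, so v starts below 2**24; since
# 2**24 < 256 * 62**4, four rounds of divmod peel off four characters and
# leave a quotient below 256, which is exactly what the final assert checks:
#
#   pw = generate('some key', expand(charsets['alphanum']), 4)  # 4 characters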
def password(modifier, length, charset, **other):
'''Convenience function for command line interaction
Displays the provided information, requests the master key, displays a
    four-letter (two-syllable) fingerprint, and returns the password corresponding to the given
specifications.'''
import getpass
print('shaking {}{} password for {}'.format(charset, length, modifier), file=sys.stderr)
for key, val in other.items():
print('{}: {}'.format(key, val), file=sys.stderr)
secret = getpass.getpass('master key: ')
if not secret:
raise KeyboardInterrupt
print('fingerprint:', generate(key=secret, chars=[c+v for c in 'bcdfghjklmnpqrstvwxz' for v in 'aeiouy'], length=2), file=sys.stderr)
    return generate(key=secret+modifier, chars=expand(charsets[charset]), length=length)
| passwordshaker.py | 0.486575 | 0.245627 |
import sys
import os.path
import re
from collections import defaultdict
from typing import List, DefaultDict, Iterator
initial_state_re = re.compile(r"initial state: ([.#]+)")
rule_re = re.compile(r"([.#]+) => ([.#])")
State = DefaultDict[int, str]
"""Represent a configuration of plants by mapping index to "#" if the pot has a plant, else ".".
"""
Rules = DefaultDict[str, str]
"""Represent the generation rules by mapping a sequence of five pots
to "#" if the center pot has a plant, else ".".
"""
def parse_initial_state(line: str) -> State:
initial_state: State = defaultdict(lambda: ".")
match = initial_state_re.match(line)
if not match:
raise ValueError
for idx, c in enumerate(match.group(1)):
initial_state[idx] = c
return initial_state
def parse_rules(lines: List[str]) -> Rules:
rules: Rules = defaultdict(lambda: ".")
for line in lines:
match = rule_re.search(line)
if not match:
raise ValueError
rules[match.group(1)] = match.group(2)
return rules
def simulate(initial_state: State, rules: Rules) -> Iterator[State]:
"""Produce an iterator yielding successive generations of plants.
"""
prev_state = initial_state.copy()
while True:
state = prev_state.copy()
left = min(k for k in state.keys() if state[k] == "#")
right = max(k for k in state.keys() if state[k] == "#")
for pot in range(left - 2, right + 3):
sequence = "".join(prev_state[p] for p in range(pot - 2, pot + 3))
state[pot] = rules[sequence]
yield state
prev_state = state
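# Minimal sanity check of one generation, using only the helpers above.
# With initial state "#..#" and the single rule "..#.. => #" (an isolated
# plant survives; every unlisted pattern defaults to "."), the first
# generation keeps plants in pots 0 and 3 only:
#
#   state = parse_initial_state("initial state: #..#")
#   rules = parse_rules(["..#.. => #"])
#   assert sum_state(next(simulate(state, rules))) == 3  # pots 0 + 3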
def sum_state(state: State) -> int:
return sum(p for p in state.keys() if state[p] == "#")
def main():
file_name = os.path.dirname(__file__) + "/input.txt"
if len(sys.argv) >= 2:
file_name = sys.argv[1]
with open(file_name, "rU") as f:
lines = f.read().strip().split("\n")
initial_state = parse_initial_state(lines[0])
rules = parse_rules(lines[2:])
sim = simulate(initial_state, rules)
    for _ in range(20):
        gen = next(sim)
        result = sum_state(gen)
print(result)
if __name__ == "__main__":
    main()
| day12/part1.py | 0.511229 | 0.410874 |
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torchvision
import torchvision.transforms as transforms
import os
import argparse
from models import *
from utils import progress_bar
import ipdb
import pickle
import numpy as np
from tqdm import tqdm
from logger import Logger
from supervisor.sup_net import SupervisorNetwork
parser = argparse.ArgumentParser(description='PyTorch CIFAR10 Training')
parser.add_argument('--lr', default=0.1, type=float, help='learning rate')
parser.add_argument('--resume', '-r', action='store_true', help='resume from checkpoint')
parser.add_argument('--exp_name', type=str, default='checkpoint')
parser.add_argument('--l1', action='store_true')
parser.add_argument('--compression', default=0.25, type=float)
args = parser.parse_args()
start_epoch = 0
device = 'cuda' if torch.cuda.is_available() else 'cpu'
# Data
print('==> Preparing data..')
transform_train = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
transform_test = transforms.Compose([
transforms.ToTensor(),
transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2023, 0.1994, 0.2010)),
])
trainset = torchvision.datasets.CIFAR10(root='/home/gauravm/.torch/', train=True, download=True, transform=transform_train)
trainloader = torch.utils.data.DataLoader(trainset, batch_size=128, shuffle=True, num_workers=0)
testset = torchvision.datasets.CIFAR10(root='/home/gauravm/.torch/', train=False, download=True, transform=transform_test)
testloader = torch.utils.data.DataLoader(testset, batch_size=100, shuffle=False, num_workers=0)
classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck')
logger = Logger(os.path.join('./checkpoint',args.exp_name), name='main')
# Model
print('==> Building model..')
# net = VGG('VGG19')
net = ResNet18()
checkpoint = torch.load('res/resnet-18-py3.pth')
net.load_state_dict(checkpoint, strict=False)
net.cuda()
net.eval()
path_dims = [64, 64, 128, 128, 256, 256, 512, 512]
sup_net = SupervisorNetwork(path_dims)
sup_net.cuda()
#TODO define loss
ranking_loss = nn.MarginRankingLoss(margin=0.1)
mse_loss = nn.MSELoss()
optimizer = optim.Adam(sup_net.parameters(), lr=args.lr)
def path_ranking_loss(pred_path, true_path):
pred_path_mtrx_1 = pred_path.unsqueeze(2).repeat(1,1,pred_path.size(1))
pred_path_mtrx_2 = pred_path.unsqueeze(1).repeat(1, pred_path.size(-1), 1)
true_path_mtrx1 = true_path.unsqueeze(2).repeat(1, 1, true_path.size(1))
true_path_mtrx2 = true_path.unsqueeze(1).repeat(1, true_path.size(-1), 1)
target_mtrx = (true_path_mtrx1 > true_path_mtrx2).type(torch.cuda.FloatTensor) \
- (true_path_mtrx1 < true_path_mtrx2).type(torch.cuda.FloatTensor)
# loss = ranking_loss(pred_path_mtrx_1.view(pred_path.size(0), -1),
# pred_path_mtrx_2.view(pred_path.size(0), -1),
# target_mtrx.view(pred_path.size(0), -1))
# loss = F.margin_ranking_loss(pred_path_mtrx_1, pred_path_mtrx_2, target_mtrx, margin=0.1)
loss = (pred_path_mtrx_1 - pred_path_mtrx_2)*(-target_mtrx) + 0.1
loss = torch.clamp(loss, min=0.)
loss = torch.mean(loss)
# ipdb.set_trace()
return loss
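# Numeric sketch of the pairwise hinge above (values illustrative). For a
# single row with pred_path = [0.9, 0.1] and true_path = [1.0, 0.0],
# target_mtrx is [[0, 1], [-1, 0]]; both off-diagonal terms clamp to zero
# because the prediction is correctly ordered with margin > 0.1, while each
# diagonal entry (target 0) contributes the constant 0.1, so the mean loss
# is 0.2 / 4 = 0.05. Tied true-path values therefore impose a fixed floor
# on this loss rather than a real penalty.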
# Training
def train(epoch):
print('\nEpoch: %d Training' % epoch)
sup_net.train()
train_ranking_loss = 0
train_mse_loss = 0
correct = 0
total = 0
for batch_idx, (inputs, targets) in enumerate(trainloader):
inputs = inputs.to(device)
targets = targets.to(device).unsqueeze(1).repeat(1, 10)
one_hot = torch.zeros((inputs.size(0),10)).fill_(1).to(device)
correct_targets = torch.zeros(inputs.size(0), 10 ).type(torch.cuda.FloatTensor)
correct_targets = correct_targets.scatter_(dim=1, index=targets, src=one_hot)
correct_targets = correct_targets.to(device)
incorrect_targets_idx = (targets + torch.from_numpy(np.random.randint(1,10, size=(targets.size(0), 1))).to(device))%10
incorrect_targets = torch.zeros(inputs.size(0), 10).type(
torch.cuda.FloatTensor)
incorrect_targets = incorrect_targets.scatter_(dim=1, index=incorrect_targets_idx,
src=one_hot)
incorrect_targets = incorrect_targets.to(device)
# class_vec = torch.cat((correct_targets, incorrect_targets), dim=0)
# img = inputs.repeat(2,1,1,1)
out, true_correct_paths, true_incorrect_paths = net.forward_with_paths(inputs)
pred_correct_paths = sup_net(correct_targets, inputs)
pred_incorrect_paths = sup_net(incorrect_targets, inputs)
total_path_loss = 0
total_path_loss_mse = 0
total_path_loss_ranking = 0
for i in range(len(path_dims)):
correct_loss = path_ranking_loss(pred_correct_paths[i], true_correct_paths[i])
incorrect_loss = path_ranking_loss(pred_incorrect_paths[i], true_incorrect_paths[i])
correct_loss_mse = mse_loss(pred_correct_paths[i], true_correct_paths[i])
            incorrect_loss_mse = mse_loss(pred_incorrect_paths[i], true_incorrect_paths[i])
total_path_loss += (correct_loss + incorrect_loss + correct_loss_mse + incorrect_loss_mse)
total_path_loss_mse += (correct_loss_mse + incorrect_loss_mse)
total_path_loss_ranking += (correct_loss + incorrect_loss)
optimizer.zero_grad()
total_path_loss.backward()
optimizer.step()
# ipdb.set_trace()
        train_mse_loss += total_path_loss_mse.item()
train_ranking_loss += total_path_loss_ranking.item()
progress_bar(batch_idx, len(trainloader), 'MSE Loss: %.3f Ranking Loss: %.3f' %
(train_mse_loss/(batch_idx+1), train_ranking_loss/(batch_idx+1)))
logger.scalar_summary('train/mse_loss', train_mse_loss/len(trainloader), epoch)
logger.scalar_summary('train/ranking_loss', train_ranking_loss / len(trainloader),
epoch)
if not os.path.exists(os.path.join('./checkpoint', args.exp_name, 'models')):
os.makedirs(os.path.join('./checkpoint', args.exp_name, 'models'))
torch.save(sup_net.state_dict(), os.path.join('./checkpoint',
args.exp_name,
'models',
'model_{}.pth'.format(epoch)))
def test(epoch):
print('\nEpoch: %d Testing' % epoch)
sup_net.eval()
test_loss = 0
correct = 0
total = 0
for batch_idx, (inputs, targets) in enumerate(testloader):
inputs = inputs.to(device)
targets = targets.to(device).unsqueeze(1).repeat(1, 10)
one_hot = torch.zeros((inputs.size(0),10)).fill_(1).to(device)
correct_targets = torch.zeros(inputs.size(0), 10 ).type(torch.cuda.FloatTensor)
correct_targets = correct_targets.scatter_(dim=1, index=targets, src=one_hot)
correct_targets = correct_targets.to(device)
incorrect_targets_idx = (targets + torch.from_numpy(np.random.randint(1,10, size=(targets.size(0), 1))).to(device))%10
incorrect_targets = torch.zeros(inputs.size(0), 10).type(
torch.cuda.FloatTensor)
incorrect_targets = incorrect_targets.scatter_(dim=1, index=incorrect_targets_idx,
src=one_hot)
incorrect_targets = incorrect_targets.to(device)
# class_vec = torch.cat((correct_targets, incorrect_targets), dim=0)
# img = inputs.repeat(2,1,1,1)
out, true_correct_paths, true_incorrect_paths = net.forward_with_paths(inputs)
pred_correct_paths = sup_net(correct_targets, inputs)
pred_incorrect_paths = sup_net(incorrect_targets, inputs)
total_path_loss = 0
for i in range(len(path_dims)):
correct_loss = path_ranking_loss(pred_correct_paths[i], true_correct_paths[i])
incorrect_loss = path_ranking_loss(pred_incorrect_paths[i], true_incorrect_paths[i])
total_path_loss += (correct_loss + incorrect_loss)
# optimizer.zero_grad()
# total_path_loss.backward()
# optimizer.step()
# ipdb.set_trace()
test_loss += total_path_loss.item()
progress_bar(batch_idx, len(testloader), 'Loss: %.3f' % (test_loss/(batch_idx+1)))
logger.scalar_summary('test/ranking_loss', test_loss / len(testloader),
epoch)
def test_pruned_accuracy(epoch):
print('\nEpoch: %d Test Pruned Accuracy' % epoch)
sup_net.eval()
total = 0
total_correct = 0
total_incorrect = 0
pred_correct = 0
pred_incorrect = 0
correct = 0
for batch_idx, (inputs, targets) in tqdm(enumerate(trainloader)):
inputs = inputs.to(device)
vec_targets = targets.to(device)
targets = targets.to(device).unsqueeze(1).repeat(1, 10)
one_hot = torch.zeros((inputs.size(0), 10)).fill_(1).to(device)
correct_targets = torch.zeros(inputs.size(0), 10).type(
torch.cuda.FloatTensor)
correct_targets = correct_targets.scatter_(dim=1, index=targets,
src=one_hot)
correct_targets = correct_targets.to(device)
incorrect_targets_idx = (targets + torch.from_numpy(
np.random.randint(1, 10, size=(targets.size(0), 1))).to(
device)) % 10
incorrect_targets = torch.zeros(inputs.size(0), 10).type(
torch.cuda.FloatTensor)
incorrect_targets = incorrect_targets.scatter_(dim=1,
index=incorrect_targets_idx,
src=one_hot)
incorrect_targets = incorrect_targets.to(device)
pred_correct_paths = sup_net(correct_targets, inputs)
pred_incorrect_paths = sup_net(incorrect_targets, inputs)
correct_outputs, _ = net.pruned_forward_pass_classpath(inputs, pred_correct_paths,
[0, 0, 0, 0, 1, 1, 1,
1], 2)
incorrect_outputs, _ = net.pruned_forward_pass_classpath(inputs,
pred_incorrect_paths,
[0, 0, 0, 0, 1,
1, 1,
1], 2)
total += (correct_outputs.size(0) + incorrect_outputs.size(0))
_, correct_predicted = correct_outputs.max(1)
_, incorrect_predicted = incorrect_outputs.max(1)
batch_correct = correct_predicted.eq(vec_targets).sum().item()
batch_incorrect = incorrect_outputs.size(0) - incorrect_predicted.eq(vec_targets).sum().item()
pred_correct += batch_correct
pred_incorrect += batch_incorrect
total_correct += correct_outputs.size(0)
total_incorrect += incorrect_outputs.size(0)
correct += (batch_correct + batch_incorrect)
    print('accuracy %f' % (correct / total))
logger.scalar_summary('train/accuracy', correct/total,
epoch)
logger.scalar_summary('train/accuracy_given_correct_class', pred_correct / total_correct,
epoch)
logger.scalar_summary('train/accuracy_given_incorrect_class',
pred_incorrect / total_incorrect,
epoch)
### testing
total = 0
total_correct = 0
total_incorrect = 0
pred_correct = 0
pred_incorrect = 0
correct = 0
for batch_idx, (inputs, targets) in tqdm(enumerate(testloader)):
inputs = inputs.to(device)
vec_targets = targets.to(device)
targets = targets.to(device).unsqueeze(1).repeat(1, 10)
one_hot = torch.zeros((inputs.size(0), 10)).fill_(1).to(device)
correct_targets = torch.zeros(inputs.size(0), 10).type(
torch.cuda.FloatTensor)
correct_targets = correct_targets.scatter_(dim=1, index=targets,
src=one_hot)
correct_targets = correct_targets.to(device)
incorrect_targets_idx = (targets + torch.from_numpy(
np.random.randint(1, 10, size=(targets.size(0), 1))).to(
device)) % 10
incorrect_targets = torch.zeros(inputs.size(0), 10).type(
torch.cuda.FloatTensor)
incorrect_targets = incorrect_targets.scatter_(dim=1,
index=incorrect_targets_idx,
src=one_hot)
incorrect_targets = incorrect_targets.to(device)
pred_correct_paths = sup_net(correct_targets, inputs)
pred_incorrect_paths = sup_net(incorrect_targets, inputs)
correct_outputs, _ = net.pruned_forward_pass_classpath(inputs,
pred_correct_paths,
[0, 0, 0, 0, 1,
1, 1,
1], 2)
incorrect_outputs, _ = net.pruned_forward_pass_classpath(inputs,
pred_incorrect_paths,
[0, 0, 0, 0, 1,
1, 1,
1], 2)
total += (correct_outputs.size(0) + incorrect_outputs.size(0))
_, correct_predicted = correct_outputs.max(1)
_, incorrect_predicted = incorrect_outputs.max(1)
batch_correct = correct_predicted.eq(vec_targets).sum().item()
batch_incorrect = incorrect_outputs.size(0) - incorrect_predicted.eq(
vec_targets).sum().item()
pred_correct += batch_correct
pred_incorrect += batch_incorrect
total_correct += correct_outputs.size(0)
total_incorrect += incorrect_outputs.size(0)
correct += (batch_correct + batch_incorrect)
    print('accuracy %f' % (correct / total))
logger.scalar_summary('test/accuracy', correct / total,
epoch)
logger.scalar_summary('test/accuracy_given_correct_class',
pred_correct / total_correct,
epoch)
logger.scalar_summary('test/accuracy_given_incorrect_class',
pred_incorrect / total_incorrect,
epoch)
for epoch in range(start_epoch, start_epoch+200):
test_pruned_accuracy(epoch)
test(epoch)
    train(epoch)
| main_sup.py | 0.388734 | 0.298389 |
import tempfile
import mmap
import re
from boot_linux_console import LinuxKernelTest
class PluginKernelBase(LinuxKernelTest):
"""
Boots a Linux kernel with a TCG plugin enabled.
"""
timeout = 120
KERNEL_COMMON_COMMAND_LINE = 'printk.time=1 panic=-1 '
def run_vm(self, kernel_path, kernel_command_line,
plugin, plugin_log, console_pattern, args):
vm = self.get_vm()
vm.set_console()
vm.add_args('-kernel', kernel_path,
'-append', kernel_command_line,
'-plugin', plugin,
'-d', 'plugin',
'-D', plugin_log,
'-net', 'none',
'-no-reboot')
if args:
vm.add_args(*args)
try:
vm.launch()
except:
# TODO: probably fails because plugins not enabled but we
# can't currently probe for the feature.
self.cancel("TCG Plugins not enabled?")
self.wait_for_console_pattern(console_pattern, vm)
# ensure logs are flushed
vm.shutdown()
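# The tests below scan the plugin log with the regexes used in each test.
# Based on those patterns, a libinsn.so log is expected to contain a line
# roughly like (illustrative):
#
#   insns: 123456789
#
# and libmem.so with arg=both to emit two counters, one from inline
# instrumentation and one from callbacks:
#
#   mem accesses: 4242
#   mem accesses: 4242
#
# The mem test asserts the two counts agree, i.e. both instrumentation
# modes observed the same number of accesses.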
class PluginKernelNormal(PluginKernelBase):
def _grab_aarch64_kernel(self):
kernel_url = ('http://security.debian.org/'
'debian-security/pool/updates/main/l/linux-signed-arm64/'
'linux-image-4.19.0-12-arm64_4.19.152-1_arm64.deb')
kernel_sha1 = '2036c2792f80ac9c4ccaae742b2e0a28385b6010'
kernel_deb = self.fetch_asset(kernel_url, asset_hash=kernel_sha1)
kernel_path = self.extract_from_deb(kernel_deb,
"/boot/vmlinuz-4.19.0-12-arm64")
return kernel_path
def test_aarch64_virt_insn(self):
"""
:avocado: tags=accel:tcg
:avocado: tags=arch:aarch64
:avocado: tags=machine:virt
:avocado: tags=cpu:cortex-a57
"""
kernel_path = self._grab_aarch64_kernel()
kernel_command_line = (self.KERNEL_COMMON_COMMAND_LINE +
'console=ttyAMA0')
console_pattern = 'Kernel panic - not syncing: VFS:'
plugin_log = tempfile.NamedTemporaryFile(mode="r+t", prefix="plugin",
suffix=".log")
self.run_vm(kernel_path, kernel_command_line,
"tests/plugin/libinsn.so", plugin_log.name,
console_pattern,
args=('-cpu', 'cortex-a53'))
with plugin_log as lf, \
mmap.mmap(lf.fileno(), 0, access=mmap.ACCESS_READ) as s:
m = re.search(br"insns: (?P<count>\d+)", s)
if "count" not in m.groupdict():
self.fail("Failed to find instruction count")
def test_aarch64_virt_insn_icount(self):
"""
:avocado: tags=accel:tcg
:avocado: tags=arch:aarch64
:avocado: tags=machine:virt
:avocado: tags=cpu:cortex-a57
"""
kernel_path = self._grab_aarch64_kernel()
kernel_command_line = (self.KERNEL_COMMON_COMMAND_LINE +
'console=ttyAMA0')
console_pattern = 'Kernel panic - not syncing: VFS:'
plugin_log = tempfile.NamedTemporaryFile(mode="r+t", prefix="plugin",
suffix=".log")
self.run_vm(kernel_path, kernel_command_line,
"tests/plugin/libinsn.so", plugin_log.name,
console_pattern,
args=('-cpu', 'cortex-a53', '-icount', 'shift=1'))
with plugin_log as lf, \
mmap.mmap(lf.fileno(), 0, access=mmap.ACCESS_READ) as s:
m = re.search(br"detected repeat execution @ (?P<addr>0x[0-9A-Fa-f]+)", s)
if m is not None and "addr" in m.groupdict():
self.fail("detected repeated instructions")
def test_aarch64_virt_mem_icount(self):
"""
:avocado: tags=accel:tcg
:avocado: tags=arch:aarch64
:avocado: tags=machine:virt
:avocado: tags=cpu:cortex-a57
"""
kernel_path = self._grab_aarch64_kernel()
kernel_command_line = (self.KERNEL_COMMON_COMMAND_LINE +
'console=ttyAMA0')
console_pattern = 'Kernel panic - not syncing: VFS:'
plugin_log = tempfile.NamedTemporaryFile(mode="r+t", prefix="plugin",
suffix=".log")
self.run_vm(kernel_path, kernel_command_line,
"tests/plugin/libmem.so,arg=both", plugin_log.name,
console_pattern,
args=('-cpu', 'cortex-a53', '-icount', 'shift=1'))
with plugin_log as lf, \
mmap.mmap(lf.fileno(), 0, access=mmap.ACCESS_READ) as s:
m = re.findall(br"mem accesses: (?P<count>\d+)", s)
            if len(m) != 2:
self.fail("no memory access counts found")
else:
inline = int(m[0])
callback = int(m[1])
if inline != callback:
self.fail("mismatched access counts") | qemu/tests/acceptance/tcg_plugins.py |
| qemu/tests/acceptance/tcg_plugins.py | 0.207295 | 0.09426 |
from typing import Callable, Optional
MAX_STR_LENGTH = 255
class Validator:
@staticmethod
def validate(t, arg):
return not arg or isinstance(arg, t)
class IntValidator(Validator):
@staticmethod
    def validate(arg: int):
"""
Validate if arg is instance of integer
:param arg:
:return: boolean
"""
return Validator.validate(int, arg)
@staticmethod
def validate_range(arg: int, min: int, max: int):
"""
Validate if arg is an instance of integer
and in range of [min, max]
min <= arg <= max
:param arg:
:param min: minimum in a range
:param max: maximum in a range
:return: boolean
"""
return IntValidator.validate(arg) and min <= arg <= max
@staticmethod
def validate_non_zero(arg: int):
"""
Validate if arg is an instance of integer
and not zero
:param arg:
        :return: boolean
"""
return IntValidator.validate(arg) and arg
@staticmethod
def strip_and_cast(arg):
if arg:
return int(arg.strip())
return arg
class StrValidator(Validator):
@staticmethod
def validate(arg: str):
"""
Validate if arg is an instance of string
:param arg:
:return: boolean
"""
return Validator.validate(str, arg)
@staticmethod
def validate_non_empty(arg: str):
"""
Validate if arg is an instance of string
and non empty
:param arg:
:return: boolean
"""
return StrValidator.validate(arg) and arg
@staticmethod
def validate_length(arg: str, l: int):
"""
Validate if arg is an instance of str
and non empty and the length less or equal to l
:param arg:
:param l: len
:return: boolean
"""
return StrValidator.validate_non_empty(arg) and len(arg) <= l
@staticmethod
def validate_max_length(arg: str):
"""
Validate if arg is an instance of str,
non empty and the length less or equal to MAX_STR_LENGTH
:param arg:
:return: boolean
"""
return StrValidator.validate_length(arg, MAX_STR_LENGTH)
@staticmethod
def strip_and_cast(arg):
return str(arg.strip())
def get_invalid_error(name: str):
"""
compose the error reason string
:param name: name of the invalid parameter
:return:
"""
return " Invalid " + name
class FormValidator:
_attributes = None
_name = None
def __init__(self, name):
self._attributes = []
self._name = name
    def add(self, name: str, type: type, length: int = 0, cf: Callable = None,
            vf: Callable = None) -> None:
self._attributes.append({'name': name, 'type': type, 'length': length, 'cf': cf, 'vf': vf})
    def cast(self, form: dict) -> tuple:
tmp = {}
for attr in self._attributes:
# print("Validating attribute " + attr['name'])
if form[attr['name']] is not None:
try:
tmp[attr['name']] = attr['cf'](form[attr['name']])
                except Exception:
err = get_invalid_error(attr['name'])
return {}, err
return tmp, None
    def validate(self, form) -> Optional[str]:
for attr in self._attributes:
if not attr['vf'](form[attr['name']]):
err = self._name + " " + attr['name']
return get_invalid_error(err)
return None
    def validate_at_least_one(self, form, l: list, vf: Callable) -> bool:
return len(list(filter(lambda x: vf(form[x]), l))) != 0
ServiceValidator = FormValidator('service')
ServiceValidator.add(name='name', type=str, length=MAX_STR_LENGTH, cf=StrValidator.strip_and_cast,
vf=StrValidator.validate_max_length)
ServiceValidator.add(name='description', type=str, cf=StrValidator.strip_and_cast, vf=StrValidator.validate)
ServiceValidator.add(name='server_container', type=str, length=MAX_STR_LENGTH, cf=StrValidator.strip_and_cast,
vf=StrValidator.validate_max_length)
ServiceValidator.add(name='agent_container', type=str, length=MAX_STR_LENGTH, cf=StrValidator.strip_and_cast,
vf=StrValidator.validate_max_length)
ServiceValidator.add(name='server_http_port', type=int, cf=IntValidator.strip_and_cast, vf=IntValidator.validate)
ServiceValidator.add(name='server_tcp_port', type=int, cf=IntValidator.strip_and_cast, vf=IntValidator.validate)
ServiceValidator.add(name='agent_http_port', type=int, cf=IntValidator.strip_and_cast, vf=IntValidator.validate)
ServiceValidator.add(name='agent_tcp_port', type=int, cf=IntValidator.strip_and_cast, vf=IntValidator.validate)
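# A minimal usage sketch (not part of the original module): the form values
# below are invented to show the cast-then-validate flow end to end.
if __name__ == "__main__":
    form = {
        'name': '  my-service  ',
        'description': 'demo',
        'server_container': 'img/server',
        'agent_container': 'img/agent',
        'server_http_port': ' 8080 ',
        'server_tcp_port': ' 9090 ',
        'agent_http_port': ' 8081 ',
        'agent_tcp_port': ' 9091 ',
    }
    cast_form, err = ServiceValidator.cast(form)
    if err is None:
        err = ServiceValidator.validate(cast_form)
    print(cast_form, err)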
| papaya_server/validator.py | 0.807688 | 0.416797 |
import bl2sdk
import datetime
class DPS(bl2sdk.BL2MOD):
Name = "DPS/TTK Calculator"
Description = "Upon damaging an enemy a counter will start in the background. On kill it will then calculate the " \
"DPS in that amount of time, from dealing damage to the kill. The longer you need to kill an enemy " \
"the more precise the DPS actually is. "
Author = "Juso"
_started_at = datetime.datetime.utcnow()
_DamagedEnemy = False
    # Returns the current WillowPlayerController
def GetPlayerController(self):
return bl2sdk.GetEngine().GamePlayers[0].Actor
def DPSFeedback(self, EnemyName):
        # This gets the player's HUD
playerController = self.GetPlayerController()
HUDMovie = playerController.GetHUDMovie()
        time_passed = datetime.datetime.utcnow() - self._started_at
        # guard against a kill landing in the same instant as the first hit
        seconds = max(time_passed.total_seconds(), 1e-6)
        DPSCalcString = str(int(self._CombinedDamage / seconds)) + " DPS"
        # This first clears the old message and then writes the new one
        HUDMovie.ClearTrainingText()
        HUDMovie.AddTrainingText("Dealt: " + str(int(self._CombinedDamage)) + " in " + str(round(seconds, 2)) + " seconds", DPSCalcString, 10.000000, (), "", False, 0, playerController.PlayerReplicationInfo, True)
_CombinedDamage = 0
def HandleKill(self, caller, function, params):
self._DamagedEnemy = False
self.DPSFeedback(params.EnemyName)
self._CombinedDamage = 0
return True
    def HandleDamage(self, caller, function, params):
        if params.InDamageInstigator == self.GetPlayerController():
            if not self._DamagedEnemy:
                # first hit on this enemy: start the DPS timer
                self._DamagedEnemy = True
                self._started_at = datetime.datetime.utcnow()
            # accumulate every hit, not only the first one
            self._CombinedDamage += params.IncomingDamage
        return True
def DamagedEnemy(self):
return self._DamagedEnemy
KillHook = "WillowGame.WillowPlayerController.NotifyKilledEnemy"
DamagedHook = "WillowGame.WillowDamagePipeline.AdjustDamage"
def Enable(self):
bl2sdk.RegisterHook(self.KillHook, "KillHook", KilledHook)
bl2sdk.RegisterHook(self.DamagedHook, "DamagedHook", DamageHook)
def Disable(self):
bl2sdk.RemoveHook(self.KillHook, "KillHook")
bl2sdk.RemoveHook(self.DamagedHook, "DamagedHook")
DPSInstance = DPS()
def KilledHook(caller: bl2sdk.UObject, function: bl2sdk.UFunction, params: bl2sdk.FStruct) -> bool:
DPSInstance.HandleKill(caller, function, params)
return True
def DamageHook(caller: bl2sdk.UObject, function: bl2sdk.UFunction, params: bl2sdk.FStruct) -> bool:
    # forward every damage event; HandleDamage itself decides when to
    # (re)start the timer, so damage keeps accumulating until the kill
    DPSInstance.HandleDamage(caller, function, params)
    return True
bl2sdk.Mods.append(DPSInstance)
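# A minimal, self-contained sketch (not part of the mod): it performs the same
# DPS arithmetic the mod does, with invented damage numbers and the same
# zero-elapsed-time guard used above.
if __name__ == "__main__":
    combined_damage = 12500.0
    elapsed_seconds = max(3.2, 1e-6)
    print(str(int(combined_damage / elapsed_seconds)) + " DPS")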
| DPS and TTK/__init__.py | 0.264358 | 0.239372 |
"""
Module for building a complete daily dataset from Quandl's WIKI dataset.
"""
from io import BytesIO
import tarfile
from zipfile import ZipFile
from click import progressbar
from logbook import Logger
import pandas as pd
import requests
from six.moves.urllib.parse import urlencode
from six import iteritems
from trading_calendars import register_calendar_alias
from zipline.utils.deprecate import deprecated
from zipline.data import bundles
from zipline.data.hdf5_daily_bars import HDF5DailyBarWriter, HDF5DailyBarReader
import numpy as np
log = Logger(__name__)
@bundles.register('hdfbundle')
def quandl_bundle(environ,
asset_db_writer,
minute_bar_writer,
daily_bar_writer,
adjustment_writer,
calendar,
start_session,
end_session,
cache,
show_progress,
output_dir):
"""
quandl_bundle builds a daily dataset using Quandl's WIKI Prices dataset.
For more information on Quandl's API and how to obtain an API key,
please visit https://docs.quandl.com/docs#section-authentication
"""
api_key = environ.get('QUANDL_API_KEY')
if api_key is None:
raise ValueError(
"Please set your QUANDL_API_KEY environment variable and retry."
)
raw_data = fetch_data()
asset_metadata = gen_asset_metadata(
raw_data[['symbol', 'date']],
show_progress
)
asset_db_writer.write(asset_metadata)
symbol_map = asset_metadata.symbol
sessions = calendar.sessions_in_range(start_session, end_session)
raw_data.set_index(['date', 'symbol'], inplace=True)
daily_bar_writer.write(
parse_pricing_and_vol(
raw_data,
sessions,
symbol_map
),
show_progress=show_progress
)
raw_data.reset_index(inplace=True)
raw_data['symbol'] = raw_data['symbol'].astype('category')
raw_data['sid'] = raw_data.symbol.cat.codes
adjustment_writer.write(
splits=parse_splits(
raw_data[[
'sid',
'date',
'split_ratio',
]].loc[raw_data.split_ratio != 1],
show_progress=show_progress
),
dividends=parse_dividends(
raw_data[[
'sid',
'date',
'ex_dividend',
]].loc[raw_data.ex_dividend != 0],
show_progress=show_progress
)
)
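# Typical ingestion (assuming zipline's CLI is installed and this bundle
# module is loaded through the extension mechanism; the command below is
# illustrative, not something this module runs itself):
#   $ QUANDL_API_KEY=... zipline ingest -b hdfbundle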
def fetch_data(*args, **kwargs):
    """
    Fetch data from the database.
    NOTE: currently a stub that returns an empty, correctly-shaped frame.
    """
    try:
        return pd.DataFrame(columns=['symbol',
                                     'date',
                                     'open',
                                     'high',
                                     'low',
                                     'close',
                                     'volume',
                                     'ex_dividend',
                                     'split_ratio'])
    except Exception:
        log.exception("Exception in fetching data")
def gen_asset_metadata(data, show_progress=False):
    if show_progress:
        log.info('Generating asset metadata.')
    data = data.groupby(by='symbol').agg({'date': ['min', 'max']})
    data.reset_index(inplace=True)
    # flatten the ('date', 'min'/'max') MultiIndex produced by the aggregation
    data.columns = ['symbol', 'start_date', 'end_date']
    data['exchange'] = 'QUANDL'
    data['auto_close_date'] = data['end_date'].values + pd.Timedelta(days=1)
    return data
def parse_splits(data, show_progress):
if show_progress:
log.info('Parsing split data.')
data['split_ratio'] = 1.0 / data.split_ratio
data.rename(
columns={
'split_ratio': 'ratio',
'date': 'effective_date',
},
inplace=True,
copy=False,
)
return data
def parse_dividends(data, show_progress):
if show_progress:
log.info('Parsing dividend data.')
data['record_date'] = data['declared_date'] = data['pay_date'] = pd.NaT
data.rename(
columns={
'ex_dividend': 'amount',
'date': 'ex_date',
},
inplace=True,
copy=False,
)
return data
def parse_pricing_and_vol(data,
sessions,
symbol_map):
for asset_id, symbol in iteritems(symbol_map):
asset_data = data.xs(
symbol,
level=1
).reindex(
sessions.tz_localize(None)
).fillna(0.0)
yield asset_id, asset_data
register_calendar_alias("CSVDIR", "NYSE")
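# A minimal, self-contained sketch (not part of the bundle): it feeds an
# invented symbol/date frame through gen_asset_metadata above to show the
# resulting asset-metadata shape.
if __name__ == "__main__":
    toy = pd.DataFrame({
        'symbol': ['AAA', 'AAA', 'BBB'],
        'date': pd.to_datetime(['2020-01-02', '2020-01-03', '2020-01-02']),
    })
    print(gen_asset_metadata(toy, show_progress=True))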
| alpha_factory/data/bundles/hdf_bundle.py | 0.342572 | 0.234242 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .graph import TernaryTree as SuperTernaryTree
def _clone(x):
    if x is None:
        # state may legitimately be None (the constructor default)
        return None
    if isinstance(x, tuple):
        return tuple(_.clone() for _ in x)
    return x.clone()
class TernaryVectex(object):
def __init__(self, idx, word, state=None):
self.idx = idx
self.word = word
self.state = state
self.logprob = 0
# children
self.a = None
self.b = None
self.c = None
@classmethod
def from_vectex(cls, vectex):
return cls(vectex.idx, vectex.word)
def clone(self):
v = TernaryVectex(self.idx, self.word.clone(), _clone(self.state))
v.logprob = self.logprob
return v
class TernaryTree(object):
def __init__(self):
self.vectices = {}
self.root = None
self.size = 0
self.logprob = 0
def clone(self):
get_idx = lambda x: -1 if x is None else x.idx
adjacency = {}
for v in self.vectices.values():
adjacency.update({v.idx: (get_idx(v.a), get_idx(v.b), get_idx(v.c))})
t = TernaryTree()
t.vectices = {k: v.clone() for k,v in self.vectices.items()}
t.root = t.vectices[0] if len(t.vectices) > 0 else None
for v in t.vectices.values():
if adjacency[v.idx][0] == -1:
continue
v.a = t.vectices[adjacency[v.idx][0]]
v.b = t.vectices[adjacency[v.idx][1]]
v.c = t.vectices[adjacency[v.idx][2]]
t.logprob = self.logprob
return t
def __preorder_traversal(self, vectex, words):
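        # NOTE: despite the name, this walk emits the `a` subtree before the
        # vertex's own word (an in-order style traversal).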
if vectex.a is not None and vectex.a.word != "EOB":
self.__preorder_traversal(vectex.a, words)
words.append(vectex.word)
if vectex.b is not None and vectex.b.word != "EOB":
self.__preorder_traversal(vectex.b, words)
if vectex.c is not None and vectex.c.word != "EOB":
self.__preorder_traversal(vectex.c, words)
def decode(self, vocab):
for v in self.vectices.values():
v.word = vocab[str(v.word.item())]
words = []
self.__preorder_traversal(self.root, words)
return " ".join(words[1:])
def pprint(self):
for v in self.vectices.values():
print(str(v.idx) + ': ' + v.word)
if v.a is not None:
print(v.a.word + '_' + v.b.word + '_' + v.c.word)
            print()
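# A minimal, self-contained sketch (not part of the original module): `Tok`
# stands in for the tensor-like word objects the tree expects (anything with
# a .clone() method works), and the two-vertex tree is invented for the demo.
if __name__ == "__main__":
    class Tok:
        def __init__(self, text):
            self.text = text

        def clone(self):
            return Tok(self.text)

    root = TernaryVectex(0, Tok("<root>"))
    leaf = TernaryVectex(1, Tok("hello"))
    root.a = root.b = root.c = leaf
    tree = TernaryTree()
    tree.vectices = {0: root, 1: leaf}
    tree.root = root
    copy = tree.clone()
    assert copy.vectices[1] is not leaf        # deep copy, new vertex objects
    assert copy.vectices[1].word.text == "hello"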
| graph_utils/TernaryTree.py | 0.735167 | 0.128498 |
from reporter.connections import RedcapInstance
from reporter.application_abstract_reports.redcap.percentage_complete import (
RedcapPercentageCompleteReport,
)
from reporter.application_abstract_reports.redcap.withdrawn_or_excluded_with_data import (
RedcapWithdrawnOrExcludedWithDataReport,
)
from reporter.emailing import (
RECIPIENT_BRICCS_ADMIN as RECIPIENT_ADMIN,
RECIPIENT_BRICCS_MANAGER as RECIPIENT_MANAGER,
RECIPIENT_IT_DQ,
)
from reporter.application_abstract_reports.redcap.web_data_quality import (
RedcapWebDataQuality,
)
from reporter.application_abstract_reports.redcap.data_quality import (
RedcapInvalidNhsNumber,
RedcapInvalidDate,
RedcapInvalidBloodPressure,
RedcapInvalidPulse,
RedcapInvalidHeightInCm,
RedcapInvalidWeightInKg,
RedcapInvalidBmi,
RedcapInvalidUhlSystemNumber,
RedcapInvalidPostCode,
RedcapInvalidEmailAddress,
RedcapFieldMatchesRegularExpression,
RedcapMissingDataWhen,
RedcapMissingData,
)
REDCAP_LEICESTER_PROJECT_ID = 24
REDCAP_DONCASTER_PROJECT_ID = 13
REDCAP_SHEFFIELD_PROJECT_ID = 14
REDCAP_KETTERING_PROJECT_ID = 15
REDCAP_CHESTERFIELD_PROJECT_ID = 16
REDCAP_GRANTHAM_PROJECT_ID = 17
REDCAP_LINCOLN_PROJECT_ID = 18
REDCAP_NORTHAMPTON_PROJECT_ID = 19
REDCAP_DERBY_PROJECT_ID = 25
REDCAP_BOSTON_PROJECT_ID = 26
REDCAP_NOTTINGHAM_PROJECT_ID = 27
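# The site-specific classes below all share one shape; this helper is a
# hypothetical illustration (it is not used anywhere in this module) of how
# that repeated pattern could be parameterised per REDCap project.
def _make_invalid_nhs_number_report(redcap_instance, project_id):
    class _Report(RedcapInvalidNhsNumber):
        def __init__(self):
            super().__init__(
                redcap_instance=redcap_instance,
                project_id=project_id,
                fields=['nhs_number'],
                recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
            )
    return _Report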
# All
class BriccsRedcapPercentageCompleteReport(RedcapPercentageCompleteReport):
def __init__(self):
super().__init__(
study_name='BRICCS',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsRedcapWithdrawnOrExcludedWithDataReport(
RedcapWithdrawnOrExcludedWithDataReport):
def __init__(self):
super().__init__(
study_name='BRICCS',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Leicester
class BriccsLeicesterRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidUhlSystemNumber(
RedcapInvalidUhlSystemNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['s_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLeicesterRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
'urine_sample',
],
            regular_expression=r'^(|BSa\d{8}|BSa00\d{8})$',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLeicesterRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Doncaster
class BriccsDoncasterRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsDoncasterBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDoncasterRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDoncasterRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Sheffield
class BriccsSheffieldRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsSheffieldBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsSheffieldRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsSheffieldRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Kettering
class BriccsKetteringRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsKetteringBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsKetteringRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsKetteringRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Chesterfield
class BriccsChesterfieldRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsChesterfieldBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsChesterfieldRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsChesterfieldRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Grantham
class BriccsGranthamRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsGranthamBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsGranthamRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsGranthamRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
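# NOTE: the first reading's diastolic field is 'part_bp_dias' rather than
# 'part_bp1_dias'; the same naming is used at every site, so it appears to
# be the actual REDCap field name rather than a typo.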
class BriccsGranthamRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Lincoln
class BriccsLincolnRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsLincolnBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLincolnRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLincolnRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Northampton
class BriccsNorthamptonRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsNorthamptonBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNorthamptonRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNorthamptonRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Derby
class BriccsDerbyRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsDerbyBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDerbyRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDerbyRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Boston
class BriccsBostonRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsBostonBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsBostonRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsBostonRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Nottingham
class BriccsNottinghamRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsNottinghamBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNottinghamRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNottinghamRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
) | reporter/uhl_reports/briccs/data_quality/redcap.py |
from reporter.connections import RedcapInstance
from reporter.application_abstract_reports.redcap.percentage_complete import (
RedcapPercentageCompleteReport,
)
from reporter.application_abstract_reports.redcap.withdrawn_or_excluded_with_data import (
RedcapWithdrawnOrExcludedWithDataReport,
)
from reporter.emailing import (
RECIPIENT_BRICCS_ADMIN as RECIPIENT_ADMIN,
RECIPIENT_BRICCS_MANAGER as RECIPIENT_MANAGER,
RECIPIENT_IT_DQ,
)
from reporter.application_abstract_reports.redcap.web_data_quality import (
RedcapWebDataQuality,
)
from reporter.application_abstract_reports.redcap.data_quality import (
RedcapInvalidNhsNumber,
RedcapInvalidDate,
RedcapInvalidBloodPressure,
RedcapInvalidPulse,
RedcapInvalidHeightInCm,
RedcapInvalidWeightInKg,
RedcapInvalidBmi,
RedcapInvalidUhlSystemNumber,
RedcapInvalidPostCode,
RedcapInvalidEmailAddress,
RedcapFieldMatchesRegularExpression,
RedcapMissingDataWhen,
RedcapMissingData,
)
REDCAP_LEICESTER_PROJECT_ID = 24
REDCAP_DONCASTER_PROJECT_ID = 13
REDCAP_SHEFFIELD_PROJECT_ID = 14
REDCAP_KETTERING_PROJECT_ID = 15
REDCAP_CHESTERFIELD_PROJECT_ID = 16
REDCAP_GRANTHAM_PROJECT_ID = 17
REDCAP_LINCOLN_PROJECT_ID = 18
REDCAP_NORTHAMPTON_PROJECT_ID = 19
REDCAP_DERBY_PROJECT_ID = 25
REDCAP_BOSTON_PROJECT_ID = 26
REDCAP_NOTTINGHAM_PROJECT_ID = 27
# All
class BriccsRedcapPercentageCompleteReport(RedcapPercentageCompleteReport):
def __init__(self):
super().__init__(
study_name='BRICCS',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsRedcapWithdrawnOrExcludedWithDataReport(
RedcapWithdrawnOrExcludedWithDataReport):
def __init__(self):
super().__init__(
study_name='BRICCS',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Leicester
class BriccsLeicesterRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidUhlSystemNumber(
RedcapInvalidUhlSystemNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['s_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['record_id'],
regular_expression='BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLeicesterRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
'urine_sample',
],
regular_expression='^(|BSa\d{8}|BSa00\d{8})$',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLeicesterRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLeicesterRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.internal,
project_id=REDCAP_LEICESTER_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Doncaster
class BriccsDoncasterRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsDoncasterBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['record_id'],
regular_expression='BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDoncasterRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
regular_expression='(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDoncasterRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDoncasterRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DONCASTER_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Sheffield
class BriccsSheffieldRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsSheffieldBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['record_id'],
regular_expression='BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsSheffieldRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
regular_expression='(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsSheffieldRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsSheffieldRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_SHEFFIELD_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Kettering
class BriccsKetteringRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsKetteringBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['record_id'],
regular_expression='BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsKetteringRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
regular_expression='(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsKetteringRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsKetteringRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_KETTERING_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Chesterfield
class BriccsChesterfieldRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsChesterfieldBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['record_id'],
regular_expression='BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsChesterfieldRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
regular_expression='(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsChesterfieldRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsChesterfieldRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_CHESTERFIELD_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Grantham
class BriccsGranthamRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsGranthamBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsGranthamRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsGranthamRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsGranthamRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_GRANTHAM_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Lincoln
class BriccsLincolnRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsLincolnBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLincolnRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsLincolnRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsLincolnRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_LINCOLN_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Northampton
class BriccsNorthamptonRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsNorthamptonBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNorthamptonRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNorthamptonRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNorthamptonRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NORTHAMPTON_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Derby
class BriccsDerbyRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsDerbyBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDerbyRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsDerbyRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsDerbyRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_DERBY_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Boston
class BriccsBostonRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsBostonBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsBostonRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsBostonRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsBostonRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_BOSTON_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
# Nottingham
class BriccsNottinghamRedcapWebDataQuality(RedcapWebDataQuality):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
recipients=[RECIPIENT_IT_DQ]
)
class BriccsNottinghamBmiMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_bmi'],
indicator_field='criteria_met',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamHeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_height'],
indicator_field='epi_obs_height_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamWeightMissing(RedcapMissingDataWhen):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_weight'],
indicator_field='epi_obs_weight_yn',
indicator_value='1',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidNhsNumber(RedcapInvalidNhsNumber):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['nhs_number'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidDate(
RedcapInvalidDate):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidStudyNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['record_id'],
            regular_expression=r'BPt\d{8}',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNottinghamRedcapInvalidSampleNumber(
RedcapFieldMatchesRegularExpression):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=[
'blood_tube1',
'blood_tube2',
'blood_tube3',
'blood_tube4',
'blood_tube5',
],
            regular_expression=r'(|BSa\d{8})',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
ignore_case=True,
)
class BriccsNottinghamRedcapInvalidPostCode(
RedcapInvalidPostCode):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['address_postcode'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidEmailAddress(
RedcapInvalidEmailAddress):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=[
'pat_email1',
'pat_email2',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressure1(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_bp1_sys',
diastolic_field_name='part_bp_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressure2(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_bp2_sys',
diastolic_field_name='part_bp2_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressure3(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_bp3_sys',
diastolic_field_name='part_bp3_dias',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBloodPressureAvg(
RedcapInvalidBloodPressure):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
systolic_field_name='part_avg_sys_bp',
diastolic_field_name='part_avg_dias_bp',
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidPulse(
RedcapInvalidPulse):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=[
'part_pulse1',
'part_pulse2',
'part_pulse3',
'avg_pulse',
],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidHeightInCm(
RedcapInvalidHeightInCm):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_height'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidWeightInKg(
RedcapInvalidWeightInKg):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_weight'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
)
class BriccsNottinghamRedcapInvalidBmi(
RedcapInvalidBmi):
def __init__(self):
super().__init__(
redcap_instance=RedcapInstance.external,
project_id=REDCAP_NOTTINGHAM_PROJECT_ID,
fields=['part_bmi'],
recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
        )
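# The per-site classes above differ only in their class name and project ID.
# Below is a minimal sketch of a factory that builds the same checks
# programmatically; it assumes the base classes keep the signatures used
# above, and the make_site_checks/specs names are illustrative additions,
# not part of the original source.
def make_site_checks(site_name, project_id):
    """Return a {class_name: class} mapping of one site's standard checks."""
    common = dict(
        redcap_instance=RedcapInstance.external,
        project_id=project_id,
        recipients=[RECIPIENT_ADMIN, RECIPIENT_MANAGER],
    )
    specs = [
        ('RedcapInvalidNhsNumber', RedcapInvalidNhsNumber, {'fields': ['nhs_number']}),
        ('RedcapInvalidPostCode', RedcapInvalidPostCode, {'fields': ['address_postcode']}),
        ('RedcapInvalidBmi', RedcapInvalidBmi, {'fields': ['part_bmi']}),
        # ...extend with the remaining checks in the same pattern...
    ]
    checks = {}
    for suffix, base, extra in specs:
        name = 'Briccs{}{}'.format(site_name, suffix)
        # bind base/extra via default args so each generated class keeps its own
        def __init__(self, _base=base, _extra=extra):
            _base.__init__(self, **dict(common, **_extra))
        checks[name] = type(name, (base,), {'__init__': __init__})
    return checks
# e.g. globals().update(make_site_checks('Chesterfield', REDCAP_CHESTERFIELD_PROJECT_ID)),
# assuming the surrounding framework discovers checks by scanning module globals.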
import os
import logging
import sqlalchemy
import sqlalchemy.orm  # sessionmaker lives in the orm subpackage and is not re-exported
from findd.services import Findd
from findd.utils.path import parents
__LOG__ = logging.getLogger(__name__)
DEBUG_MORE = 5
class ContextException(Exception):
pass
class Context(object):
def __init__(self, base_dir, findd_dir, db_url):
assert base_dir is not None
assert findd_dir is not None
assert db_url is not None
assert os.path.exists(base_dir)
self.base_dir = base_dir
self.findd_dir = findd_dir
self.db_url = db_url
def assert_findd_dir_does_not_exists(self):
if os.path.exists(self.findd_dir):
raise ContextException('.findd directory already exists')
def assert_findd_dir_exists(self):
if not os.path.exists(self.findd_dir):
raise ContextException('.findd directory not found')
def db_engine(self):
echo = __LOG__.isEnabledFor(DEBUG_MORE)
return sqlalchemy.create_engine(self.db_url, echo=echo)
def db_sessionmaker(self):
return sqlalchemy.orm.sessionmaker(bind=self.db_engine())
def db_session(self):
return self.db_sessionmaker()()
def findd(self):
return Findd(base_dir=self.base_dir, db_session=self.db_session())
@property
def is_excluded(self):
return lambda path: os.path.basename(path) == '.findd'
def search_base_dir(start_dir=None):
if start_dir is None:
start_dir = os.getcwd()
for path in parents(start_dir):
if os.path.exists(os.path.join(path, '.findd')):
return path
return os.getcwd()
def create_from_base_dir(base_dir=None):
if base_dir is None:
base_dir = search_base_dir()
findd_dir = os.path.join(base_dir, '.findd')
db_path = os.path.join(findd_dir, 'findd.sqlite3')
db_url = 'sqlite:///%s' % db_path
return Context(
base_dir=base_dir,
findd_dir=findd_dir,
db_url=db_url,
)
def configure_logging(verbosity):
levels = [
logging.CRITICAL,
logging.ERROR,
logging.WARNING,
logging.INFO,
logging.DEBUG,
DEBUG_MORE,
]
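    # clamp verbosity into [-2, 3]; verbosity 0 maps to index 2 (WARNING)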
logging.basicConfig(
format='%(levelname)s: %(message)s',
level=levels[2 + max(min(verbosity, 3), -2)]
)
    logging.addLevelName(DEBUG_MORE, 'DEBUG')
# source: findd/cli/context.py
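# A minimal usage sketch of the helpers above; the verbosity value is an
# illustrative assumption, not a project default.
def _demo_context():
    configure_logging(1)              # verbosity 1 -> logging.INFO
    ctx = create_from_base_dir()      # walk up from cwd to the nearest .findd
    ctx.assert_findd_dir_exists()     # raises ContextException when missing
    return ctx.findd()                # Findd service bound to a new DB session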
FILE = 'dictionary.txt'
word_list = []
found_word_list = []
def main():
"""
Input: 4 rows of 4 letters
    Output: all the words found within the given 4 x 4 letter matrix under the rules of the game 'Boggle'
"""
global word_list
global found_word_list
word_list = read_dictionary()
letter_list = []
for i in range(4):
row = str(input(f'{i+1} row of letters: '))
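        # 4 letters separated by single spaces -> exactly 7 characters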
if len(row) == 7:
new_row = row.lower()
split_new_row = new_row.split()
letter_list.append(split_new_row)
else:
print('Illegal input')
break
if len(letter_list) == 4:
current = []
for i in range(4):
for j in range(4):
current.append([i, j])
search_machine(letter_list, current)
current.pop()
print(f'There are {len(found_word_list)} words in total.')
def search_machine(letter_list, current):
"""
:param letter_list: list, containing 4 sub-lists (each sub-list contains 4 letters)
    :param current: list of [i, j] index pairs tracing the path of letters chosen so far
:return: this function returns nothing
"""
prev_index = current[len(current) - 1]
prev_i = prev_index[0]
prev_j = prev_index[1]
for x in range(-1, 2, 1):
for y in range(-1, 2, 1):
next_index_i = prev_i + x
next_index_j = prev_j + y
if 0 <= next_index_i < 4:
if 0 <= next_index_j < 4:
if [next_index_i, next_index_j] not in current:
current.append([next_index_i, next_index_j])
str_current = ''
for ele in current:
ch = letter_list[ele[0]][ele[1]]
str_current += ch
# Choose
if len(current) >= 4:
if str_current in word_list:
if str_current not in found_word_list:
                        print(f'Found: "{str_current}"')
found_word_list.append(str_current)
if has_prefix(str_current):
search_machine(letter_list, current)
# Explore
current.pop()
# Un-choose
def read_dictionary():
"""
This function reads file "dictionary.txt" stored in FILE
and appends words in each line into a Python list
"""
with open(FILE, 'r') as f:
for line in f:
word = line.strip()
word_list.append(word)
return word_list
def has_prefix(sub_s):
"""
:param sub_s: (str) A substring that is constructed by neighboring letters on a 4x4 square grid
    :return: (bool) whether any word in the dictionary starts with sub_s
"""
for word in word_list:
if word.startswith(sub_s):
return True
return False
if __name__ == '__main__':
    main()
# source: stanCode_projects/boggle_game_solver/boggle.py
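# has_prefix() and the 'str_current in word_list' test above both scan the
# whole dictionary on every call. A sketch of a one-off index that makes both
# checks O(1); the build_index/word_set/prefix_set names are illustrative
# additions, not part of the original program.
def build_index(words):
    word_set = set(words)
    prefix_set = set()
    for word in words:
        for end in range(1, len(word) + 1):
            prefix_set.add(word[:end])
    return word_set, prefix_set
# e.g. 'str_current in word_set' replaces the list search, and
# 'sub_s in prefix_set' replaces has_prefix(sub_s).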
# This notebook was prepared by [<NAME>](https://github.com/donnemartin). Source and license info is on [GitHub](https://github.com/donnemartin/interactive-coding-challenges).
# # Solution Notebook
# ## Problem: Implement Fizz Buzz.
#
# * [Constraints](#Constraints)
# * [Test Cases](#Test-Cases)
# * [Algorithm](#Algorithm)
# * [Code](#Code)
# * [Unit Test](#Unit-Test)
# ## Constraints
#
# * What is fizz buzz?
# * Return the string representation of numbers from 1 to n
# * Multiples of 3 -> 'Fizz'
# * Multiples of 5 -> 'Buzz'
# * Multiples of 3 and 5 -> 'FizzBuzz'
# * Can we assume the inputs are valid?
# * No
# * Can we assume this fits memory?
# * Yes
# ## Test Cases
#
# <pre>
# * None -> Exception
# * < 1 -> Exception
# * 15 ->
# [
# '1',
# '2',
# 'Fizz',
# '4',
# 'Buzz',
# 'Fizz',
# '7',
# '8',
# 'Fizz',
# 'Buzz',
# '11',
# 'Fizz',
# '13',
# '14',
# 'FizzBuzz'
# ]
# </pre>
# ## Algorithm
#
# There is no fancy algorithm to solve fizz buzz.
#
# * Iterate from 1 through n
# * Use the mod operator to determine if the current iteration is divisible by:
# * 3 and 5 -> 'FizzBuzz'
# * 3 -> 'Fizz'
# * 5 -> 'Buzz'
# * else -> string of current iteration
# * return the results
#
# Complexity:
# * Time: O(n)
# * Space: O(n)
# ## Code
# In[1]:
class Solution(object):
def fizz_buzz(self, num):
if num is None:
raise TypeError('num cannot be None')
if num < 1:
raise ValueError('num cannot be less than one')
results = []
for i in range(1, num + 1):
if i % 3 == 0 and i % 5 == 0:
results.append('FizzBuzz')
elif i % 3 == 0:
results.append('Fizz')
elif i % 5 == 0:
results.append('Buzz')
else:
results.append(str(i))
return results
# ## Unit Test
# In[2]:
get_ipython().run_cell_magic('writefile', 'test_fizz_buzz.py', "import unittest\n\n\nclass TestFizzBuzz(unittest.TestCase):\n\n def test_fizz_buzz(self):\n solution = Solution()\n self.assertRaises(TypeError, solution.fizz_buzz, None)\n self.assertRaises(ValueError, solution.fizz_buzz, 0)\n expected = [\n '1',\n '2',\n 'Fizz',\n '4',\n 'Buzz',\n 'Fizz',\n '7',\n '8',\n 'Fizz',\n 'Buzz',\n '11',\n 'Fizz',\n '13',\n '14',\n 'FizzBuzz'\n ]\n self.assertEqual(solution.fizz_buzz(15), expected)\n print('Success: test_fizz_buzz')\n\n\ndef main():\n test = TestFizzBuzz()\n test.test_fizz_buzz()\n\n\nif __name__ == '__main__':\n main()")
# In[3]:
get_ipython().run_line_magic('run', '-i test_fizz_buzz.py')
# In[ ]:
# source: arrays_strings/fizz_buzz/fizz_buzz_solution.py
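# An equally simple variant of the same loop, shown only as an aside (it is
# not part of the original notebook and omits the None/< 1 validation above):
def fizz_buzz_terse(num):
    # multiplying a string by a bool keeps it ('Fizz' * True) or drops it ('' * False)
    return ['Fizz' * (i % 3 == 0) + 'Buzz' * (i % 5 == 0) or str(i)
            for i in range(1, num + 1)]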
import json
import logging
import time
from . import utils
from . import listener
logger = logging.getLogger(__name__)
class RequestListener(listener.CommandListener):
def __init__(self,core,config):
self.core = core
self.config = config
def command(self):
return 'request'
def action(self, msg, user):
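        # msg is expected to look like 'request <track> [- <artist>]';
        # msg[8:] drops the 8-character 'request ' prefix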
split = msg[8:].strip().split('-')
if len(split) == 1:
query = {'any': split[0].strip().split(' ')}
else:
query = {'track_name': split[0].strip().split(' '),
'artist': split[1].strip().split(' ')}
logger.info(query)
results = self.core.library.search(query).get()
logger.info(str(results))
#source = self.find_best_source(results)
source = results[0]
logger.info('{} results matching query {} and uri {}'.format(len(source.tracks), query, source.uri))
if len(source.tracks) <= 0:
            return 'Nothing matches your query :('
else:
next_track = source.tracks[0]
current_track_position = self.core.tracklist.index().get()
current_track_position = -1 if current_track_position is None else current_track_position
logger.info('current position {}'.format(current_track_position))
self.core.tracklist.add(tracks=[next_track],
at_position=current_track_position + 1)
return 'Coming next {}'.format(utils.title_dash_artist(next_track))
def usage(self):
return 'request song_name [- artist_name] - Request a new song to be played'
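    # Currently bypassed by the hard-coded 'source = results[0]' in action();
    # kept so a 'backend_priority' config entry can rank search backends.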
def find_best_source(self, sources):
sources_by_uri = {}
for source in sources:
sources_by_uri[source.uri] = source
sources_order = self.config['backend_priority'].split(',')
for order in sources_order:
if len(sources_by_uri[order + ':search'].tracks) > 0:
return sources_by_uri[order + ':search']
        return None
# source: mopidy_slack/request.py
from pudzu.charts import *
import seaborn as sns
from pudzu.dates import *
import dateparser
CONFEDERATE = ["South Carolina", "Mississippi", "Florida", "Alabama", "Georgia", "Louisiana", "Texas"] + ["Arkansas", "North Carolina", "Tennessee", "Virginia"]
UNION = ["California", "Connecticut", "Illinois", "Indiana", "Iowa", "Kansas", "Maine", "Massachusetts", "Michigan", "Minnesota", "Nevada", "New Hampshire", "New Jersey", "New York", "Ohio", "Oregon", "Pennsylvania", "Rhode Island", "Vermont", "Dist. of Col.", "Wisconsin"]
BORDER = ["Delaware", "Kentucky", "Maryland", "Missouri", "West Virginia"]
PALETTE = tmap(RGBA, sns.color_palette())
DCOL = PALETTE[0]
RCOL = PALETTE[2]
EVENTBG = "#555555"
FONT_SIZE = 12
EVENTS = [
("Civil War", "April 12, 1861", "May 9, 1865"),
("Reconstruction", "May 10, 1865", "March 31, 1877"),
("Redemption", "April 1, 1877", "January 1, 1910"),
("<NAME>", "January 2, 1910", "July 25, 1948"),
("Civil Rights", "July 26, 1948", "December 31, 1967"),
("“Southern Strategy”", "January 1, 1968", "November 7, 2017")
]
# Generate data
try:
votes = pd.read_csv("cache/politics_usnorthsouth.csv").set_index("year")
except OSError:
votes = pd.read_csv("datasets/uselections_ucsb.csv", dtype={"rep_col": str, "dem_col": str}).split_columns(("rep_col", "dem_col"), "|").set_index("year")
def get_votes(df, state, year, republican):
cols = list(map(int,make_sequence(votes["rep_col" if republican else "dem_col"][year])))
if state not in df.index: state = state + "*"
if state not in df.index: return 0
return df.loc[state][cols].apply(ignoring_exceptions(int, 0)).sum()
records = []
for y in range(1860,2017,4):
if y == 1864:
        votes.loc[y, "leaning"] = 0  # DataFrame.set_value() was removed in pandas 1.0
else:
tclass = "elections_states" if y != 1976 else "ver11"
dfs = pd.read_html("http://www.presidency.ucsb.edu/showelection.php?year={}".format(y), "Alabama", attrs={"class": tclass})
df = dfs[0].set_index(0)
union_rep = sum(get_votes(df, s, y, True) for s in UNION)
union_dem = sum(get_votes(df, s, y, False) for s in UNION)
conf_rep = sum(get_votes(df, s, y, True) for s in CONFEDERATE)
conf_dem = sum(get_votes(df, s, y, False) for s in CONFEDERATE)
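        # ratio compares the North's Rep/Dem vote ratio with the South's; the
        # piecewise map below folds it symmetrically onto [-1, 1], where a
        # negative value means the South leaned more Republican than the North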
ratio = (union_rep/union_dem)/(conf_rep/conf_dem)
leaning = (ratio - 1) if ratio < 1 else 1 - (1 / ratio)
        votes.loc[y, "leaning"] = leaning
print("{},{},{},{},{},{}".format(y, union_rep, union_dem,conf_rep, conf_dem, leaning))
votes.to_csv("cache/politics_usnorthsouth.csv")
# Bar chart
def color_fn(c, r, v):
return RCOL if v < 0 else DCOL
def ylabel_fn(v):
if v == 0: return "same"
party = "Rep" if v < 0 else "Dem"
factor = "∞" if abs(v) == 1 else "{0:.2g}".format(1 / (1 - abs(v)))
return "×{} {}".format(factor, party)
def clabel_fn(c, r):
if votes.index[r] == 1864:
img = Image.from_column([
Image.from_text("{}".format(votes.index[r]), arial(FONT_SIZE, bold=True), bg="white", padding=(1,1)),
Image.from_text("Civil War", arial(FONT_SIZE, bold=True), "grey", bg="white", padding=(1,1))
], bg=0)
else:
v = votes.iloc[r]
rep = v["rep_can"] in v["president"]
img = Image.from_column([
Image.from_text("{}".format(votes.index[r]), arial(FONT_SIZE, bold=True), bg="white", padding=(1,1)),
Image.from_text(votes.iloc[r]["rep_can"], arial(FONT_SIZE, bold=rep), RCOL, bg="white", padding=((int(len(v["rep_can"]) < 8), 1))),
Image.from_text(votes.iloc[r]["dem_can"], arial(FONT_SIZE, bold=not rep), DCOL, bg="white", padding=((int(len(v["dem_can"]) < 8), 1)))
], bg=0)
return img.pad((0, 0, 0, 2), 0)
ylabel = Image.from_column([
Image.from_text("political leaning of the South versus the North", arial(24), padding=(5,2,5,5), bg="white"),
Image.from_text("based on the ratio of Republican to Democrat votes in the South divided by the ratio in the North", arial(14), padding=(5,2,5,10), bg="white")], bg="white").transpose(Image.ROTATE_90)
title = Image.from_column([
Image.from_text("From ‘Solid South’ to Republican heartland".upper(), arial(60, bold=True), bg="white")
, Image.from_text("the political transformation of the U.S. South in presidential elections".upper(), arial(36), bg="white")
], bg="white", padding=(0, 5)).pad((0,0,0,10), "white")
img = bar_chart(votes[["leaning"]], 62, 1000, spacing=2, colors=color_fn, clabels={ BarChartLabelPosition.OUTSIDE : clabel_fn},
ymin=-1, ymax=1, grid_interval=0.125, label_font=arial(FONT_SIZE), ylabels=ylabel_fn, rlabels=None, ylabel=ylabel, title=title)
# Add time chart
events = pd.DataFrame([{ "group": "event", "name": n, "start": dateparser.parse(s).date(), "end": dateparser.parse(e).date() } for n,s,e in EVENTS])
chronology = time_chart(2620, 50, events, "start", "end", lambda _: EVENTBG, interval_label_key=lambda d: Image.from_text(d['name'], arial(16), fg="white", bg=EVENTBG), bg="white")
img = img.place(chronology, (0, 1), padding=(150,100))
# Add state map
UNION = ["District of Columbia" if x == "Dist. of Col." else x for x in UNION]
def state_color_fn(c):
if c == "Borders": return "white"
elif c in UNION: return DCOL
elif c in CONFEDERATE: return RCOL
elif c in BORDER: return "grey"
else: return "#BBBBBB"
map = map_chart("maps/USA.png", state_color_fn)
map_thumb = Image.from_column([
Image.from_text("‘South’ refers to ex-Confederate states\n‘North’ refers to ex-Union non-border states", arial(60, bold=True), line_spacing=5),
map
], bg="white").resize_fixed_aspect(width=500)
img = img.place(map_thumb, (1, 0), padding=(80, 200))
# Save
img = img.pad((10,0), "white")
img.place(Image.from_text("/u/Udzu", font("arial", 16), fg="black", bg="white", padding=5).pad((1,1,0,0), "black"), align=1, padding=10, copy=False)
img.save("output/politics_usnorthsouth.png") | dataviz/politics_usnorthsouth.py | from pudzu.charts import *
import seaborn as sns
from pudzu.dates import *
import dateparser
CONFEDERATE = ["South Carolina", "Mississippi", "Florida", "Alabama", "Georgia", "Louisiana", "Texas"] + ["Arkansas", "North Carolina", "Tennessee", "Virginia"]
UNION = ["California", "Connecticut", "Illinois", "Indiana", "Iowa", "Kansas", "Maine", "Massachusetts", "Michigan", "Minnesota", "Nevada", "New Hampshire", "New Jersey", "New York", "Ohio", "Oregon", "Pennsylvania", "Rhode Island", "Vermont", "Dist. of Col.", "Wisconsin"]
BORDER = ["Delaware", "Kentucky", "Maryland", "Missouri", "West Virginia"]
PALETTE = tmap(RGBA, sns.color_palette())
DCOL = PALETTE[0]
RCOL = PALETTE[2]
EVENTBG = "#555555"
FONT_SIZE = 12
EVENTS = [
("Civil War", "April 12, 1861", "May 9, 1865"),
("Reconstruction", "May 10, 1865", "March 31, 1877"),
("Redemption", "April 1, 1877", "January 1, 1910"),
("<NAME>", "January 2, 1910", "July 25, 1948"),
("Civil Rights", "July 26, 1948", "December 31, 1967"),
("“Southern Strategy”", "January 1, 1968", "November 7, 2017")
]
# Generate data
try:
votes = pd.read_csv("cache/politics_usnorthsouth.csv").set_index("year")
except OSError:
votes = pd.read_csv("datasets/uselections_ucsb.csv", dtype={"rep_col": str, "dem_col": str}).split_columns(("rep_col", "dem_col"), "|").set_index("year")
def get_votes(df, state, year, republican):
cols = list(map(int,make_sequence(votes["rep_col" if republican else "dem_col"][year])))
if state not in df.index: state = state + "*"
if state not in df.index: return 0
return df.loc[state][cols].apply(ignoring_exceptions(int, 0)).sum()
records = []
for y in range(1860,2017,4):
if y == 1864:
votes.set_value(y, "leaning", 0)
else:
tclass = "elections_states" if y != 1976 else "ver11"
dfs = pd.read_html("http://www.presidency.ucsb.edu/showelection.php?year={}".format(y), "Alabama", attrs={"class": tclass})
df = dfs[0].set_index(0)
union_rep = sum(get_votes(df, s, y, True) for s in UNION)
union_dem = sum(get_votes(df, s, y, False) for s in UNION)
conf_rep = sum(get_votes(df, s, y, True) for s in CONFEDERATE)
conf_dem = sum(get_votes(df, s, y, False) for s in CONFEDERATE)
ratio = (union_rep/union_dem)/(conf_rep/conf_dem)
leaning = (ratio - 1) if ratio < 1 else 1 - (1 / ratio)
votes.set_value(y, "leaning", leaning)
print("{},{},{},{},{},{}".format(y, union_rep, union_dem,conf_rep, conf_dem, leaning))
votes.to_csv("cache/politics_usnorthsouth.csv")
# Bar chart
def color_fn(c, r, v):
return RCOL if v < 0 else DCOL
def ylabel_fn(v):
if v == 0: return "same"
party = "Rep" if v < 0 else "Dem"
factor = "∞" if abs(v) == 1 else "{0:.2g}".format(1 / (1 - abs(v)))
return "×{} {}".format(factor, party)
def clabel_fn(c, r):
if votes.index[r] == 1864:
img = Image.from_column([
Image.from_text("{}".format(votes.index[r]), arial(FONT_SIZE, bold=True), bg="white", padding=(1,1)),
Image.from_text("Civil War", arial(FONT_SIZE, bold=True), "grey", bg="white", padding=(1,1))
], bg=0)
else:
v = votes.iloc[r]
rep = v["rep_can"] in v["president"]
img = Image.from_column([
Image.from_text("{}".format(votes.index[r]), arial(FONT_SIZE, bold=True), bg="white", padding=(1,1)),
Image.from_text(votes.iloc[r]["rep_can"], arial(FONT_SIZE, bold=rep), RCOL, bg="white", padding=((int(len(v["rep_can"]) < 8), 1))),
Image.from_text(votes.iloc[r]["dem_can"], arial(FONT_SIZE, bold=not rep), DCOL, bg="white", padding=((int(len(v["dem_can"]) < 8), 1)))
], bg=0)
return img.pad((0, 0, 0, 2), 0)
ylabel = Image.from_column([
Image.from_text("political leaning of the South versus the North", arial(24), padding=(5,2,5,5), bg="white"),
Image.from_text("based on the ratio of Republican to Democrat votes in the South divided by the ratio in the North", arial(14), padding=(5,2,5,10), bg="white")], bg="white").transpose(Image.ROTATE_90)
title = Image.from_column([
Image.from_text("From ‘Solid South’ to Republican heartland".upper(), arial(60, bold=True), bg="white")
, Image.from_text("the political transformation of the U.S. South in presidential elections".upper(), arial(36), bg="white")
], bg="white", padding=(0, 5)).pad((0,0,0,10), "white")
img = bar_chart(votes[["leaning"]], 62, 1000, spacing=2, colors=color_fn, clabels={ BarChartLabelPosition.OUTSIDE : clabel_fn},
ymin=-1, ymax=1, grid_interval=0.125, label_font=arial(FONT_SIZE), ylabels=ylabel_fn, rlabels=None, ylabel=ylabel, title=title)
# Add time chart
events = pd.DataFrame([{ "group": "event", "name": n, "start": dateparser.parse(s).date(), "end": dateparser.parse(e).date() } for n,s,e in EVENTS])
chronology = time_chart(2620, 50, events, "start", "end", lambda _: EVENTBG, interval_label_key=lambda d: Image.from_text(d['name'], arial(16), fg="white", bg=EVENTBG), bg="white")
img = img.place(chronology, (0, 1), padding=(150,100))
# Add state map
UNION = ["District of Columbia" if x == "Dist. of Col." else x for x in UNION]
def state_color_fn(c):
if c == "Borders": return "white"
elif c in UNION: return DCOL
elif c in CONFEDERATE: return RCOL
elif c in BORDER: return "grey"
else: return "#BBBBBB"
map = map_chart("maps/USA.png", state_color_fn)
map_thumb = Image.from_column([
Image.from_text("‘South’ refers to ex-Confederate states\n‘North’ refers to ex-Union non-border states", arial(60, bold=True), line_spacing=5),
map
], bg="white").resize_fixed_aspect(width=500)
img = img.place(map_thumb, (1, 0), padding=(80, 200))
# Save
img = img.pad((10,0), "white")
img.place(Image.from_text("/u/Udzu", font("arial", 16), fg="black", bg="white", padding=5).pad((1,1,0,0), "black"), align=1, padding=10, copy=False)
img.save("output/politics_usnorthsouth.png") | 0.341253 | 0.235185 |
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: arubaoss_aaa_authentication
short_description: Implements REST API for AAA authentication configuration
version_added: "2.4"
description:
- "This implements rest apis which can be used to configure authentication"
options:
command:
description: Function name calls according to configuration required
choices: config_authentication, config_authentication_console, config_authentication_ssh
required: False
is_privilege_mode_enabled:
description: To enable/disable privileged mode
required: False
config:
description: Whether to create or delete the configuration
choices: create, delete
required: False
primary_method:
description: The primary authentication method
choices: PAM_LOCAL, PAM_TACACS
required: False
secondary_method:
description: The secondary authentication method
choices: SAM_NONE, SAM_LOCAL
required: False
author:
- <NAME> (@hpe)
'''
EXAMPLES = '''
- name: Updates the given console authentication configuration to the system
arubaoss_aaa_authentication:
primary_method: "PAM_TACACS"
secondary_method: "SAM_LOCAL"
command: config_authentication_console
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.arubaoss.arubaoss import run_commands
from ansible.module_utils.network.arubaoss.arubaoss import arubaoss_argument_spec
"""
-------
Name: config_authentication
Configures port with authentication config
param request: module
Returns
Configure the switch with params sent
-------
"""
def config_authentication(module):
params = module.params
url = "/authentication"
if params['config'] == "create":
data = {'is_privilege_mode_enabled': True}
else:
data = {'is_privilege_mode_enabled': False}
method = 'PUT'
result = run_commands(module, url, data, method, check=url)
return result
"""
-------
Name: config_authentication_console
Configures port with authentication config
param request: module
Returns
Configure the switch with params sent
-------
"""
def config_authentication_console(module):
params = module.params
url = "/authentication/console"
data = {}
data['auth_console_login'] = {'primary_method': params['primary_method'], 'secondary_method': params['secondary_method']}
method = 'PUT'
result = run_commands(module, url, data, method, check=url)
return result
"""
-------
Name: config_authentication_ssh
Configures port with authentication config
param request: module
Returns
Configure the switch with params sent
-------
"""
def config_authentication_ssh(module):
params = module.params
url = "/authentication/ssh"
data = {}
data['auth_ssh_login'] = {'primary_method': params['primary_method'], 'secondary_method': params['secondary_method']}
method = 'PUT'
result = run_commands(module, url, data, method, check=url)
return result
"""
-------
Name: run_module()
The main module invoked
Returns
Configure the switch with params sent
-------
"""
def run_module():
module_args = dict(
command=dict(type='str', required=False,default='config_authentication',
choices=['config_authentication','config_authentication_console','config_authentication_ssh']),
config=dict(type='str', required=False, default='create', choices=["create","delete"]),
is_privilege_mode_enabled=dict(type='bool', required=False, default=False),
primary_method=dict(type='str', required=False, default="PAM_LOCAL",
choices=["PAM_LOCAL", "PAM_TACACS"]),
secondary_method=dict(type='str', required=False, default="SAM_NONE",
choices=["SAM_NONE", "SAM_LOCAL"]),
)
module_args.update(arubaoss_argument_spec)
result = dict(changed=False,warnings='Not Supported')
module = AnsibleModule(
argument_spec=module_args,
supports_check_mode=True
)
if module.check_mode:
module.exit_json(**result)
try:
if module.params['command'] == "config_authentication":
result = config_authentication(module)
elif module.params['command'] == "config_authentication_console":
result = config_authentication_console(module)
else:
result = config_authentication_ssh(module)
except Exception as err:
module.fail_json(msg=str(err))  # fail_json exits, so no return is needed
module.exit_json(**result)
def main():
run_module()
if __name__ == '__main__':
main() | aruba_module_installer/library/modules/network/arubaoss/arubaoss_aaa_authentication.py | 0.641759 | 0.2918 |
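A hedged sketch (not part of the module) of the PUT payloads the three command handlers above construct; the endpoint paths and key names are copied from the handler code, everything else is illustrative.
def build_payload(command, primary="PAM_LOCAL", secondary="SAM_NONE", privileged=True):
    if command == "config_authentication":
        return "/authentication", {"is_privilege_mode_enabled": privileged}
    login = {"primary_method": primary, "secondary_method": secondary}
    if command == "config_authentication_console":
        return "/authentication/console", {"auth_console_login": login}
    return "/authentication/ssh", {"auth_ssh_login": login}

build_payload("config_authentication_ssh", primary="PAM_TACACS", secondary="SAM_LOCAL")
# -> ("/authentication/ssh", {"auth_ssh_login": {"primary_method": "PAM_TACACS", "secondary_method": "SAM_LOCAL"}})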
import argparse
import hashlib
import logging
import os
import subprocess
import sys
import urllib.request
from functools import partial
log_level = logging.INFO
log_format = "%(message)s"
logging.basicConfig(level=log_level, format=log_format)
logger = logging.getLogger(__name__)
tools_dir = os.path.dirname(sys.argv[0])
SCHEMA_FILE = os.path.join(tools_dir, "service-2.json")
SCHEMA_URL = "https://raw.githubusercontent.com/boto/botocore/master/botocore/data/securityhub/2018-10-26/service-2.json"
GENERATE_CLASS = os.path.join(tools_dir, "generate_class.py")
def sha256sum(file: str, block_size: int = 2 ** 16) -> str:
h = hashlib.new("sha256")
with open(file, "rb") as f:
for block in iter(partial(f.read, block_size), b""):
h.update(block)
return h.hexdigest()
def download_file(url: str, dest: str) -> None:
urllib.request.urlretrieve(url=url, filename=dest)
def update_generated_asff_class() -> subprocess.CompletedProcess:
logger.info(f"Running {GENERATE_CLASS}...")
return subprocess.run(GENERATE_CLASS)
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"--schema", default=SCHEMA_FILE, help="Security Hub schema file"
)
parser.add_argument("--url", default=SCHEMA_URL, help="Security Hub schema URL")
return parser.parse_args()
def main() -> None:
args = parse_args()
schema_file = args.schema
schema_url = args.url
old_checksum = sha256sum(file=schema_file)
try:
download_file(url=schema_url, dest=schema_file)
except Exception as e:
logger.error(f"Error while downloading schema: {e}")
sys.exit(1)
new_checksum = sha256sum(file=schema_file)
logger.info(f"Old checksum: {old_checksum}")
logger.info(f"New checksum: {new_checksum}")
if old_checksum != new_checksum:
update_generated_asff_class()
logger.info("It is a good idea to review the changes")
if __name__ == "__main__":
main() | tools/update_schema.py | 0.427397 | 0.071494 |
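The two-argument iter(callable, sentinel) form used in sha256sum() calls f.read(block_size) repeatedly until it returns b"" (end of file). A hedged, self-contained equivalent written as an explicit loop:
import hashlib

def sha256sum_loop(path: str, block_size: int = 2 ** 16) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        while True:
            block = f.read(block_size)
            if not block:  # b"" signals EOF
                break
            h.update(block)
    return h.hexdigest()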
from pyminion.expansions.base import smithy, throne_room, village
from pyminion.game import Game
from pyminion.players import Human
def test_throne_room_no_action(human: Human, game: Game):
human.hand.add(throne_room)
human.hand.cards[0].play(human, game)
assert len(human.hand) == 0
assert len(human.playmat) == 1
assert len(human.discard_pile) == 0
assert human.state.actions == 0
assert human.state.money == 0
def test_throne_room_smithy(human: Human, game: Game, monkeypatch):
human.hand.add(throne_room)
human.hand.add(smithy)
assert len(human.hand) == 2
monkeypatch.setattr("builtins.input", lambda _: "Smithy")
human.hand.cards[0].play(human, game)
assert len(human.playmat) == 2
assert len(human.hand) == 6
assert len(human.discard_pile) == 0
assert human.state.actions == 0
assert human.state.money == 0
def test_throne_room_village(human: Human, game: Game, monkeypatch):
human.hand.add(throne_room)
human.hand.add(village)
assert len(human.hand) == 2
assert human.state.actions == 1
monkeypatch.setattr("builtins.input", lambda _: "Village")
human.hand.cards[0].play(human, game)
assert len(human.playmat) == 2
assert len(human.hand) == 2
assert human.state.actions == 4
assert human.state.money == 0
def test_two_separate_thrones(human: Human, game: Game, monkeypatch):
human.hand.add(throne_room)
human.hand.add(village)
assert len(human.hand) == 2
assert human.state.actions == 1
monkeypatch.setattr("builtins.input", lambda _: "Village")
human.hand.cards[0].play(human, game)
assert len(human.playmat) == 2
assert len(human.hand) == 2
assert human.state.actions == 4
human.hand.add(throne_room)
human.hand.add(smithy)
assert len(human.hand) == 4
monkeypatch.setattr("builtins.input", lambda _: "Smithy")
human.play(throne_room, game=game)
assert len(human.playmat) == 4
assert len(human.hand) == 8
assert human.state.actions == 3
assert human.state.money == 0
def test_throne_a_throne(human: Human, game: Game, monkeypatch):
human.hand.add(throne_room)
human.hand.add(throne_room)
human.hand.add(smithy)
human.hand.add(smithy)
assert len(human.hand) == 4
for i in range(20):
human.deck.add(throne_room)
responses = iter(["throne room", "smithy", "smithy"])
monkeypatch.setattr("builtins.input", lambda input: next(responses))
human.play(target_card=throne_room, game=game)
assert len(human.playmat) == 4
assert len(human.hand) == 12 # +3 cards played 4 times = 12 cards | tests/test_cards/test_actions/test_throne_room.py | 0.639961 | 0.804866 |
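The scripted-input pattern used in test_throne_a_throne, shown in isolation: an iterator of canned answers replaces builtins.input, so each prompt consumes the next response. This is a hypothetical standalone test assuming pytest's monkeypatch fixture.
def test_scripted_prompts(monkeypatch):
    responses = iter(["yes", "no"])
    monkeypatch.setattr("builtins.input", lambda _: next(responses))
    assert input("first? ") == "yes"
    assert input("second? ") == "no"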
import matplotlib.pyplot as plt # Plotting library
from scipy.integrate import odeint # SciPy ODE integration
from scipy.interpolate import interp1d # SciPy 1D interpolant
from numpy import linspace, pi, sqrt, exp # numpy functions and constants
def sheathFunc(f, x, vs):
# f is an array of all evolving variables
phi = f[0] # First element is phi
E = f[1] # Second element is E
# Calculate intermediate results
ne = exp(phi)
vi = sqrt(vs**2 - 2*phi)
ni = vs/vi
# Calculate the time derivatives
dphi_by_dx = -E
dE_by_dx = ni - ne
# Return the time derivatives in the same order as in the input f
return [dphi_by_dx, dE_by_dx]
def solve(x, vs, phi_Initial, E_Initial):
# Integrate using Sheath Function
y = odeint(sheathFunc, [phi_Initial, E_Initial], x, args = (vs,))
# Extract phi
phi = y[:,0]
# Calculate j
j = sqrt(1840.0/ (2.0*pi)) * exp(phi) - 1.0
# Return the value of phi and j
return phi, j
if __name__ == "__main__":
# Define all parameter values
vsTuple = (1, 1.5, 2) # Allows for adding additional vs
phi_Initial = 0.0
E_Initial = 0.001
x = linspace(0, 40, 100)
# Loop through each vs
for vs in vsTuple:
phi, j = solve(x, vs, phi_Initial, E_Initial) # Solve to obtain j
xAsFunctionOfj = interp1d(j, x) # Get distance as an interpolant function of j
xWall = xAsFunctionOfj(0) # Interpolate to find x where j is 0
xShifted = x - xWall # Shift the distance coordinate so that j is always 0 at distance 0
plt.plot(xShifted, j) # Plot
plt.grid(True) # Add a background grid
plt.xlabel(r'Distance [$\hat{x}$, Debye Length $λ_D$] (Offset such that Current Density is 0 at 0 distance)')
plt.ylabel(r'Current Density [Normalised to ion current density at wall $\hat{j}$]')
plt.title(r'Assignment2: Comparison of Sheath Conditions')
plt.legend([r'$\hat{V}_s$ = 1.0',r'$\hat{V}_s$ = 1.5',r'$\hat{V}_s$ = 2.0'])
plt.ylim(-5,20)
plt.xlim(-40,30)
plt.show() | CompuTech/mly509_a2.py | 0.851675 | 0.843251 |
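A hedged follow-up reusing solve() and the imports above: the floating-wall potential is the value of phi where the net current density j crosses zero, found with the same interpolation trick the script applies to distance (illustrative, not asserted).
phi, j = solve(linspace(0, 40, 100), 1.5, 0.0, 0.001)
phi_wall = float(interp1d(j, phi)(0.0))  # normalised potential at the wall, where j = 0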
import pandas as pd
import time
from datetime import datetime
from multiprocessing import Pool, cpu_count
def slice_data(event_log_csv):
df = pd.read_csv(event_log_csv,encoding='euc_kr')
df['Complete Timestamp'] = pd.to_datetime(df['Complete Timestamp'])
groups = df.groupby('Case ID')
return groups
def work(kek):
case, group = kek
early_morning = '10:30:00'
early_morning = datetime.strptime(early_morning,'%H:%M:%S').time()
late_morning = '13:00:00'
late_morning = datetime.strptime(late_morning,'%H:%M:%S').time()
early_afternoon = '16:30:00'
early_afternoon = datetime.strptime(early_afternoon,'%H:%M:%S').time()
if len(group['Activity']) >3:
timelist = list(group['Complete Timestamp'])
em = 0
lm = 0
ea = 0
la = 0
word_bag = ''
for t in timelist:
t = t.time()
if t < early_morning:
em += 1
word_bag += 'em_'
elif early_morning < t < late_morning:
lm += 1
word_bag += 'lm_'
elif late_morning < t < early_afternoon:
ea += 1
word_bag += 'ea_'
else:
la += 1
word_bag += 'la_'
activitylist = list(group['Activity'])
d = {x:activitylist.count(x) for x in set(activitylist)}
rr = 0
for x in d.values():
if x>=2:
rr +=1
return [case,em,lm,ea,la,rr,word_bag[:-1]]
return []
def integrate(results):
return results
if __name__ == '__main__':
print('Start slicing...')
start = time.time()
slices = slice_data('../../m08_pandas_two/datastore/event-log.csv')
pool = Pool(cpu_count())
print('Start work...')
results = pool.map(work, slices)
pool.close()
pool.join()
results = integrate(results)
print ('Finished in %f seconds' % (time.time() - start)) | data-science-not/weeks/m09_multiproc/p02/task2_multiproc.py | 0.187914 | 0.12408 |
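A hypothetical follow-up (not in the script): work() returns either a 7-element row or [] for cases with three or fewer activities, so the empties need filtering before the rows can become a DataFrame.
rows = [r for r in results if r]
out = pd.DataFrame(rows, columns=["Case ID", "em", "lm", "ea", "la", "repeats", "word_bag"])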
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
from libs import model_common
def placeholder(Batch_Size, P, Q, N, F_in, F_out):
labels = tf.compat.v1.placeholder(shape = (Batch_Size, Q, N, F_out), dtype = tf.float32,name="labels")
samples = tf.compat.v1.placeholder(shape = (Batch_Size, P, N, F_in), dtype = tf.float32,name="samples")
is_training = tf.compat.v1.placeholder(shape = (), dtype = tf.bool, name='is_training')
return labels, samples, is_training
# X:(B,P,N,1), adj:(N,N)
# output:(B,P,N,1)
def adjacent(args, X, La):
#output = tf.matmul(La, X)
units_gcn = [int(i) for i in args.units_gcn.split("-")]
output = model_common.multi_gcn(La, X, args.activations_gcn, units_gcn, args.Ks_gcn)
return output
# X:(B,P,N,1), adj:(Q,P,N,N)
# output:(B,P,N,Q)
def reachable_en(args, X, Lr):
Q,P,N,_ = Lr.shape
output = []
for p in range(P):
xp = X[:,p,...] #(B,N,1)
xq = []
for q in range(Q):
L = Lr[q,p,...] #(N,N)
L = tf.nn.softmax(L, axis=-1)
units_gcn = [int(i) for i in args.units_gcn.split("-")]
x = model_common.multi_gcn(L, xp, args.activations_gcn, units_gcn, args.Ks_gcn)
xq.append(x)
xq = tf.concat(xq,axis=-1) #(B,N,Q)
output.append(xq)
output = tf.stack(output, axis=1) #(B,P,N,Q)
return output
def XST(dw_mat,X, TE, SE, D,T):
SE = model_common.s_embbing_static(SE, D, activations=['relu', None]) #(N,Fs)=>(1,1,N,D)
B,P,N,_ = X.shape
inputs = X
TE = model_common.t_embbing_static(dw_mat,2, TE, T, D, activations=['relu', None]) #(B,P,N,D)
X = model_common.x_embedding(X, D, activations=['relu', None])
is_TE = True
is_SE = True
X = model_common.x_SE_TE(X, SE, TE, is_X=True,is_SE=is_SE , is_TE=is_TE)
return inputs, X
# X:(B,P,N,1),SE:(N,F), T:(B,P,N,2)
# output: (B,P,N,1)
def dynamic(args, X,D):
# (B,P,N,2D)=> (B,P,N,D)
query = model_common.multi_fc(X, activations=['relu'], units=[D])
key = model_common.multi_fc(X, activations=['relu'], units=[D])
value = model_common.multi_fc(X, activations=['relu'], units=[D])
# (B,P,N,D)*(B,P,N,D)=>(B,P,N,N)
attention = tf.matmul(query, key, transpose_b = True)
attention /= (D ** 0.5)
attention = tf.nn.softmax(attention, axis = -1)
# (B,P,N,N) * (B,P,N,1)=> (B,P,N,1)
#inputs = tf.matmul(attention, value)
units_gcn = [int(i) for i in args.units_gcn.split("-")]
inputs = model_common.multi_gcn(attention, value, args.activations_gcn,units_gcn , args.Ks_gcn)
return inputs
def is_train(labels, q, output):
x = labels[:, q, ...]
# Return either the model's prediction or the previous ground truth in training.
Use_Curriculum_Learning = True
Cl_Decay_Steps = 2000
if Use_Curriculum_Learning:
c = tf.random_uniform((), minval=0., maxval=1.)
k = Cl_Decay_Steps * 1.0
global_step = tf.cast(tf.train.get_or_create_global_step(), tf.float32)
threshold = tf.cast(k / (k + tf.exp(global_step / k)), tf.float32)
x = tf.cond(tf.less(c, threshold), lambda: labels[:, q, ...], lambda: output)
return x
def is_test(output):
return output
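# Note: is_train() above implements scheduled sampling with an inverse-sigmoid
# decay: ground truth is fed with probability k / (k + exp(step / k)), where
# k = Cl_Decay_Steps, so early steps mostly see labels and later steps mostly
# see the model's own predictions.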
'''
X: (B,P,N,3) = (B,P,N,1)[X] + (B,P,N,2)[TE]
Labels:(B,Q,N,1)
La:(N,N)
Lr:(Q,P,N,N)
SE:(N,F)
'''
def Model(args, mean, std, X, labels, La, Lr, SE, is_training,dw_mat):
TE = tf.cast(X[..., -2:], tf.int32) # (B,P,N,2)
X = X[..., :-2] # (B,P,N,1)
D, T = args.D, args.T
num_units = [int(i) for i in args.num_units.split("-")]
print(num_units)
B,Q,N,F = labels.shape #(B,Q,N,1)
labels = tf.squeeze(tf.concat(tf.split(labels, N, axis=2),axis=0),axis=2) #(BN,Q,1,1)=>(BN,Q,1)
inputs, X = XST(dw_mat,X, TE, SE, D, T)
X_a = adjacent(args, X, La) # (B,P,N,1)
X_d = dynamic(args, X, D) # (B,P,N,1)
X_ad = tf.concat([X_a, X_d], axis=-1) # (B,P,N,Q+2)
outputs = X_ad
samples = tf.squeeze(tf.concat(tf.split(outputs, N, axis=2), axis=0)) #(B,P,1,Q+2)=>(BN,P,1,Q+2)=>(BN,P,Q+2)
X_r = reachable_en(args, X, Lr) # (B,P,N,Q)
X_r = tf.transpose(X_r, [0, 2, 1, 3]) # (B,N,P,Q)
(B1, N1, P1, Q1) = X_r.shape
X_r = tf.reshape(X_r, [B1 * N1, P1, Q1]) # (BN,P,Q)
# encoder
with tf.variable_scope("encoder"):
multi_gru_en = model_common.multi_cells(num_units)
# outputs:(B,T,F),last_states:tuple, [(B,F),(B,F)]
outputs, last_states = tf.nn.dynamic_rnn(cell=multi_gru_en, inputs=samples, dtype=tf.float32)
# decoder
Hs = outputs # (B,T,F)
P = outputs.shape[1]
with tf.variable_scope("decoder", reuse=tf.AUTO_REUSE):
multi_gru_de = model_common.multi_cells(num_units)
outputs = []
inputs = tf.zeros_like(labels[:, 0, ...])
for q in range(Q):
X_rq = X_r[..., q] # (BN,P)
inputs = tf.concat([inputs, X_rq], axis=-1) # (B,2F)
HCq = []
for h in range(args.H):
# temporal attention
Sq_1 = last_states[1] # (B,F)
eh = []
for p in range(P):
Hp = Hs[:, p, :] # (B,F)
shp = tf.concat([Hp, Sq_1], axis=-1) # (B, 2F)
w = tf.get_variable('w_%d' % h, [shp.shape[-1], args.Fe], dtype=tf.float32,
initializer=tf.glorot_uniform_initializer()) # (2F,Fe)
ehp = tf.einsum('ab,bc->ac', tf.nn.tanh(shp), w) # (B, 2F)*(2F,Fe)=>(B,Fe)
v = tf.get_variable('v_%d' % h, [args.Fe, 1], dtype=tf.float32,
initializer=tf.glorot_uniform_initializer()) # (Fe,1)
ep = tf.einsum('ab,bc->ac', tf.nn.tanh(ehp), v) # (B, Fe)*(Fe,1)=>(B,1)
eh.append(ep)
eh = tf.concat(eh, axis=-1) # (B,P)
eh = tf.nn.softmax(eh, axis=-1) # (B,P)
eh = tf.expand_dims(eh, axis=-1) # (B,P,1)
Cq = tf.zeros_like(Hs[:, 0, :]) # (B,F)
for p in range(P):
ap = eh[:, p, :] # (B,1)
Hp = Hs[:, p, :] # (B,F)
Hap = Hp * ap # (B,F)
Cq += Hap
HCq.append(Cq)
HCq = tf.concat(HCq, axis=-1) # (B,HF)
w = tf.get_variable('w', [HCq.shape[-1], Cq.shape[-1]], dtype=tf.float32,
initializer=tf.glorot_uniform_initializer()) # (HF,F)
Cq = tf.matmul(HCq, w) # (B,HF)*(HF,F) =>(B,F)
inputs = tf.concat([inputs, Cq], axis=-1) # (B,2F)
output, states = multi_gru_de(inputs, last_states)
output = model_common.fc(output, F)
outputs.append(output)
last_states = states
inputs = tf.cond(is_training, lambda: is_train(labels, q, output), lambda: is_test(output))
outputs = tf.stack(outputs, axis=1) # (BN,Q,F)
outputs = tf.stack(tf.split(outputs, N, axis=0), axis=2) # (BN,Q,F)=>(B,Q,N,F)
outputs = model_common.inverse_positive(outputs, std, mean)
return outputs | models/MSGC_Seq2seq.py | 0.606964 | 0.439026 |
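The attention weights in dynamic() above are plain scaled dot-product self-attention over the N nodes (the module then propagates them through multi_gcn rather than a bare matmul). A hedged numpy sketch of the weight computation for a single (batch, step) slice:
import numpy as np

def sdp_attention(q, k, v):                       # q, k, v: (N, D)
    scores = q @ k.T / np.sqrt(q.shape[-1])       # (N, N) similarity, scaled by sqrt(D)
    w = np.exp(scores - scores.max(-1, keepdims=True))
    w /= w.sum(-1, keepdims=True)                 # row-wise softmax
    return w @ v                                  # (N, D) attended values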
from sebs.cache import Cache
from sebs.faas.config import Credentials, Resources, Config
from sebs.utils import LoggingHandlers
from sebs.storage.config import MinioConfig
from typing import cast, Optional
class OpenWhiskCredentials(Credentials):
@staticmethod
def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Credentials:
return OpenWhiskCredentials()
def serialize(self) -> dict:
return {}
class OpenWhiskResources(Resources):
def __init__(
self,
registry: Optional[str] = None,
username: Optional[str] = None,
password: Optional[str] = None,
registry_updated: bool = False,
):
super().__init__()
self._docker_registry = registry if registry != "" else None
self._docker_username = username if username != "" else None
self._docker_password = password if password != "" else None
self._registry_updated = registry_updated
self._storage: Optional[MinioConfig] = None
self._storage_updated = False
@staticmethod
def typename() -> str:
return "OpenWhisk.Resources"
@property
def docker_registry(self) -> Optional[str]:
return self._docker_registry
@property
def docker_username(self) -> Optional[str]:
return self._docker_username
@property
def docker_password(self) -> Optional[str]:
return self._docker_password
@property
def storage_config(self) -> Optional[MinioConfig]:
return self._storage
@property
def storage_updated(self) -> bool:
return self._storage_updated
@property
def registry_updated(self) -> bool:
return self._registry_updated
@staticmethod
def initialize(dct: dict) -> Resources:
return OpenWhiskResources(dct["registry"], dct["username"], dct["password"])
@staticmethod
def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Resources:
cached_config = cache.get_config("openwhisk")
ret: OpenWhiskResources
# Check for new config - overrides but check if it's different
if "docker_registry" in config:
ret = cast(OpenWhiskResources, OpenWhiskResources.initialize(config["docker_registry"]))
ret.logging.info("Using user-provided Docker registry for OpenWhisk.")
ret.logging_handlers = handlers
# check if there has been an update
if not (
cached_config
and "resources" in cached_config
and "docker" in cached_config["resources"]
and cached_config["resources"]["docker"] == config["docker_registry"]
):
ret._registry_updated = True
# Load cached values
elif (
cached_config
and "resources" in cached_config
and "docker" in cached_config["resources"]
):
ret = cast(
OpenWhiskResources,
OpenWhiskResources.initialize(cached_config["resources"]["docker"]),
)
ret.logging_handlers = handlers
ret.logging.info("Using cached Docker registry for OpenWhisk")
else:
ret = OpenWhiskResources()
ret.logging.info("Using default Docker registry for OpenWhisk.")
ret.logging_handlers = handlers
ret._registry_updated = True
# Check for new config
if "storage" in config:
ret._storage = MinioConfig.deserialize(config["storage"])
ret.logging.info("Using user-provided configuration of storage for OpenWhisk.")
# check if there has been an update
if not (
cached_config
and "resources" in cached_config
and "storage" in cached_config["resources"]
and cached_config["resources"]["storage"] == config["storage"]
):
ret.logging.info(
"User-provided configuration is different from cached storage, "
"we will update existing OpenWhisk actions."
)
ret._storage_updated = True
# Load cached values
elif (
cached_config
and "resources" in cached_config
and "storage" in cached_config["resources"]
):
ret._storage = MinioConfig.deserialize(cached_config["resources"]["storage"])
ret.logging.info("Using cached configuration of storage for OpenWhisk.")
return ret
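# Resolution order above: an explicit entry in the user config wins and flags the
# resource as updated; otherwise the cached entry is reused; otherwise defaults
# apply (with the registry still marked updated).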
def update_cache(self, cache: Cache):
cache.update_config(
val=self.docker_registry, keys=["openwhisk", "resources", "docker", "registry"]
)
cache.update_config(
val=self.docker_username, keys=["openwhisk", "resources", "docker", "username"]
)
cache.update_config(
val=self.docker_password, keys=["openwhisk", "resources", "docker", "password"]
)
if self._storage:
self._storage.update_cache(["openwhisk", "resources", "storage"], cache)
def serialize(self) -> dict:
out: dict = {
"docker_registry": self.docker_registry,
"docker_username": self.docker_username,
"docker_password": self.docker_password,
}
if self._storage:
out = {**out, "storage": self._storage.serialize()}
return out
class OpenWhiskConfig(Config):
name: str
shutdownStorage: bool
cache: Cache
def __init__(self, config: dict, cache: Cache):
super().__init__()
self._credentials = OpenWhiskCredentials()
self._resources = OpenWhiskResources()
self.shutdownStorage = config["shutdownStorage"]
self.removeCluster = config["removeCluster"]
self.wsk_exec = config["wskExec"]
self.wsk_bypass_security = config["wskBypassSecurity"]
self.experimentalManifest = config["experimentalManifest"]
self.cache = cache
@property
def credentials(self) -> OpenWhiskCredentials:
return self._credentials
@property
def resources(self) -> OpenWhiskResources:
return self._resources
@staticmethod
def initialize(cfg: Config, dct: dict):
pass
def serialize(self) -> dict:
return {
"name": "openwhisk",
"shutdownStorage": self.shutdownStorage,
"removeCluster": self.removeCluster,
"wskExec": self.wsk_exec,
"wskBypassSecurity": self.wsk_bypass_security,
"experimentalManifest": self.experimentalManifest,
"credentials": self._credentials.serialize(),
"resources": self._resources.serialize(),
}
@staticmethod
def deserialize(config: dict, cache: Cache, handlers: LoggingHandlers) -> Config:
cached_config = cache.get_config("openwhisk")
resources = cast(
OpenWhiskResources, OpenWhiskResources.deserialize(config, cache, handlers)
)
res = OpenWhiskConfig(config, cached_config)
res.logging_handlers = handlers
res._resources = resources
return res
def update_cache(self, cache: Cache):
cache.update_config(val=self.shutdownStorage, keys=["openwhisk", "shutdownStorage"])
cache.update_config(val=self.removeCluster, keys=["openwhisk", "removeCluster"])
cache.update_config(val=self.wsk_exec, keys=["openwhisk", "wskExec"])
cache.update_config(val=self.wsk_bypass_security, keys=["openwhisk", "wskBypassSecurity"])
cache.update_config(
val=self.experimentalManifest, keys=["openwhisk", "experimentalManifest"]
)
self.resources.update_cache(cache) | sebs/openwhisk/config.py | 0.853272 | 0.141015 |
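A hypothetical input dict accepted by OpenWhiskConfig.deserialize(), with key names taken from the code above and placeholder values:
config = {
    "shutdownStorage": False,
    "removeCluster": False,
    "wskExec": "wsk",
    "wskBypassSecurity": True,
    "experimentalManifest": False,
    "docker_registry": {
        "registry": "registry.example.com",
        "username": "user",
        "password": "secret",
    },
    # "storage": {...},  # optional; parsed by MinioConfig.deserialize()
}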
from tests.utils import assert_bindings
def test_ipo6_ipo_1(mode, save_output, output_format):
"""
International Purchase Order 6
"""
assert_bindings(
schema="boeingData/ipo6/ipo.xsd",
instance="boeingData/ipo6/ipo_1.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo6_ipo_2(mode, save_output, output_format):
"""
International Purchase Order 6
"""
assert_bindings(
schema="boeingData/ipo6/ipo.xsd",
instance="boeingData/ipo6/ipo_2.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo5_ipo_1(mode, save_output, output_format):
"""
International Purchase Order 5
"""
assert_bindings(
schema="boeingData/ipo5/ipo.xsd",
instance="boeingData/ipo5/ipo_1.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo5_ipo_2(mode, save_output, output_format):
"""
International Purchase Order 5
"""
assert_bindings(
schema="boeingData/ipo5/ipo.xsd",
instance="boeingData/ipo5/ipo_2.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo4_ipo_1(mode, save_output, output_format):
"""
International Purchase Order 4
"""
assert_bindings(
schema="boeingData/ipo4/ipo.xsd",
instance="boeingData/ipo4/ipo_1.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo4_ipo_2(mode, save_output, output_format):
"""
International Purchase Order 4
"""
assert_bindings(
schema="boeingData/ipo4/ipo.xsd",
instance="boeingData/ipo4/ipo_2.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo3_ipo_1(mode, save_output, output_format):
"""
International Purchase Order 3
"""
assert_bindings(
schema="boeingData/ipo3/ipo.xsd",
instance="boeingData/ipo3/ipo_1.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo3_ipo_2(mode, save_output, output_format):
"""
International Purchase Order 3
"""
assert_bindings(
schema="boeingData/ipo3/ipo.xsd",
instance="boeingData/ipo3/ipo_2.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo2_ipo_1(mode, save_output, output_format):
"""
International Purchase Order 2
"""
assert_bindings(
schema="boeingData/ipo2/ipo.xsd",
instance="boeingData/ipo2/ipo_1.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo2_ipo_2(mode, save_output, output_format):
"""
International Purchase Order 2
"""
assert_bindings(
schema="boeingData/ipo2/ipo.xsd",
instance="boeingData/ipo2/ipo_2.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo1_ipo_1(mode, save_output, output_format):
"""
International Purchase Order 1
"""
assert_bindings(
schema="boeingData/ipo1/ipo.xsd",
instance="boeingData/ipo1/ipo_1.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
)
def test_ipo1_ipo_2(mode, save_output, output_format):
"""
International Purchase Order 1
"""
assert_bindings(
schema="boeingData/ipo1/ipo.xsd",
instance="boeingData/ipo1/ipo_2.xml",
class_name="PurchaseOrder",
version="1.1",
mode=mode,
save_output=save_output,
output_format=output_format,
structure_style="filenames",
) | tests/test_boeing_meta_12.py | 0.669853 | 0.298932 |
from maskgen import tool_set
import unittest
import numpy as np
from maskgen import image_wrap
from test_support import TestSupport
import sys
class TestToolSet(TestSupport):
def test_diff(self):
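        # smoke test: exercises mediatedCompare end to end; the result is not asserted on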
args = {'smoothing': 3, 'mode':'bgr', 'aggregate':'max','filling':'morphology'}
a = np.random.randint(0,255,(255,255,3)).astype('int16')
b = np.random.randint(0, 255, (255, 255, 3)).astype('int16')
m = tool_set.mediatedCompare(a,b, arguments= args)
def test_filetype(self):
        self.assertEqual(tool_set.fileType(self.locateFile('images/hat.jpg')), 'image')
        self.assertEqual(tool_set.fileType(self.locateFile('images/sample.json')), 'text')
        f = open('test.log', 'w+')
        f.close()
        self.addFileToRemove('test.log')
        self.assertEqual(tool_set.fileType(self.locateFile('test.log')), 'text')
        self.assertEqual(tool_set.fileType(self.locateFile('tests/videos/sample1.mov')), 'video')
        self.assertEqual(tool_set.fileType('foo.dng.zip'), 'zip')
        self.assertEqual(tool_set.fileType('foo.jpg.zip'), 'zip')
        self.assertEqual(tool_set.fileType('foo.png.zip'), 'zip')
        self.assertEqual(tool_set.fileType('foo.oh.zip'), 'collection')
        self.assertEqual(tool_set.fileType('foo.newgate.zip'), 'collection')
def test_md5(self):
all_md5 = tool_set.md5_of_file(self.locateFile('tests/videos/sample1.mov'))
parts_md5 = tool_set.md5_of_file(self.locateFile('tests/videos/sample1.mov'),load_size=1000)
self.assertEqual(all_md5,parts_md5)
def test_filetypes(self):
self.assertTrue(("mov files", "*.mov") in tool_set.getFileTypes())
self.assertTrue(("zipped masks", "*.tgz") in tool_set.getMaskFileTypes())
def test_zip(self):
import os
filename = self.locateFile('tests/zips/raw.zip')
self.addFileToRemove(os.path.join(os.path.dirname(filename), 'raw.png'))
img = tool_set.openImage(filename,tool_set.getMilliSecondsAndFrameCount('2'),preserveSnapshot=True)
self.assertEqual((5796, 3870),img.size)
tool_set.condenseZip(filename,keep=1)
self.addFileToRemove(os.path.join(os.path.dirname(filename),'raw_c.zip'))
contents = tool_set.getContentsOfZip(os.path.join(os.path.dirname(filename),'raw_c.zip'))
self.assertTrue('59487443539401a4d83512edaab3c1b2.cr2' in contents)
self.assertTrue('7d1800a38ca7a22021bd94e71b6e0f42.cr2' in contents)
self.assertTrue(len(contents) == 2)
def test_rotate(self):
import cv2
from maskgen import cv2api
img1 = np.zeros((100,100),dtype=np.uint8)
img1[20:50,40:50] = 1
mask = np.ones((100,100),dtype=np.uint8)*255
img1[20:50,40] = 2
img = tool_set.applyRotateToCompositeImage(img1, 90, (50,50))
self.assertTrue(sum(sum(img1-img))>40)
img = tool_set.applyRotateToCompositeImage(img,-90,(50,50))
self.assertTrue(sum(sum(img1-img)) <2)
img = tool_set.applyRotateToComposite(-90, img1, np.zeros((100,100),dtype=np.uint8), img1.shape, local=True)
self.assertTrue(sum(img[40,:]) == sum(img1[:,40]))
self.assertTrue(sum(img[40, :]) == 60)
M = cv2.getRotationMatrix2D((35,45), -90, 1.0)
img = cv2.warpAffine(img1, M, (img.shape[1], img.shape[0]),
flags=cv2api.cv2api_delegate.inter_linear)
mask[abs(img - img1) > 0] = 0
img[10:15,10:15]=3
img3 = tool_set.applyRotateToComposite(90, img, mask, img1.shape, local=True)
self.assertTrue(np.all(img3[10:15,10:15]==3))
img3[10:15, 10:15] = 0
def testCropCompare(self):
import cv2
pre = tool_set.openImageFile(self.locateFile('tests/images/prefill.png')).to_array()
post = pre[10:-10,10:-10]
resized_post = cv2.resize(post, (pre.shape[1],pre.shape[0]))
mask, analysis = tool_set.cropResizeCompare(pre,resized_post, arguments={'crop width':pre.shape[1]-20,'crop height':pre.shape[0]-20})
        self.assertEqual((10, 10), tool_set.toIntTuple(analysis['location']))
def test_fileMask(self):
pre = tool_set.openImageFile(self.locateFile('tests/images/prefill.png'))
post = tool_set.openImageFile(self.locateFile('tests/images/postfill.png'))
mask,analysis,error = tool_set.createMask(pre,post,invert=False,arguments={'tolerance' : 2500})
withtolerance = sum(sum(mask.image_array))
mask.save(self.locateFile('tests/images/maskfill.png'))
mask, analysis,error = tool_set.createMask(pre, post, invert=False)
withouttolerance = sum(sum(mask.image_array))
mask, analysis ,error= tool_set.createMask(pre, post, invert=False, arguments={'tolerance': 2500,'equalize_colors':True})
mask.save(self.locateFile('tests/images/maskfillt.png'))
withtoleranceandqu = sum(sum(mask.image_array))
self.assertTrue(withouttolerance < withtolerance)
self.assertTrue(withtolerance <= withtoleranceandqu)
def test_map(self):
img1 = np.random.randint(0,255,size=(100,120)).astype('uint8')
mask = np.ones((100,120))
        src_pts = [(x, y) for x in range(20, 30) for y in range(50, 60)]
        dst_pts = [(x, y) for x in range(55, 65) for y in range(15, 25)]
        result = tool_set._remap(img1, mask, src_pts, dst_pts)
self.assertTrue(np.all(result[55:65,15:25] == img1[20:30,50:60]))
def test_time_format(self):
t = tool_set.getDurationStringFromMilliseconds(100001.111)
self.assertEqual('00:01:40.001111',t)
def test_timeparse(self):
t, f = tool_set.getMilliSecondsAndFrameCount('00:00:00')
self.assertEqual(1, f)
self.assertEqual(0, t)
t, f = tool_set.getMilliSecondsAndFrameCount('1')
self.assertEqual(1, f)
self.assertEqual(0, t)
self.assertTrue(tool_set.validateTimeString('03:10:10.434'))
t,f = tool_set.getMilliSecondsAndFrameCount('03:10:10.434')
self.assertEqual(0, f)
self.assertEqual(1690434, t)
t, f = tool_set.getMilliSecondsAndFrameCount('03:10:10:23')
self.assertFalse(tool_set.validateTimeString('03:10:10:23'))
self.assertEqual(23,f)
self.assertEqual(1690000, t)
t, f = tool_set.getMilliSecondsAndFrameCount('03:10:10:A', defaultValue=(0,0))
self.assertFalse(tool_set.validateTimeString('03:10:10:A'))
self.assertEqual((0,0), (t,f))
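        # VidTimeManager semantics, as inferred from the assertions below: a
        # (time_ms, frame) pair means "the Nth frame after time_ms", so this
        # range becomes current at the 2nd update past 1000 ms and stops being
        # current after the 4th update past 1003 ms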
time_manager = tool_set.VidTimeManager(startTimeandFrame=(1000,2),stopTimeandFrame=(1003,4))
time_manager.updateToNow(999)
self.assertTrue(time_manager.isBeforeTime())
time_manager.updateToNow(1000)
self.assertTrue(time_manager.isBeforeTime())
time_manager.updateToNow(1001)
self.assertTrue(time_manager.isBeforeTime())
time_manager.updateToNow(1002)
self.assertFalse(time_manager.isBeforeTime())
self.assertFalse(time_manager.isPastTime())
time_manager.updateToNow(1003)
self.assertFalse(time_manager.isPastTime())
time_manager.updateToNow(1004)
self.assertFalse(time_manager.isPastTime())
time_manager.updateToNow(1005)
self.assertFalse(time_manager.isPastTime())
time_manager.updateToNow(1006)
self.assertFalse(time_manager.isPastTime())
time_manager.updateToNow(1007)
self.assertFalse(time_manager.isPastTime())
time_manager.updateToNow(1008)
self.assertTrue(time_manager.isPastTime())
self.assertEqual(9,time_manager.getEndFrame() )
self.assertEqual(4, time_manager.getStartFrame())
time_manager = tool_set.VidTimeManager(startTimeandFrame=(999, 2), stopTimeandFrame=None)
time_manager.updateToNow(999)
self.assertTrue(time_manager.isBeforeTime())
time_manager.updateToNow(1000)
self.assertTrue(time_manager.isBeforeTime())
time_manager.updateToNow(1001)
self.assertFalse(time_manager.isBeforeTime())
self.assertEqual(3, time_manager.getEndFrame())
self.assertEqual(3, time_manager.getStartFrame())
def test_opacity_analysis(self):
# need to redo with generated data.
initialImage = image_wrap.openImageFile(self.locateFile('tests/images/pre_blend.png'))
finalImage = image_wrap.openImageFile(self.locateFile('tests/images/post_blend.png'))
mask = image_wrap.openImageFile(self.locateFile('tests/images/blend_mask.png'))
donorMask = image_wrap.openImageFile(self.locateFile('tests/images/donor_to_blend_mask.png'))
donorImage = image_wrap.openImageFile(self.locateFile('tests/images/donor_to_blend.png'))
result = tool_set.generateOpacityImage(initialImage.to_array(), donorImage.to_array(), finalImage.to_array(), mask.to_array(),
donorMask.to_array(),None)
min = np.min(result)
max = np.max(result)
result = (result - min)/(max-min) * 255.0
def test_gray_writing(self):
import os
import sys
import time
        s = time.perf_counter()  # time.clock() was removed in Python 3.8
writer = tool_set.GrayBlockWriter('test_ts_gw', 29.97002997)
mask_set = list()
for i in range(255):
mask = np.random.randint(255, size=(1090, 1920)).astype('uint8')
mask_set.append(mask)
writer.write(mask, 33.3666666667*i,i+1)
for i in range(300,350):
mask = np.random.randint(255, size=(1090, 1920)).astype('uint8')
mask_set.append(mask)
writer.write(mask, 33.3666666667*i, i + 1)
writer.close()
fn = writer.get_file_name()
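        # 255 masks were written for frames 1-255 and 50 more for frames 301-350;
        # the reader should return all 305 in order, with the frame counter
        # jumping to 301 at the gap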
reader = tool_set.GrayBlockReader(fn, end_frame=305)
pos = 0
while True:
mask = reader.read()
if mask is None:
break
compare = mask == mask_set[pos]
self.assertEqual(mask.size,sum(sum(compare)))
if pos == 255:
self.assertEqual(301,reader.current_frame()-1)
pos += 1
reader.close()
self.assertEqual(305, pos)
        print(time.perf_counter() - s)
suffix = 'm4v'
if sys.platform.startswith('win'):
suffix = 'avi'
filename = tool_set.convertToVideo(fn)
        self.assertEqual('test_ts_gw_mask_0.0.' + suffix, filename)
self.assertTrue(os.path.exists(filename))
size = tool_set.openImage(filename, tool_set.getMilliSecondsAndFrameCount('00:00:01')).size
self.assertTrue(size == (1920,1090))
os.remove(filename)
os.remove(fn)
def test_global_transform_analysis(self):
from maskgen.image_wrap import ImageWrapper
analysis = {}
mask = np.random.randint(0,2,(1000, 1000), dtype=np.uint8)
mask[mask>0] = 255
tool_set.globalTransformAnalysis(analysis, ImageWrapper(mask), ImageWrapper(mask), mask=mask,
linktype='image.image',
arguments={}, directory='.')
        self.assertEqual('yes', analysis['global'])
mask = np.zeros((1000,1000),dtype=np.uint8)
mask[0:30,0:30] = 255
tool_set.globalTransformAnalysis(analysis, ImageWrapper(mask), ImageWrapper(mask), mask=mask, linktype='image.image', arguments={}, directory='.')
        self.assertEqual('no', analysis['global'])
        self.assertEqual('small', analysis['change size category'])
mask = np.zeros((1000, 1000), dtype=np.uint8)
mask[0:75, 0:75] = 255
tool_set.globalTransformAnalysis(analysis, ImageWrapper(mask), ImageWrapper(mask), mask=mask, linktype='image.image',
arguments={}, directory='.')
        self.assertEqual('no', analysis['global'])
        self.assertEqual('medium', analysis['change size category'])
mask[0:100, 0:100] = 255
tool_set.globalTransformAnalysis(analysis, ImageWrapper(mask), ImageWrapper(mask), mask=mask, linktype='image.image',
arguments={}, directory='.')
        self.assertEqual('no', analysis['global'])
        self.assertEqual('large', analysis['change size category'])
def test_SIFT(self):
from maskgen.image_wrap import ImageWrapper
img1 = ImageWrapper(np.random.randint(0,255,(4000,5000,3),dtype='uint8'))
img2 = ImageWrapper(np.random.randint(0,255,(8000,8000,3),dtype='uint8'))
img2.image_array[1000:2000,1000:2000,:] = img1.image_array[2000:3000,2000:3000,:]
mask1 = ImageWrapper(np.zeros((4000,5000),dtype='uint8'))
mask1.image_array[2000:3000,2000:3000] = 255
mask2 = ImageWrapper(np.zeros((8000, 8000), dtype='uint8'))
mask2.image_array[1000:2000,1000:2000] = 255
features = tool_set.getMatchedSIFeatures(img1, img2, mask1=mask1, mask2=mask2, arguments={'homography max matches': '2000', 'homography': 'RANSAC-4'})
img1 = ImageWrapper(np.random.randint(0, 65535, (4000, 5000, 3), dtype='uint16'))
img2 = ImageWrapper(np.random.randint(0, 65535, (8000, 8000, 3), dtype='uint16'))
img2.image_array[1000:2000, 1000:2000, :] = img1.image_array[2000:3000, 2000:3000, :]
mask1 = ImageWrapper(np.zeros((4000, 5000), dtype='uint8'))
mask1.image_array[2000:3000, 2000:3000] = 255
mask2 = ImageWrapper(np.zeros((8000, 8000), dtype='uint8'))
mask2.image_array[1000:2000, 1000:2000] = 255
features = tool_set.getMatchedSIFeatures(img1, img2, mask1=mask1, mask2=mask2, arguments={'homography max matches': '2000', 'homography': 'RANSAC-4'})
def testSIFCheck(self):
good_transform = {
'c': 3,
'r': 3,
'r0': [0.00081380729604268976, -1.0000367374350523, 449.94975699899271],
'r1': [1.0031702728345473, 0.0016183966076946312, -0.30844081957395447],
'r2': [3.1676664384933143e-06, 9.8915322781393527e-06, 1.0]
}
bad_transform = {
"c": 3,
"r": 3,
"r0": [-3.0764931522976067, 3.2522108810844577, 6167.618028229406],
"r1": [-1.0467579456165736, 1.1073481736839244, 2098.303251843684],
"r2": [-0.0004988685498607748, 0.0005275910530971817, 1.0]
}
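        # the "good" matrix is close to a pure 90-degree rotation plus translation;
        # the "bad" one has large scale/shear terms and should be rejected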
self.assertTrue(tool_set.isHomographyOk(tool_set.deserializeMatrix(good_transform),450,450))
self.assertFalse(tool_set.isHomographyOk( tool_set.deserializeMatrix(bad_transform),8000,5320))
def test_time_stamp(self):
v1 = self.locateFile('tests/images/test.png')
v2 = self.locateFile('tests/images/donor_to_blend.png')
v3 = self.locateFile('tests/images/test_time_change.png')
self.assertTrue(len(tool_set.dateTimeStampCompare(v1, v1))==0)
self.assertFalse(len(tool_set.dateTimeStampCompare(v1, v2))==0)
self.assertTrue(len(tool_set.dateTimeStampCompare(v1, v3))==0)
def test_compare(self):
from maskgen import tool_set
wrapper1 = image_wrap.openImageFile(self.locateFile('tests/images/pre_blend.png'))
arr2 = np.copy(wrapper1.image_array)
for x in np.random.randint(1,arr2.shape[0]-1,100):
for y in np.random.randint(1, arr2.shape[1] - 1, 100):
arr2[x,y,1] = arr2[x,y,1] + np.random.randint(-20,20)
arr2[100:200,100:200,2] = arr2[100:200,100:200,2] - 25
wrapper2 = image_wrap.ImageWrapper(arr2)
args = [{'aggregate': 'luminance', 'minimum threshold': 3, "weight": 4},
{'aggregate': 'luminance', 'minimum threshold': 3, "weight": 1},
{'aggregate': 'max'}]
for arg in args:
result = tool_set.mediatedCompare(wrapper1.to_array().astype('int16'),
wrapper2.to_array().astype('int16'),
arguments=arg)
self.assertTrue(np.all(result[0][100:200,100:200] == 255))
result[0][100:200, 100:200] = 0
self.assertTrue(np.all(result[0] == 0))
if __name__ == '__main__':
    unittest.main() | tests/test_tool_set.py |
from pyecharts import Bar, Kline, Map, Pie, WordCloud
class ChartFactory:
def __init__(self):
self._func = {}
self._charts = {}
def collect(self, name):
def _inject(func):
self._func[name] = func
return func
return _inject
def create(self, name):
if name in self._func:
chart = self._func[name]()
return chart
else:
raise ValueError('No Chart builder for {}'.format(name))
FACTORY = ChartFactory()
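# Usage sketch: each builder below registers itself under a name via the
# decorator, so charts are created by key, e.g. FACTORY.create('bar').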
@FACTORY.collect('bar')
def create_simple_bar():
bar = Bar("我的第一个图表", "这里是副标题")
bar.add("服装", ["衬衫", "羊毛衫", "雪纺衫", "裤子", "高跟鞋", "袜子"], [5, 20, 36, 10, 75, 90])
bar.renderer = 'svg'
return bar
@FACTORY.collect('kline')
def create_simple_kline():
v1 = [[2320.26, 2320.26, 2287.3, 2362.94],
[2300, 2291.3, 2288.26, 2308.38],
[2295.35, 2346.5, 2295.35, 2345.92],
[2347.22, 2358.98, 2337.35, 2363.8],
[2360.75, 2382.48, 2347.89, 2383.76],
[2383.43, 2385.42, 2371.23, 2391.82],
[2377.41, 2419.02, 2369.57, 2421.15],
[2425.92, 2428.15, 2417.58, 2440.38],
[2411, 2433.13, 2403.3, 2437.42],
[2432.68, 2334.48, 2427.7, 2441.73],
[2430.69, 2418.53, 2394.22, 2433.89],
[2416.62, 2432.4, 2414.4, 2443.03],
[2441.91, 2421.56, 2418.43, 2444.8],
[2420.26, 2382.91, 2373.53, 2427.07],
[2383.49, 2397.18, 2370.61, 2397.94],
[2378.82, 2325.95, 2309.17, 2378.82],
[2322.94, 2314.16, 2308.76, 2330.88],
[2320.62, 2325.82, 2315.01, 2338.78],
[2313.74, 2293.34, 2289.89, 2340.71],
[2297.77, 2313.22, 2292.03, 2324.63],
[2322.32, 2365.59, 2308.92, 2366.16],
[2364.54, 2359.51, 2330.86, 2369.65],
[2332.08, 2273.4, 2259.25, 2333.54],
[2274.81, 2326.31, 2270.1, 2328.14],
[2333.61, 2347.18, 2321.6, 2351.44],
[2340.44, 2324.29, 2304.27, 2352.02],
[2326.42, 2318.61, 2314.59, 2333.67],
[2314.68, 2310.59, 2296.58, 2320.96],
[2309.16, 2286.6, 2264.83, 2333.29],
[2282.17, 2263.97, 2253.25, 2286.33],
[2255.77, 2270.28, 2253.31, 2276.22]]
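    # each row is [open, close, lowest, highest], per the ECharts candlestick convention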
kline = Kline("K 线图示例")
kline.add("日K", ["2017/7/{}".format(i + 1) for i in range(31)], v1)
kline.renderer = 'svg'
return kline
@FACTORY.collect('map')
def create_simple_map():
value = [155, 10, 66, 78]
attr = ["福建", "山东", "北京", "上海"]
map1 = Map("全国地图示例", width=1200, height=600)
map1.add("", attr, value, maptype='china')
map1.renderer = 'svg'
return map1
@FACTORY.collect('pie')
def create_simple_pie():
    pie = Pie('各类电影中"好片"所占的比例', "数据来自豆瓣", title_pos='center')
    # title: the share of "good films" in each genre; subtitle: "data from Douban"
    # genres below: 剧情 drama, 奇幻 fantasy, 爱情 romance, 惊悚 thriller, 冒险 adventure,
    # 动作 action, 喜剧 comedy, 科幻 sci-fi, 悬疑 mystery, 犯罪 crime
pie.add("", ["剧情", ""], [25, 75], center=[10, 30], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None, )
pie.add("", ["奇幻", ""], [24, 76], center=[30, 30], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None, legend_pos='left')
pie.add("", ["爱情", ""], [14, 86], center=[50, 30], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None)
pie.add("", ["惊悚", ""], [11, 89], center=[70, 30], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None)
pie.add("", ["冒险", ""], [27, 73], center=[90, 30], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None)
pie.add("", ["动作", ""], [15, 85], center=[10, 70], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None)
pie.add("", ["喜剧", ""], [54, 46], center=[30, 70], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None)
pie.add("", ["科幻", ""], [26, 74], center=[50, 70], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None)
pie.add("", ["悬疑", ""], [25, 75], center=[70, 70], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None)
pie.add("", ["犯罪", ""], [28, 72], center=[90, 70], radius=[18, 24],
label_pos='center', is_label_show=True, label_text_color=None, is_legend_show=True, legend_top="center")
pie.renderer = 'svg'
return pie
@FACTORY.collect('word_cloud')
def create_word_cloud():
name = [
'<NAME>', 'Macys', '<NAME>', '<NAME>', 'Charter Communications',
'<NAME>', 'Planet Fitness', 'Pitch Perfect', 'Express', 'Home', '<NAME>',
'<NAME>', '<NAME>', 'KXAN', '<NAME>', '<NAME>',
'<NAME>', '<NAME>', 'NCAA baseball tournament', 'Point Break']
value = [
10000, 6181, 4386, 4055, 2467, 2244, 1898, 1484, 1112,
965, 847, 582, 555, 550, 462, 366, 360, 282, 273, 265]
wordcloud = WordCloud(width=800, height=400)
wordcloud.add("", name, value, word_size_range=[20, 100])
return wordcloud | example/demo/demo_data.py |
from django.views.generic import RedirectView, FormView
from django.contrib.auth import authenticate, login
from django.http import Http404
from django.shortcuts import redirect
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured, PermissionDenied
from django.contrib.auth.models import User
from registration.models import RegistrationProfile
from registration.backends.default.views import RegistrationView
from account.forms import RegistrationFormNameAndUniqueEmail
from account.forms import UserProfileForm, SetPasswordForm
from account.models import UserProfile
from sabot.views import JobProcessingView
from sponsor.views import id_generator
class GenerateAuthTokenView(JobProcessingView):
next_view = "auth_user_list"
def process_job(self):
try:
user = User.objects.get(pk=self.kwargs["pk"])
except User.DoesNotExist:
raise Http404
try:
up = UserProfile.objects.get(user=user)
except UserProfile.DoesNotExist:
up = UserProfile(user=user)
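        # issue a fresh 24-character token, replacing any existing one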
up.authToken = id_generator(24)
up.save()
return True
class TokenLoginView(RedirectView):
permanent = False
def get_redirect_url(self, **kwargs):
user = authenticate(token = kwargs["token"])
if user is not None:
if user.is_active:
login(self.request, user)
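                # "next" comes unvalidated from the query string; it should be
                # restricted to safe same-origin URLs to avoid open redirects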
return self.request.GET.get("next","/")
raise Http404
class UserProfileView(FormView):
template_name = "registration/profile.html"
form_class = UserProfileForm
def get_initial(self):
return {
"firstName" : self.request.user.first_name,
"lastName" : self.request.user.last_name,
"email" : self.request.user.email,
}
def form_valid(self, form):
user = self.request.user
user.first_name = form.cleaned_data["firstName"]
user.last_name = form.cleaned_data["lastName"]
user.email = form.cleaned_data["email"]
user.save()
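        # re-renders the same form (no redirect) so the saved values stay on
        # screen; note that form_invalid is reused here for the success path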
return self.form_invalid(form)
class ActivateAndSetPWView(FormView):
form_class = SetPasswordForm
template_name = "registration/activate_with_pw.html"
invalid_template_name = "registration/activate.html"
def get(self, request, *args, **kwargs):
# check if activation link is ok, otherwise link to invalid
try:
profile = RegistrationProfile.objects.get(activation_key=kwargs["activation_key"])
return super(ActivateAndSetPWView, self).get(request, *args, **kwargs)
except RegistrationProfile.DoesNotExist:
return self.response_class(
request = self.request,
template = self.invalid_template_name,
context = {})
def form_valid(self, form):
try:
profile = RegistrationProfile.objects.get(activation_key=self.kwargs["activation_key"])
profile.user.set_password(form.cleaned_data["<PASSWORD>"])
profile.user.save()
RegistrationProfile.objects.activate_user(self.kwargs["activation_key"])
return redirect(reverse("auth_login"))
except RegistrationProfile.DoesNotExist:
            raise Http404 | account/views.py |
import unittest
import numpy as np
from phdTester.functions import DataFrameFunctionsDict
class MyTestCase(unittest.TestCase):
def test_01(self):
df = DataFrameFunctionsDict()
df.update_function_point("f1", 4, 2)
df.update_function_point("f1", 3, 5)
self.assertEqual(list(df.get_ordered_x_axis("f1")), [3, 4])
def test_02(self):
df = DataFrameFunctionsDict()
df.update_function_point("f1", 4, 2)
df.update_function_point("f2", 3, 5)
self.assertEqual(df.contains_function("f1"), True)
self.assertEqual(df.contains_function("f2"), True)
self.assertEqual(df.contains_function("f3"), False)
self.assertEqual(df.contains_function_point("f1", 4), True)
self.assertEqual(df.contains_function_point("f1", 3), False)
self.assertEqual(df.contains_function_point("f2", 4), False)
self.assertEqual(df.contains_function_point("f2", 3), True)
self.assertEqual(list(df.get_ordered_x_axis("f1")), [4])
self.assertEqual(list(df.get_ordered_x_axis("f2")), [3])
self.assertEqual(list(df.get_ordered_xy("f1")), [(4, 2)])
self.assertEqual(list(df.get_ordered_xy("f2")), [(3, 5)])
def test_03(self):
df = DataFrameFunctionsDict()
df.update_function_point("f1", 4, 2)
df.update_function_point("f1", 3, 5)
df.update_function_point("f1", 2, 10)
self.assertEqual(list(df.get_ordered_x_axis("f1")), [2, 3, 4])
df.update_function_point("f2", 4, 2)
df.update_function_point("f2", 5, 2)
df.update_function_point("f2", 6, 2)
self.assertEqual(list(df.get_ordered_x_axis("f1")), [2, 3, 4])
self.assertEqual(list(df.get_ordered_x_axis("f2")), [4, 5, 6])
df.remove_function_point("f2", 5)
self.assertEqual(list(df.get_ordered_x_axis("f1")), [2, 3, 4])
self.assertEqual(list(df.get_ordered_x_axis("f2")), [4, 6])
df.remove_function_point("f1", 4)
self.assertEqual(list(df.get_ordered_x_axis("f1")), [2, 3])
self.assertEqual(list(df.get_ordered_x_axis("f2")), [4, 6])
def test_04(self):
df = DataFrameFunctionsDict()
df.update_function_point("f1", 4, 2)
df.update_function_point("f1", 3, 5)
df.update_function_point("f1", 2, 10)
df.update_function_point("f2", 4, 0)
df.update_function_point("f2", 5, 5)
df.update_function_point("f2", 7, 15)
data = df.get_statistics("f1")
self.assertEqual(data.count, 3)
self.assertEqual(data.min, 2)
self.assertEqual(data.max, 10)
self.assertEqual(data.mean, (2+5+10)/3)
self.assertEqual(data.median, 5)
self.assertEqual(data.lower_percentile, 3.5)
self.assertEqual(data.upper_percentile, 7.5)
data = df.get_statistics("f2")
self.assertEqual(data.count, 3)
self.assertEqual(data.min, 0)
self.assertEqual(data.max, 15)
self.assertEqual(data.mean, (0 + 5 + 15) / 3)
self.assertEqual(data.median, 5)
self.assertEqual(data.lower_percentile, 2.5)
self.assertEqual(data.upper_percentile, 10)
def test_05(self):
df = DataFrameFunctionsDict()
df.update_function_point("f1", 4, 2)
df.update_function_point("f1", 3, 5)
df.update_function_point("f1", 2, 10)
df.update_function_point("f2", 4, 0)
df.update_function_point("f2", 5, 5)
df.update_function_point("f2", 7, 15)
statistic_dict = df.get_all_statistics()
self.assertEqual(statistic_dict['f1'].count, 3)
self.assertEqual(statistic_dict['f1'].min, 2)
self.assertEqual(statistic_dict['f1'].max, 10)
self.assertEqual(statistic_dict['f1'].mean, (2 + 5 + 10) / 3)
self.assertEqual(statistic_dict['f1'].median, 5)
self.assertEqual(statistic_dict['f1'].lower_percentile, 3.5)
self.assertEqual(statistic_dict['f1'].upper_percentile, 7.5)
self.assertEqual(statistic_dict['f2'].count, 3)
self.assertEqual(statistic_dict['f2'].min, 0)
self.assertEqual(statistic_dict['f2'].max, 15)
self.assertEqual(statistic_dict['f2'].mean, (0 + 5 + 15) / 3)
self.assertEqual(statistic_dict['f2'].median, 5)
self.assertEqual(statistic_dict['f2'].lower_percentile, 2.5)
self.assertEqual(statistic_dict['f2'].upper_percentile, 10)
def test_06_replace_infinites_with_inplace(self):
df = DataFrameFunctionsDict()
df.update_function_point("f1", 4, 2)
df.update_function_point("f1", 3, np.inf)
df.update_function_point("f1", 2, 10)
df.update_function_point("f2", 4, 0)
df.update_function_point("f2", 5, 5)
df.update_function_point("f2", 7, 15)
expected_df = DataFrameFunctionsDict()
expected_df.update_function_point("f1", 4, 2)
expected_df.update_function_point("f1", 3, 10)
expected_df.update_function_point("f1", 2, 10)
expected_df.update_function_point("f2", 4, 0)
expected_df.update_function_point("f2", 5, 5)
expected_df.update_function_point("f2", 7, 15)
df2 = df.replace_infinites_with(to_value=10, inplace=True)
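        # with inplace=True the original df is mutated and also returned, so the
        # replacement is visible through both df and df2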
self.assertEqual(df2, expected_df)
self.assertEqual(df.get_function_y("f1", 3), 10)
def test_07_replace_infinites_without_inplace(self):
df = DataFrameFunctionsDict()
df.update_function_point("f1", 4, 2)
df.update_function_point("f1", 3, np.inf)
df.update_function_point("f1", 2, 10)
df.update_function_point("f2", 4, 0)
df.update_function_point("f2", 5, 5)
df.update_function_point("f2", 7, 15)
expected_df = DataFrameFunctionsDict()
expected_df.update_function_point("f1", 4, 2)
expected_df.update_function_point("f1", 3, 10)
expected_df.update_function_point("f1", 2, 10)
expected_df.update_function_point("f2", 4, 0)
expected_df.update_function_point("f2", 5, 5)
expected_df.update_function_point("f2", 7, 15)
df2 = df.replace_infinites_with(to_value=10, inplace=False)
self.assertEqual(df2, expected_df)
self.assertEqual(df.get_function_y("f1", 3), np.inf)
self.assertEqual(df2.get_function_y("f1", 3), 10)
if __name__ == '__main__':
    unittest.main() | PhdTester/phdTester/tests/test_dataframe_function_dict.py |
import datetime
import argparse
import logging
from os import listdir
from os.path import join
from sys import exit
import xml.etree.ElementTree as ET
import wptools
from model import Mention, Entry, LinkedMention
from ner import detect as apply_ner
# create logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
logger.addHandler(ch)
class MentionDetector:
IGNORED_ENTITY_TYPES = {"ORDINAL", "NUMBER", "DATE", "PERCENT", "MONEY", "DURATION", "CAUSE_OF_DEATH", "SET",
"TIME", "URL", "IDEOLOGY", "CRIMINAL_CHARGE",
"RELIGION", # Ver si no se toma como ORG
"TITLE"} # Title is an special case, because given coreference I should use it
# but for this baseline, it doesn't make sense
def __init__(self, file_name):
self.file_name = file_name
self.doc_id = None
def get_mentions(self):
tree = ET.parse(self.file_name)
doc = tree.getroot()[0]
self.doc_id = doc.find("docId").text
result = []
previous_type = None
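        # consecutive tokens that share the same NER tag are merged into a single
        # multi-token mention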
for sentence in doc.find("sentences"):
            for token in sentence[0]:
                entity_type = token.find("NER").text
                if entity_type != 'O' and entity_type not in MentionDetector.IGNORED_ENTITY_TYPES:
                    if entity_type == previous_type:
                        head_string, end_offset = self.get_head_string_and_offset(token)
                        result[-1].add(head_string, end_offset)
                    else:
                        mention = self.create_mention(token)
                        result.append(mention)
                    previous_type = entity_type
                else:
                    # reset, so that two same-typed mentions separated by other
                    # tokens are not merged into one
                    previous_type = None
        return result
def create_mention(self, token):
head_string, end = self.get_head_string_and_offset(token)
begin = token.find("CharacterOffsetBegin").text
entity_type = token.find("NER").text
return Mention(head_string, self.doc_id, begin, end, entity_type)
def get_head_string_and_offset(self, token):
head_string = token.find("word").text
        end = str(int(token.find("CharacterOffsetEnd").text) - 1)  # StanfordNER end offsets are exclusive (one past the last character)
return head_string, end
def link_mentions(mentions):
result = []
for mention in mentions:
try:
entry = Entry(wptools.page(mention.head_string.lower()).get_query())
except LookupError:
entry = Entry(None)
result.append(LinkedMention(mention, entry))
return result
def get_run_id():
now = datetime.datetime.now()
return now.strftime("%Y%m%d-%H%M%S")
def export_linked_mentions(file_name, linked_mentions):
run_id = get_run_id()
with open(file_name + ".tab", "w+") as f:
for linked_mention in linked_mentions:
line = "{}\t{}\n".format(run_id, str(linked_mention))
logger.debug(line)
f.write(line)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Executes NEL Baseline.')
parser.add_argument('-r', '--raw', help='Input raw text file')
parser.add_argument('-x', '--xml', help='Input NER xml file')
parser.add_argument('-xd', '--xmldir', help='Input NER xml directory')
args = parser.parse_args()
if args.raw is None and args.xml is None and args.xmldir is None:
parser.print_usage()
exit(0)
ner_files = []
if args.raw is not None:
logger.info("Applying NER on file {}".format(args.raw))
ner_files.append(apply_ner(args.raw))
if args.xml is not None:
ner_files.append(args.xml)
if args.xmldir is not None:
ner_files.extend([join(args.xmldir, filename) for filename in listdir(args.xmldir)])
logger.info("Detecting mentions from XML Files")
mentions = []
for ner_file in ner_files:
md = MentionDetector(ner_file)
mentions.extend(md.get_mentions())
logger.info("Linking mentions to wikipedia articles")
linked_mentions = link_mentions(mentions)
logger.info("Exporting mentions to tab file")
export_linked_mentions("res-"+get_run_id(), linked_mentions) | main.py | import datetime
import argparse
import logging
from os import listdir
from os.path import join
from sys import exit
import xml.etree.ElementTree as ET
import wptools
from model import Mention, Entry, LinkedMention
from ner import detect as apply_ner
# create logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# create console handler and set level to debug
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
logger.addHandler(ch)
class MentionDetector:
IGNORED_ENTITY_TYPES = {"ORDINAL", "NUMBER", "DATE", "PERCENT", "MONEY", "DURATION", "CAUSE_OF_DEATH", "SET",
"TIME", "URL", "IDEOLOGY", "CRIMINAL_CHARGE",
"RELIGION", # Ver si no se toma como ORG
"TITLE"} # Title is an special case, because given coreference I should use it
# but for this baseline, it doesn't make sense
def __init__(self, file_name):
self.file_name = file_name
self.doc_id = None
def get_mentions(self):
tree = ET.parse(self.file_name)
doc = tree.getroot()[0]
self.doc_id = doc.find("docId").text
result = []
previous_type = None
for sentence in doc.find("sentences"):
for token in sentence[0]:
entity_type = token.find("NER").text
if entity_type != 'O' and entity_type not in MentionDetector.IGNORED_ENTITY_TYPES:
if entity_type == previous_type:
head_string, end_offset = self.get_head_string_and_offset(token)
result[-1].add(head_string, end_offset)
else:
mention = self.create_mention(token)
result.append(mention)
previous_type = entity_type
return result
def create_mention(self, token):
head_string, end = self.get_head_string_and_offset(token)
begin = token.find("CharacterOffsetBegin").text
entity_type = token.find("NER").text
return Mention(head_string, self.doc_id, begin, end, entity_type)
def get_head_string_and_offset(self, token):
head_string = token.find("word").text
end = str(int(token.find("CharacterOffsetEnd").text) - 1) # StanfordNER offsets is exclusive (adds one extra char at the end)
return head_string, end
def link_mentions(mentions):
result = []
for mention in mentions:
try:
entry = Entry(wptools.page(mention.head_string.lower()).get_query())
except LookupError:
entry = Entry(None)
result.append(LinkedMention(mention, entry))
return result
def get_run_id():
now = datetime.datetime.now()
return now.strftime("%Y%m%d-%H%M%S")
def export_linked_mentions(file_name, linked_mentions):
run_id = get_run_id()
with open(file_name + ".tab", "w+") as f:
for linked_mention in linked_mentions:
line = "{}\t{}\n".format(run_id, str(linked_mention))
logger.debug(line)
f.write(line)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Executes NEL Baseline.')
parser.add_argument('-r', '--raw', help='Input raw text file')
parser.add_argument('-x', '--xml', help='Input NER xml file')
parser.add_argument('-xd', '--xmldir', help='Input NER xml directory')
args = parser.parse_args()
if args.raw is None and args.xml is None and args.xmldir is None:
parser.print_usage()
exit(0)
ner_files = []
if args.raw is not None:
logger.info("Applying NER on file {}".format(args.raw))
ner_files.append(apply_ner(args.raw))
if args.xml is not None:
ner_files.append(args.xml)
if args.xmldir is not None:
ner_files.extend([join(args.xmldir, filename) for filename in listdir(args.xmldir)])
logger.info("Detecting mentions from XML Files")
mentions = []
for ner_file in ner_files:
md = MentionDetector(ner_file)
mentions.extend(md.get_mentions())
logger.info("Linking mentions to wikipedia articles")
linked_mentions = link_mentions(mentions)
logger.info("Exporting mentions to tab file")
export_linked_mentions("res-"+get_run_id(), linked_mentions) | 0.266071 | 0.108756 |
import os
import rclpy
from rclpy.node import Node
from thymio2_interfaces.msg import Thymio2Controller
from thymio2_interfaces.srv import Thymio2ControllerSrv
from thymio2_interfaces.srv import Thymio2MotorSrv
import dbus
import dbus.mainloop.glib
from gi.repository import GObject as gobject
from gi.repository import GLib as glib
class Thymio2ControllerNode(Node): # MODIFY NAME
def __init__(self):
super().__init__("Thymio2ControllerNode")
self.declare_parameter("dbus_config", "session")
# "session" = Use the standard configuration file for the per-login-session message bus
# "system" = Use the standard configuration file for the systemwide message bus.
self.thymio2_status_publisher_ = self.create_publisher(Thymio2Controller, "ThymioControllerPublisher", 10)
self.thymio2_controller_service_ = self.create_service(Thymio2ControllerSrv, "ThymioControllerService", self.callback_thymio2_controller)
self.thymio2_motor_service_ = self.create_service(Thymio2MotorSrv, "Thymio2MotorSrv", self.callback_thymio2_motors)
self.counter_ = 0
self.timer_ = self.create_timer(0.5, self.publish_thymio_status)
self.get_logger().info("Thymio2Controller publisher has been started.")
if self.get_parameter("dbus_config").value == "session":
bus = dbus.SessionBus()
self.get_logger().info("Using DBUS config " + str(self.get_parameter("dbus_config").value))
else:
bus = dbus.SystemBus()
self.get_logger().info("Using DBUS config " + str(self.get_parameter("dbus_config").value))
try:
asebaNetworkObject = bus.get_object('ch.epfl.mobots.Aseba', '/')
self.asebaNetwork = dbus.Interface(asebaNetworkObject, dbus_interface='ch.epfl.mobots.AsebaNetwork')
self.get_logger().info("ASEBA Network Nodes: " + str(self.asebaNetwork.GetNodesList()))
except dbus.exceptions.DBusException:
self.get_logger().info("Can not connect to Aseba DBus services! Is asebamedulla running?")
def callback_thymio2_controller(self, request, response):
if request.data:
self.counter_ = 0
response.success = True
response.log_message = "Counter has been reset"
else:
response.success = False
response.log_message = "Counter has not been reset"
return response
def callback_thymio2_motors(self, request, response):
response.log_message = "OK"
self.get_logger().info("Setting motor.left/right.target to " + str(request.motor_left_target) + "/" +str(request.motor_right_target))
try:
self.asebaNetwork.SetVariable("thymio-II", "motor.left.target", [int(request.motor_left_target)])
self.asebaNetwork.SetVariable("thymio-II", "motor.right.target", [int(request.motor_right_target)])
except dbus.exceptions.DBusException:
self.get_logger().error("DBUS Exception: Could not set motor targets")
response.log_message = "ERROR"
return response
def publish_thymio_status(self):
msg = Thymio2Controller()
msg.acc = self.asebaNetwork.GetVariable("thymio-II", "acc")
msg.prox_horizontal = self.asebaNetwork.GetVariable("thymio-II", "prox.horizontal")
msg.prox_ground_ambiant = self.asebaNetwork.GetVariable("thymio-II", "prox.ground.ambiant")
msg.prox_ground_delta = self.asebaNetwork.GetVariable("thymio-II", "prox.ground.delta")
msg.prox_ground_reflected = self.asebaNetwork.GetVariable("thymio-II", "prox.ground.reflected")
msg.sd_present = bool(self.asebaNetwork.GetVariable("thymio-II", "sd.present"))
msg.debug_message = str(self.asebaNetwork.GetNodesList())
self.thymio2_status_publisher_.publish(msg)
# initialize asebamedulla in the background and wait 0.3 s to let it start up:
# os.system("(asebamedulla ser:name=Thymio-II &) && sleep 0.3")
# show running asebamedulla daemons: run `ps afux | grep asebamedulla`
# stop asebamedulla: run `pkill -n asebamedulla`
def main(args=None):
rclpy.init(args=args)
node = Thymio2ControllerNode() # MODIFY NAME
rclpy.spin(node)
rclpy.shutdown()
if __name__ == "__main__":
    main() | ros2_ws/src/thymio2_ros2_bridge_py_pkg/thymio2_ros2_bridge_py_pkg/thymio2_controler_node.py | 0.305386 | 0.126434 |
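A minimal client for the motor service defined above; it assumes the Thymio2MotorSrv request carries the two motor_*_target fields read in callback_thymio2_motors (the .srv definition itself is not shown here):

import rclpy
from rclpy.node import Node
from thymio2_interfaces.srv import Thymio2MotorSrv

def set_motor_targets(left, right):
    rclpy.init()
    node = Node("thymio2_motor_client")
    client = node.create_client(Thymio2MotorSrv, "Thymio2MotorSrv")
    if not client.wait_for_service(timeout_sec=2.0):
        node.get_logger().error("Motor service not available")
    else:
        request = Thymio2MotorSrv.Request()
        request.motor_left_target = left
        request.motor_right_target = right
        future = client.call_async(request)
        rclpy.spin_until_future_complete(node, future)
        node.get_logger().info("Service replied: " + future.result().log_message)
    node.destroy_node()
    rclpy.shutdown()

if __name__ == "__main__":
    set_motor_targets(100, 100)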
import os
import re
import json
import time
import subprocess
import threading
from datetime import datetime
import psutil
import requests
TEST_SERVER_HOSTS = ['192.168.40.215', '192.168.40.91']
TEST_SERVER_PORT = 8999
TEST_REQ_TMPL = 'http://%(host)s:%(port)d/test'
APP_SERVER_IP = '192.168.3.235'
APP_SERVER_PATH_TMPL = 'http://%(ip)s:%(port)d/hello'
TARGET_REQUEST = {
'path_tmpl': '',
'headers': {
},
'params' : {
}
}
SECONDS = 10
CONCURRENTS = [400, 600, 800, 1000, 1600]
PROCESSES_LST = [1, 4, 8, 16, 32]
HEADERS = {'Content-type': 'application/json', 'Accept': 'text/plain'}
REGEXPS = {
'availability(%)' : r'^Availability.*\b(\d+\.\d+)\b.*',
'transaction-rate(trans/sec)': r'^Transaction rate.*\b(\d+\.\d+)\b.*'
}
SUMMARY = {
'INFO': {
'TAG' : 'None',
'SECONDS': SECONDS,
'CONCURRENTS': CONCURRENTS,
'PROCESSES_LST': PROCESSES_LST,
'TEST_SERVER_HOSTS': TEST_SERVER_HOSTS,
'APP_SERVER_IP' : APP_SERVER_IP
},
'tests': [
{
'app': 'test_http.go',
'cmd_tmpl': './webapps/test_http.bin -port=%(port)d -size=%(processes)d 2>/dev/null 1>/dev/null',
'port' : 9001,
'results': []
},
{
'app': 'test_martini.go',
'cmd_tmpl': './webapps/test_martini.bin -port=%(port)d -size=%(processes)d 2>/dev/null 1>/dev/null',
'port': 9002,
'results': []
},
{
'app': 'test_tornado.py',
'port': 8001,
'cmd_tmpl': './webapps/test_tornado.py --port=%(port)d --processes=%(processes)d 2>/dev/null 1>/dev/null',
'results': []
},
{
'app': 'test_webpy_gevent.py',
'port': 8002,
'cmd_tmpl': 'cd webapps && gunicorn -k gevent -w %(processes)d -b 0.0.0.0:%(port)d test_webpy_gevent:wsgiapp 2>/dev/null 1>/dev/null',
'results': []
}
]
}
time_now = lambda: datetime.now().strftime("%m-%d_%H:%M:%S")
results_lock = threading.Lock()
def kill_proc_tree(pid, including_parent=True):
parent = psutil.Process(pid)
for child in parent.children(recursive=True):
try:
child.kill()
except psutil.NoSuchProcess:
pass
if including_parent:
try:
parent.kill()
except psutil.NoSuchProcess:
pass
def ping(url):
status = False
req = None
try:
req = requests.get(url, verify=False, timeout=2)
except Exception as e:
print 'Ping failed:', url, e
time.sleep(30)
if req and req.status_code == 200:
status = True
return status
def extract_test(data):
output = data['output']
result = {
'output': output
}
for line in output.split('\n'):
for name, regexp in REGEXPS.iteritems():
m = re.match(regexp, line)
if m:
match_result = m.groups()[0]
result[name] = float(match_result)
break
return result
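# Example siege-style report lines the REGEXPS above are written for (the exact
# output format is assumed from the patterns, not from captured output):
#   "Availability:       100.00 %"           -> availability(%) = 100.0
#   "Transaction rate:  1448.95 trans/sec"   -> transaction-rate(trans/sec) = 1448.95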
def test_request(results, url, data, timeout):
retry = 3
resp_data = None
while retry > 0:
try:
req = requests.post(url, headers=HEADERS, data=json.dumps(data), timeout=timeout)
resp_data = req.json()
            retry = 0  # success: leave the retry loop
except requests.Timeout as e:
print (3-retry), e
retry -= 1
if resp_data:
result = extract_test(resp_data)
results_lock.acquire()
results.append(result)
results_lock.release()
def merge_test(datas):
if len(datas) == 0: return None
result = {}
outputs = []
keys = []
for key in REGEXPS.keys():
keys.append(key)
# result[key] = []
result[key + '_TOTAL'] = 0
for data in datas:
outputs.append(data['output'])
for key in keys:
if key not in data: continue
# result[key].append(data[key])
result[key + '_TOTAL'] = result[key + '_TOTAL'] + data[key]
result['output'] = '\n\n'.join(outputs)
return result
def do_test(app_url, concurrent, seconds=20):
data = {
'url': app_url,
'concurrent': concurrent,
'seconds': seconds,
}
timeout = seconds + 10
results = []
threads = []
for host in TEST_SERVER_HOSTS:
port = TEST_SERVER_PORT
test_req_url = TEST_REQ_TMPL % locals()
t = threading.Thread(target=test_request, args=(results, test_req_url, data, timeout))
t.start()
threads.append(t)
[t.join() for t in threads]
return merge_test(results)
def gen_server_results(cmd_tmpl, port, app_url):
for processes in PROCESSES_LST:
cmd = cmd_tmpl % locals()
print 'Server:', cmd
p = subprocess.Popen(cmd, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
time.sleep(0.5)
if not ping(app_url):
yield {
'processes': processes,
'concurrent': -1,
'output': 'PingError'
}
kill_proc_tree(p.pid)
continue
for concurrent in CONCURRENTS:
result = do_test(app_url, concurrent, seconds=SECONDS)
result['processes'] = processes
result['concurrent'] = concurrent * len(TEST_SERVER_HOSTS)
yield result
kill_proc_tree(p.pid)
time.sleep(3)
def main():
    def cmp_res(a, b):
        c1, c2 = a['concurrent'], b['concurrent']
        if c1 > c2: return 1
        if c1 < c2: return -1
        p1, p2 = a['processes'], b['processes']
        if p1 > p2: return 1
        if p1 < p2: return -1
        return 0
for info in SUMMARY['tests']:
cmd_tmpl = info['cmd_tmpl']
port = info['port']
ip = APP_SERVER_IP
app_url = APP_SERVER_PATH_TMPL % locals()
results = info['results']
print 'Section:', info['app'], app_url
print time_now()
print '=================='
for result in gen_server_results(cmd_tmpl, port, app_url):
print 'section: {0}, processes: {1}, concurrent: {2}'.format(info['app'], result['processes'], result['concurrent'])
output = result.pop('output')
print '--------------------'
print output
print '--------------------'
print time_now(), info['app']
print '----------------------------------------\n'
results.append(result)
results.sort(cmp=cmp_res)
print '======================================================\n\n'
with open(os.path.join('results', '{0}_summary.json'.format(time_now())), 'w') as f:
f.write(json.dumps(SUMMARY, indent=4))
if __name__ == '__main__':
    main() | gateway/gateway-perfmance-tests/TestAll.py | 0.175009 | 0.123049 |
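TestAll.py is Python 2 code (print statements, dict.iteritems, list.sort(cmp=...)). If it were ported to Python 3, the comparator-based sort in main() would need functools.cmp_to_key; a sketch of just that change, under the same ordering (by 'concurrent', then by 'processes'):

import functools

def cmp_res(a, b):
    # Same ordering as main(): by 'concurrent', then by 'processes'.
    if a['concurrent'] != b['concurrent']:
        return 1 if a['concurrent'] > b['concurrent'] else -1
    if a['processes'] != b['processes']:
        return 1 if a['processes'] > b['processes'] else -1
    return 0

results = [{'concurrent': 800, 'processes': 4}, {'concurrent': 800, 'processes': 1}]
results.sort(key=functools.cmp_to_key(cmp_res))
assert [r['processes'] for r in results] == [1, 4]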
import warnings
import nibabel as nb
import numpy as np
from nilearn.image import resample_img
PATH_SCHAEFER = (
"data/parcellations/Schaefer2018_1000Parcels_7Networks_order_FSLMNI152_2mm.nii.gz"
)
PATH_TIAN = "data/parcellations/Tian_Subcortex_S4_3T_2009cAsym.nii.gz"
def combine_atlas(img1, img2):
"""Combine two atlases in the same space."""
max_val = np.max(img1.dataobj)
    relabel_2 = (img2.dataobj + max_val) * (img2.dataobj > 0).astype(int)
    label_combined = img1.dataobj + relabel_2
n_max_label = sum(len(np.unique(img.dataobj)) - 1 for img in [img1, img2])
overlap = (label_combined > n_max_label).astype(int)
mask = (label_combined > 0).astype(int) - overlap
if np.sum(overlap) == 0:
return (
nb.Nifti1Image(
label_combined,
affine=img1.affine,
header=img1.header,
),
None,
)
warnings.warn(f"Input images contain {np.sum(overlap)} overlapping voxels.")
overlap = nb.Nifti1Image(
overlap,
affine=img1.affine,
header=img1.header,
)
combined = nb.Nifti1Image(
label_combined * mask,
affine=img1.affine,
header=img1.header,
)
return combined, overlap
if __name__ == "__main__":
schaefer = nb.load(PATH_SCHAEFER)
tian_resampled = resample_img(
PATH_TIAN,
target_affine=schaefer.affine,
target_shape=schaefer.shape,
interpolation="nearest",
)
combined, overlap = combine_atlas(schaefer, tian_resampled)
combined.to_filename("data/parcellations/SchaeferTian_combined_MNI152_2mm.nii.gz")
    if overlap is not None:
overlap.to_filename("data/parcellations/SchaeferTian_overlap.nii.gz")
def test_combine_atlas():
schaefer = nb.load(PATH_SCHAEFER)
tian_resampled = resample_img(
PATH_TIAN,
target_affine=schaefer.affine,
target_shape=schaefer.shape,
interpolation="nearest",
)
combined, overlap = combine_atlas(schaefer, tian_resampled)
assert np.max(combined.dataobj) == 1054
    assert np.sum(overlap.dataobj) == 168 | bin/nifti_atlas.py | 0.632843 | 0.32314 |
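The relabeling step in combine_atlas shifts every nonzero label of the second atlas by the maximum label of the first, so the two label sets cannot collide. A toy illustration of the same arithmetic on plain arrays:

import numpy as np

img1 = np.array([0, 1, 2, 0])  # first atlas: labels 1..2
img2 = np.array([3, 0, 0, 1])  # second atlas: labels 1..3
max_val = np.max(img1)
relabel_2 = (img2 + max_val) * (img2 > 0).astype(int)
print(relabel_2)         # [5 0 0 3] -> second atlas now uses labels 3..5
print(img1 + relabel_2)  # [5 1 2 3] -> combined labels, no collisions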
import six
if six.PY3:
import unittest
else:
import unittest2 as unittest
import collections
from depsolver.compat \
import \
OrderedDict, sorted_with_cmp
from depsolver.package \
import \
PackageInfo
from depsolver.pool \
import \
Pool
from depsolver.repository \
import \
Repository
from depsolver.solver.policy \
import \
DefaultPolicy
P = PackageInfo.from_string
class TestDefaultPolicy(unittest.TestCase):
def setUp(self):
self.mkl_10_3_0 = P("mkl-10.3.0")
self.mkl_11_0_0 = P("mkl-11.0.0")
def test_simple(self):
"""Ensure the policy returns the highest version across a set of
packages with the same name."""
pool = Pool([Repository([self.mkl_10_3_0, self.mkl_11_0_0])])
policy = DefaultPolicy()
queue = [self.mkl_10_3_0.id, self.mkl_11_0_0.id]
candidates = policy.select_preferred_packages(pool, {}, queue)
self.assertEqual(list(candidates), [self.mkl_11_0_0.id])
def test_simple_fulfilled_installed(self):
"""Ensure the policy returns the installed version first if it fulfills
the requirement, even if higher versions are available."""
mkl_10_4_0 = P("mkl-10.4.0")
remote_repository = Repository([self.mkl_10_3_0, self.mkl_11_0_0])
installed_repository = Repository([mkl_10_4_0])
pool = Pool([installed_repository, remote_repository])
policy = DefaultPolicy()
queue = [p.id for p in [mkl_10_4_0, self.mkl_10_3_0, self.mkl_11_0_0]]
candidates = policy.select_preferred_packages(pool, {}, queue)
self.assertEqual(list(candidates), [self.mkl_11_0_0.id])
        candidates = policy.select_preferred_packages(pool, {mkl_10_4_0.id: True}, queue)
self.assertEqual(list(candidates), [mkl_10_4_0.id, self.mkl_11_0_0.id])
    def test_simple_fulfilled_installed_with_repository_order(self):
        """Same scenario as above, but with named repositories and an explicit
        repository order set on the pool."""
mkl_10_4_0 = P("mkl-10.4.0")
remote_repository = Repository([self.mkl_10_3_0, self.mkl_11_0_0], "remote")
installed_repository = Repository([mkl_10_4_0], "installed")
pool = Pool([installed_repository, remote_repository])
pool.set_repository_order("installed", "remote")
policy = DefaultPolicy()
queue = [p.id for p in [mkl_10_4_0, self.mkl_10_3_0, self.mkl_11_0_0]]
candidates = policy.select_preferred_packages(pool, {}, queue)
self.assertEqual(list(candidates), [self.mkl_11_0_0.id])
candidates = policy.select_preferred_packages(pool, {mkl_10_4_0.id: True}, queue)
self.assertEqual(list(candidates), [mkl_10_4_0.id, self.mkl_11_0_0.id])
def test_cmp_by_priority_prefer_installed_same_repository_simple(self):
"""
        Check packages from the same repository are sorted by their id.
"""
numpy_1_6_0 = P("numpy-1.6.0")
numpy_1_6_1 = P("numpy-1.6.1")
numpy_1_7_0 = P("numpy-1.7.0")
remote_repository = Repository([numpy_1_7_0, numpy_1_6_1, numpy_1_6_0], "remote")
r_sorted_packages = [numpy_1_7_0, numpy_1_6_1, numpy_1_6_0]
pool = Pool([remote_repository])
policy = DefaultPolicy()
queue = [numpy_1_7_0, numpy_1_6_0, numpy_1_6_1]
def _cmp(a, b):
return policy.cmp_by_priority_prefer_installed(pool, {}, a, b)
self.assertEqual(r_sorted_packages, sorted_with_cmp(queue, cmp=_cmp))
def test_cmp_by_priority_prefer_installed_multi_repositories(self):
"""
        Check packages from multiple repositories are sorted according to
repository priority.
"""
numpy_1_6_0 = P("numpy-1.6.0")
numpy_1_6_1 = P("numpy-1.6.1")
numpy_1_7_0 = P("numpy-1.7.0")
i_numpy_1_6_0 = P("numpy-1.6.0")
remote_repository = Repository([numpy_1_7_0, numpy_1_6_1, numpy_1_6_0], "remote")
installed_repository = Repository([i_numpy_1_6_0], "installed")
r_sorted_packages = [i_numpy_1_6_0, numpy_1_7_0, numpy_1_6_1, numpy_1_6_0]
pool = Pool([installed_repository, remote_repository])
pool.set_repository_order("installed", "remote")
policy = DefaultPolicy()
queue = [numpy_1_7_0, i_numpy_1_6_0, numpy_1_6_0, numpy_1_6_1]
def _cmp(a, b):
return policy.cmp_by_priority_prefer_installed(pool, {}, a, b)
self.assertEqual(r_sorted_packages, sorted_with_cmp(queue, cmp=_cmp))
def test_cmp_by_priority_prefer_installed_replace(self):
"""
        Check replaced packages take priority over replacing ones when they
come from the same repository.
"""
        def _assert_sort_by_priority(packages, r_sorted_packages):
remote_repository = Repository(packages)
pool = Pool([remote_repository])
policy = DefaultPolicy()
# We reverse the list to ensure queue is not originally in the
# final order
queue = reversed(packages)
def _cmp(a, b):
return policy.cmp_by_priority_prefer_installed(pool, {}, a, b)
self.assertEqual(r_sorted_packages, sorted_with_cmp(queue, cmp=_cmp))
scikits_0_12_0 = P("scikits_learn-0.12.0")
sklearn_0_13_0 = P("sklearn-0.13.0")
packages = [sklearn_0_13_0, scikits_0_12_0]
r_sorted_packages = [sklearn_0_13_0, scikits_0_12_0]
_assert_sort_by_priority(packages, r_sorted_packages)
scikits_0_12_0 = P("scikits_learn-0.12.0")
sklearn_0_13_0 = P("sklearn-0.13.0; replaces (scikits_learn < 0.13.0)")
packages = [sklearn_0_13_0, scikits_0_12_0]
r_sorted_packages = [scikits_0_12_0, sklearn_0_13_0]
_assert_sort_by_priority(packages, r_sorted_packages)
class TestSelectPreferredPackages(unittest.TestCase):
def setUp(self):
self.numpy_1_6_0 = P("numpy-1.6.0")
self.numpy_1_6_1 = P("numpy-1.6.1")
self.numpy_1_7_1 = P("numpy-1.7.1")
self.nomkl_numpy_1_6_0 = P("nomkl_numpy-1.6.0; replaces (numpy==1.6.0)")
self.nomkl_numpy_1_6_1 = P("nomkl_numpy-1.6.1; replaces (numpy==1.6.1)")
self.nomkl_numpy_1_7_1 = P("nomkl_numpy-1.7.1; replaces (numpy==1.7.1)")
self.mkl_numpy_1_6_1 = P("mkl_numpy-1.6.0; replaces (numpy==1.6.0)")
self.mkl_numpy_1_6_0 = P("mkl_numpy-1.6.1; replaces (numpy==1.6.1)")
self.mkl_numpy_1_7_1 = P("mkl_numpy-1.7.1; replaces (numpy==1.7.1)")
def test_simple(self):
"""Test we select the most recent version across a list of same
packages with same name."""
packages = [self.numpy_1_6_0, self.numpy_1_6_1, self.numpy_1_7_1]
repository = Repository(packages)
pool = Pool([repository])
policy = DefaultPolicy()
selected_ids = policy.select_preferred_packages(pool, {}, [p.id for p in packages])
r_selected_ids = [self.numpy_1_7_1.id]
self.assertEqual(r_selected_ids, selected_ids)
def test_multiple_providers(self):
"""
Test we select the most recent version across a list of different
packages providing the same package.
"""
packages = [self.numpy_1_6_0, self.numpy_1_6_1, self.numpy_1_7_1,
self.nomkl_numpy_1_6_0,
self.nomkl_numpy_1_6_1,
self.nomkl_numpy_1_7_1,
self.mkl_numpy_1_6_0,
self.mkl_numpy_1_6_1,
self.mkl_numpy_1_7_1]
repository = Repository(packages)
pool = Pool([repository])
policy = DefaultPolicy()
selected_ids = policy.select_preferred_packages(pool, {},
[p.id for p in packages])
r_selected_ids = [self.numpy_1_7_1.id, self.nomkl_numpy_1_7_1.id, self.mkl_numpy_1_7_1.id]
self.assertEqual(r_selected_ids, selected_ids)
class TestComputePreferredPackages(unittest.TestCase):
def setUp(self):
self.mkl_10_3_0 = P("mkl-10.3.0")
self.mkl_10_4_0 = P("mkl-10.4.0")
self.numpy_1_6_0 = P("numpy-1.6.0")
self.numpy_1_6_1 = P("numpy-1.6.1")
self.numpy_1_7_1 = P("numpy-1.7.1")
self.nomkl_numpy_1_6_0 = P("nomkl_numpy-1.6.0; replaces (numpy==1.6.0)")
self.nomkl_numpy_1_6_1 = P("nomkl_numpy-1.6.1; replaces (numpy==1.6.1)")
self.nomkl_numpy_1_7_1 = P("nomkl_numpy-1.7.1; replaces (numpy==1.7.1)")
self.mkl_numpy_1_6_1 = P("mkl_numpy-1.6.0; replaces (numpy==1.6.0)")
self.mkl_numpy_1_6_0 = P("mkl_numpy-1.6.1; replaces (numpy==1.6.1)")
self.mkl_numpy_1_7_1 = P("mkl_numpy-1.7.1; replaces (numpy==1.7.1)")
def test_multiple_providers(self):
"""Test package queues with different packages providing the same
package."""
packages = [self.numpy_1_6_0, self.numpy_1_6_1, self.numpy_1_7_1,
self.nomkl_numpy_1_6_0,
self.nomkl_numpy_1_6_1,
self.nomkl_numpy_1_7_1]
repository = Repository(packages)
pool = Pool([repository])
policy = DefaultPolicy()
package_queues = policy._compute_prefered_packages_installed_first(
pool, {}, [p.id for p in packages])
r_package_queues = OrderedDict()
r_package_queues[six.u("numpy")] = collections.deque([
self.numpy_1_6_0.id, self.numpy_1_6_1.id, self.numpy_1_7_1.id])
r_package_queues[six.u("nomkl_numpy")] = collections.deque([
self.nomkl_numpy_1_6_0.id, self.nomkl_numpy_1_6_1.id,
self.nomkl_numpy_1_7_1.id])
self.assertEqual(r_package_queues, package_queues)
def test_installed_first(self):
"""
        Test installed versions come first, even before higher versions.
"""
packages = [self.mkl_10_3_0,
self.mkl_10_4_0]
installed_packages = [P("mkl-10.4.0")]
repository = Repository(packages)
installed_repository = Repository(installed_packages)
pool = Pool([repository, installed_repository])
policy = DefaultPolicy()
installed_map = OrderedDict()
for p in installed_packages:
installed_map[p.id] = p
package_queues = policy._compute_prefered_packages_installed_first(
pool, installed_map, [p.id for p in packages + installed_packages])
r_package_queues = OrderedDict()
r_package_queues[six.u("mkl")] = collections.deque(p.id for p in installed_packages + packages)
        self.assertEqual(r_package_queues, package_queues) | depsolver/solver/tests/test_policy.py | 0.672332 | 0.254214 |
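depsolver.compat.sorted_with_cmp is imported by the tests above but not shown. On Python 3, where sorted() no longer accepts a cmp argument, a plausible shim looks like this (the real compat module may differ):

import functools

def sorted_with_cmp(iterable, cmp):
    # Emulate Python 2's sorted(iterable, cmp=...) on Python 3.
    return sorted(iterable, key=functools.cmp_to_key(cmp))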
import unicodedata
import argparse
import re
from typing import Any, Callable, List, Union, TypeVar
from re import Pattern
CATEGORIES = {
"Cc": "Other, control",
"Cf": "Other, format",
"Cn": "Other, not assigned",
"Co": "Other, private use",
"Cs": "Other, surrogate",
"Ll": "Letter, lowercase",
"Lm": "Letter, modifier",
"Lo": "Letter, other",
"Lt": "Letter, titlecase",
"Lu": "Letter, uppercase",
"Mc": "Mark, spacing combining",
"Me": "Mark, enclosing",
"Mn": "Mark, nonspacing",
"Nd": "Number, decimal digit",
"Nl": "Number, letter",
"No": "Number, other",
"Pc": "Punctuation, connector",
"Pd": "Punctuation, dash",
"Pe": "Punctuation, close",
"Pf": "Punctuation, final quote",
"Pi": "Punctuation, initial quote",
"Po": "Punctuation, other",
"Ps": "Punctuation, open",
"Sc": "Symbol, currency",
"Sk": "Symbol, modifier",
"Sm": "Symbol, math",
"So": "Symbol, other",
"Zl": "Separator, line",
"Zp": "Separator, paragraph",
"Zs": "Separator, space",
}
COMBINING_CLASSES = {
"0": "Not Reordered",
"1": "Overlay",
"10": "CCC10",
"103": "CCC103",
"107": "CCC107",
"11": "CCC11",
"118": "CCC118",
"12": "CCC12",
"122": "CCC122",
"129": "CCC129",
"13": "CCC13",
"130": "CCC130",
"132": "CCC132",
"14": "CCC14",
"15": "CCC15",
"16": "CCC16",
"17": "CCC17",
"18": "CCC18",
"19": "CCC19",
"20": "CCC20",
"202": "Attached Below",
"21": "CCC21",
"214": "Attached Above",
"216": "Attached Above Right",
"218": "Below Left",
"22": "CCC22",
"220": "Below",
"222": "Below Right",
"224": "Left",
"226": "Right",
"228": "Above Left",
"23": "CCC23",
"230": "Above",
"232": "Above Right",
"233": "Double Below",
"234": "Double Above",
"24": "CCC24",
"240": "Iota Subscript",
"25": "CCC25",
"26": "CCC26",
"27": "CCC27",
"28": "CCC28",
"29": "CCC29",
"30": "CCC30",
"31": "CCC31",
"32": "CCC32",
"33": "CCC33",
"34": "CCC34",
"35": "CCC35",
"36": "CCC36",
"6": "6",
"7": "Nukta",
"8": "Kana Voicing",
"84": "CCC84",
"9": "Virama",
"91": "CCC91",
}
T = TypeVar("T", str, bytes)
def regex_arg(regex: Pattern, type_: Callable[[str], Any] = str) -> Callable[[str], str]:
def inner(arg: str) -> str:
match = regex.match(arg)
        if not match:
            raise argparse.ArgumentTypeError(f"invalid value: {arg!r}")
        try:
            return type_(match.group(1))
        except IndexError:  # pattern has no group 1; fall back to the whole match
            return type_(match.group(0))
return inner
def _bool(string: Union[str, int]) -> bool:
return string == "ON" or string == 1
def _split(string: T, length: int) -> List[T]:
return [string[i : i + length] for i in range(0, len(string), length)]
class CharacterInfo:
__slots__ = (
"char",
"name",
"combining",
"category",
"is_bidirectional",
"is_mirrored",
"html_entity",
"html_entity_alt",
"css_entity",
"utf_8_entity",
"utf_8_encoding",
"utf_16_encoding",
"utf_32_encoding",
"binary_code",
"decimal_code",
"octal_code",
"hex_code",
)
def __init__(self, char: str):
# General Data
self.char = char
self.name = unicodedata.name(char)
self.combining = _bool(unicodedata.combining(char))
self.category = CATEGORIES[unicodedata.category(char)]
self.is_bidirectional = _bool(unicodedata.bidirectional(char))
self.is_mirrored = _bool(unicodedata.mirrored(char))
        # Entities
        codepoint = ord(char)
        self.html_entity = f"&#{codepoint};"
        self.html_entity_alt = f"&#x{codepoint:x};"
        self.css_entity = f"\\{codepoint:04x}"
        self.utf_8_entity = f"\\u{codepoint:04x}"
        # Encodings: the byte sequences of the character in each UTF form
        self.utf_8_encoding = " ".join(f"0x{b:02x}" for b in char.encode("utf-8"))
        self.utf_16_encoding = " ".join(f"0x{unit.hex()}" for unit in _split(char.encode("utf-16-be"), 2))
        self.utf_32_encoding = f"0x{char.encode('utf-32-be').hex()}"
# Character Codes
self.binary_code = bin(ord(char))[2:]
self.decimal_code = ord(char)
self.octal_code = oct(ord(char)).lstrip("0o")
self.hex_code = hex(ord(char))
argumentparser = argparse.ArgumentParser(description="Print unicode character info")
argumentparser.add_argument("characters", nargs="?", help="The character(s) to print info for", default=[])
argumentparser.add_argument(
"-n",
"--name",
help="The name of the character to print info for (can be used multiple times)",
action="append",
default=[],
)
argumentparser.add_argument(
"-u",
"--unicode",
help="The unicode of the character to print info for (can be used multiple times)",
action="append",
default=[],
type=regex_arg(re.compile(r"(?:\\u|U\+||&#x)?(?P<code>[0-9A-Fa-f]{4})", re.I)),
)
argumentparser.add_argument(
"-d",
"--decimal",
help="The decimal value of the unicode codepoint of the character to print info for (can be used multiple times)",
action="append",
default=[],
type=regex_arg(re.compile(r"[0-9]{1,5}"), type_=int),
)
args = argumentparser.parse_args()
args.characters = list(args.characters)  # the positional arg arrives as a str; normalize so append() below works
for i in args.name:
args.characters.append(unicodedata.lookup(i))
for i in args.unicode:
args.characters.append(chr(int(i, 16)))
for i in args.decimal:
    args.characters.append(chr(i)) | charinfo/core.py | 0.67662 | 0.380644 |
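A quick usage example for CharacterInfo, with the values the encoding fields produce for U+00E9 (note the module also parses CLI arguments at import time, so in practice the class would be pulled out or guarded before importing it elsewhere):

info = CharacterInfo("\u00e9")
print(info.name)             # LATIN SMALL LETTER E WITH ACUTE
print(info.utf_8_encoding)   # 0xc3 0xa9
print(info.utf_16_encoding)  # 0x00e9
print(info.utf_32_encoding)  # 0x000000e9
print(info.html_entity)      # &#233;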
import sys
import pandas as pd
import nltk
nltk.download(['punkt', 'wordnet'])
from sqlalchemy import create_engine
from nltk.tokenize import word_tokenize
from nltk.stem import WordNetLemmatizer
from sklearn.multioutput import MultiOutputClassifier
from sklearn.pipeline import Pipeline, FeatureUnion
from sklearn.feature_extraction.text import TfidfTransformer, CountVectorizer
from sklearn.neighbors import KNeighborsClassifier
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.metrics import classification_report
from sklearn.ensemble import RandomForestClassifier
import pickle
def load_data(database_filepath):
"""
input:
database_filepath: loading data from the given location
output:
returning X,y and categories of y
"""
#load data from db
engine = create_engine("sqlite:///{}".format(database_filepath))
df = pd.read_sql_table("disaster_message_cat",con=engine)
#create X and y
X = df["message"]
y = df.drop(["message","genre","id","original"],axis=1)
category_names=y.columns
return X,y,category_names
def tokenize(text):
"""
input:
text that will be tokenized
output:
clean tokens of the given text
"""
#create tokens
tokens = word_tokenize(text)
#create lemmatizer
lemmatizer = WordNetLemmatizer()
#clean tokens
clean_tokens = []
for tok in tokens:
clean_tok = lemmatizer.lemmatize(tok).lower().strip()
clean_tokens.append(clean_tok)
return clean_tokens
def build_model():
"""
setting up the model (pipeline and gridsearch for optimizing parameters)
output: model
"""
pipeline = Pipeline([
('vect', CountVectorizer(tokenizer=tokenize)),
('tfidf', TfidfTransformer()),
('clf',MultiOutputClassifier(RandomForestClassifier()))
])
#provide a set of parameters for gridsearch
parameters = {
'clf__estimator__min_samples_split':[2,4,6],
'vect__ngram_range': ((1, 1), (1, 2)),
'tfidf__use_idf': (True, False)
}
cv = GridSearchCV(pipeline,param_grid=parameters, n_jobs=-1)
return cv
def evaluate_model(model, X_test, Y_test, category_names):
"""
printing results
input:
model: model that is being evaluated
X_test: tokens of test-dataset
Y_test: results of test-dataset
category_names: categories/columns that results will be provided for
"""
y_pred=model.predict(X_test)
y_pred_df=pd.DataFrame(data=y_pred,columns=Y_test.columns)
for col in Y_test.columns:
print(col,classification_report(y_true=Y_test[col],y_pred=y_pred_df[col]))
def save_model(model, model_filepath):
"""
input:
model: model to be saved
model_filepath: path to save to
"""
    with open(model_filepath, 'wb') as f:
        pickle.dump(model, f)
def main():
if len(sys.argv) == 3:
database_filepath, model_filepath = sys.argv[1:]
print('Loading data...\n DATABASE: {}'.format(database_filepath))
X, Y, category_names = load_data(database_filepath)
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.2)
print('Building model...')
model = build_model()
print('Training model...')
model.fit(X_train, Y_train)
print('Evaluating model...')
evaluate_model(model, X_test, Y_test, category_names)
print('Saving model...\n MODEL: {}'.format(model_filepath))
save_model(model, model_filepath)
print('Trained model saved!')
else:
print('Please provide the filepath of the disaster messages database '\
'as the first argument and the filepath of the pickle file to '\
'save the model to as the second argument. \n\nExample: python '\
'train_classifier.py ../data/DisasterResponse.db classifier.pkl')
if __name__ == '__main__':
    main()

# file: models/train_classifier.py
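
# A minimal sketch that exercises the same pipeline shape as build_model()
# above on toy data, so the fit/predict path can be checked without the
# database or the grid search. The messages, labels and the plain whitespace
# tokenizer here are made up for illustration (the real script uses its own
# tokenize()).
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.feature_extraction.text import CountVectorizer, TfidfTransformer
from sklearn.multioutput import MultiOutputClassifier
from sklearn.ensemble import RandomForestClassifier

messages = pd.Series([
    "we need water and food",
    "the storm destroyed our house",
    "medical help required immediately",
    "roads are blocked after the flood",
])
labels = pd.DataFrame({
    "water": [1, 0, 0, 0],
    "shelter": [0, 1, 0, 0],
    "medical_help": [0, 0, 1, 0],
    "infrastructure": [0, 0, 0, 1],
})

toy_pipeline = Pipeline([
    ("vect", CountVectorizer(tokenizer=str.split)),
    ("tfidf", TfidfTransformer()),
    ("clf", MultiOutputClassifier(RandomForestClassifier(n_estimators=10))),
])
toy_pipeline.fit(messages, labels)
print(toy_pipeline.predict(pd.Series(["please send water"])))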
import shutil
import subprocess
from collections import namedtuple
from subprocess import CompletedProcess
from textwrap import dedent
import pytest
from Pegasus.client._client import Client, PegasusClientError, Result, from_env
def test_PegasusClientError():
return_value = namedtuple("return_value", ["stdout", "stderr"])
rv = return_value("stdout", "stderr")
try:
raise PegasusClientError("pegasus command failed", rv)
except PegasusClientError as e:
assert e.output == "stdout\nstderr"
assert e.result == rv
def test_from_env(mocker):
mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")
try:
from_env()
shutil.which.assert_called_once_with("pegasus-version")
except ValueError as e:
pytest.fail("should not have thrown {}".format(e))
def test_from_env_no_pegasus_home(monkeypatch):
monkeypatch.setenv("PATH", "/tmp")
with pytest.raises(ValueError) as e:
from_env()
assert "PEGASUS_HOME not found" in str(e)
@pytest.fixture(scope="function")
def mock_subprocess(mocker):
cp = CompletedProcess(None, returncode=0, stdout=b" ", stderr=b" ")
mocker.patch("subprocess.run", return_value=cp)
@pytest.fixture(scope="function")
def client():
return Client("/path")
class TestClient:
def test_plan(self, mock_subprocess, client):
client.plan(
"dax.yml",
conf="pegasus.conf",
sites=["site1", "site2"],
output_sites=["local", "other_site"],
staging_sites={"es1": "ss1", "es2": "ss2"},
input_dirs=["/input_dir1", "/input_dir2"],
output_dir="/output_dir",
dir="/dir",
relative_dir="/relative_dir",
cleanup="leaf",
verbose=3,
force=True,
submit=True,
env=123,
)
subprocess.run.assert_called_once_with(
[
"/path/bin/pegasus-plan",
"-Denv=123",
"--conf",
"pegasus.conf",
"--sites",
"site1,site2",
"--output-sites",
"local,other_site",
"--staging-site",
"es1=ss1,es2=ss2",
"--input-dir",
"/input_dir1,/input_dir2",
"--output-dir",
"/output_dir",
"--dir",
"/dir",
"--relative-dir",
"/relative_dir",
"--cleanup",
"leaf",
"-vvv",
"--force",
"--submit",
"--dax",
"dax.yml",
],
stderr=-1,
stdout=-1,
)
def test_plan_invalid_sites(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", sites="local")
assert "invalid sites: local" in str(e)
def test_plan_invalid_staging_sites(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", staging_sites="condorpool=origin")
assert "invalid staging_sites: condorpool=origin" in str(e)
def test_plan_invalid_output_sites(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", output_sites="site1,site2")
assert "invalid output_sites: site1,site2" in str(e)
def test_plan_invalid_input_dirs(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", input_dirs="/input_dir")
assert "invalid input_dirs: /input_dir" in str(e)
def test_run(self, mock_subprocess, client):
client.run("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-run", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test_status(self, mock_subprocess, client):
client.status("submit_dir", long=True, verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-status", "--long", "-vvv", "submit_dir"],
stderr=-1,
stdout=-1,
)
@pytest.mark.parametrize(
"pegasus_status_out, expected_wait_out",
[
(
dedent(
"""
(no matching jobs found in Condor Q)
UNRDY READY PRE IN_Q POST DONE FAIL %DONE STATE DAGNAME
0 0 0 0 0 8 0 100.0 Success *appends-0.dag
Summary: 1 DAG total (Success:1)
"""
).encode("utf8"),
"\r[\x1b[1;32m##################################################\x1b[0m] 100.0% ..Success (\x1b[1;32mCompleted: 8\x1b[0m, \x1b[1;33mQueued: 0\x1b[0m, \x1b[1;36mRunning: 0\x1b[0m, \x1b[1;31mFailed: 0\x1b[0m)\n",
),
(
dedent(
"""
STAT IN_STATE JOB
Run 01:10 appends-0 ( /nas/home/tanaka/workflows/test-workflow-1583372721 )
Summary: 1 Condor job total (R:1)
UNRDY READY PRE IN_Q POST DONE FAIL %DONE STATE DAGNAME
4 0 0 0 0 3 1 37.5 Failure *appends-0.dag
Summary: 1 DAG total (Failure:1)
"""
).encode("utf8"),
"\r[\x1b[1;32m###################\x1b[0m-------------------------------] 37.5% ..Failure (\x1b[1;32mCompleted: 3\x1b[0m, \x1b[1;33mQueued: 0\x1b[0m, \x1b[1;36mRunning: 0\x1b[0m, \x1b[1;31mFailed: 1\x1b[0m)\n",
),
],
)
def test_wait(self, mocker, capsys, client, pegasus_status_out, expected_wait_out):
mocker.patch(
"subprocess.run",
return_value=CompletedProcess(
None, returncode=0, stdout=pegasus_status_out, stderr=""
),
)
client.wait("submit_dir")
out, _ = capsys.readouterr()
assert out == expected_wait_out
def test_remove(self, mock_subprocess, client):
client.remove("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-remove", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test_analyzer(self, mock_subprocess, client):
client.analyzer("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-analyzer", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test_statistics(self, mock_subprocess, client):
client.statistics("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-statistics", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test__exec(self, mock_subprocess):
Client._exec("ls")
with pytest.raises(ValueError) as e:
Client._exec(None)
assert str(e.value) == "cmd is required"
def test__make_result(self):
with pytest.raises(ValueError) as e:
Client._make_result(None)
assert str(e.value) == "rv is required"
def test__get_submit_dir(self):
plan_output_with_direct_submit = dedent(
"""
2020.02.11 15:39:42.958 PST:
2020.02.11 15:39:42.963 PST: -----------------------------------------------------------------------
2020.02.11 15:39:42.969 PST: File for submitting this DAG to HTCondor : appends-0.dag.condor.sub
2020.02.11 15:39:42.974 PST: Log of DAGMan debugging messages : appends-0.dag.dagman.out
2020.02.11 15:39:42.979 PST: Log of HTCondor library output : appends-0.dag.lib.out
2020.02.11 15:39:42.984 PST: Log of HTCondor library error messages : appends-0.dag.lib.err
2020.02.11 15:39:42.990 PST: Log of the life of condor_dagman itself : appends-0.dag.dagman.log
2020.02.11 15:39:42.995 PST:
2020.02.11 15:39:43.000 PST: -no_submit given, not submitting DAG to HTCondor. You can do this with:
2020.02.11 15:39:43.010 PST: -----------------------------------------------------------------------
2020.02.11 15:39:43.820 PST: Your database is compatible with Pegasus version: 4.9.3
2020.02.11 15:39:43.912 PST: Submitting to condor appends-0.dag.condor.sub
2020.02.11 15:39:43.940 PST: Submitting job(s).
2020.02.11 15:39:43.945 PST: 1 job(s) submitted to cluster 1533083.
2020.02.11 15:39:43.950 PST:
2020.02.11 15:39:43.956 PST: Your workflow has been started and is running in the base directory:
2020.02.11 15:39:43.961 PST:
2020.02.11 15:39:43.966 PST: /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
2020.02.11 15:39:43.971 PST:
2020.02.11 15:39:43.977 PST: *** To monitor the workflow you can run ***
2020.02.11 15:39:43.982 PST:
2020.02.11 15:39:43.987 PST: pegasus-status -l /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
2020.02.11 15:39:43.992 PST:
2020.02.11 15:39:43.998 PST: *** To remove your workflow run ***
2020.02.11 15:39:44.003 PST:
2020.02.11 15:39:44.008 PST: pegasus-remove /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
2020.02.11 15:39:44.013 PST:
2020.02.11 15:39:44.069 PST: Time taken to execute is 2.117 seconds
"""
)
assert (
Client._get_submit_dir(plan_output_with_direct_submit)
== "/local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()"
)
plan_output_without_direct_submit = dedent(
"""
2020.02.11 15:42:04.236 PST:
2020.02.11 15:42:04.242 PST: -----------------------------------------------------------------------
2020.02.11 15:42:04.247 PST: File for submitting this DAG to HTCondor : appends-0.dag.condor.sub
2020.02.11 15:42:04.252 PST: Log of DAGMan debugging messages : appends-0.dag.dagman.out
2020.02.11 15:42:04.258 PST: Log of HTCondor library output : appends-0.dag.lib.out
2020.02.11 15:42:04.263 PST: Log of HTCondor library error messages : appends-0.dag.lib.err
2020.02.11 15:42:04.268 PST: Log of the life of condor_dagman itself : appends-0.dag.dagman.log
2020.02.11 15:42:04.273 PST:
2020.02.11 15:42:04.279 PST: -no_submit given, not submitting DAG to HTCondor. You can do this with:
2020.02.11 15:42:04.289 PST: -----------------------------------------------------------------------
2020.02.11 15:42:05.120 PST: Your database is compatible with Pegasus version: 4.9.3
2020.02.11 15:42:05.126 PST:
I have concretized your abstract workflow. The workflow has been entered
into the workflow database with a state of "planned". The next step is
to start or execute your workflow. The invocation required is
pegasus-run /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
"""
)
assert (
Client._get_submit_dir(plan_output_without_direct_submit)
== "/local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()"
)
@pytest.fixture(scope="function")
def make_result():
def _make_result(cmd="command", exit_code=0, stdout=b"", stderr=b""):
r = Result(cmd, exit_code, stdout, stderr)
return r
return _make_result
def test_raise_exit_code(make_result):
r = make_result()
assert r.raise_exit_code() is None
with pytest.raises(ValueError) as e:
r = make_result(exit_code=1)
r.raise_exit_code()
assert e.value.args[1] == r
def test_empty(make_result):
r = make_result()
assert r.output == ""
assert r.stdout == ""
assert r.stderr == ""
assert r.json is None
assert r.yaml is None
assert r.yaml_all is None
def test_output(make_result):
r = make_result(stdout=b"test")
assert r.output == "test"
def test_output_fail(make_result):
r = make_result(stdout=None)
with pytest.raises(ValueError) as e:
r.stdout
assert str(e.value) == "stdout not captured"
def test_stdout(make_result):
r = make_result(stdout=b"test")
assert r.stdout == "test"
def test_stdout_fail(make_result):
r = make_result(stdout=None)
with pytest.raises(ValueError) as e:
r.stdout
assert str(e.value) == "stdout not captured"
def test_stderr(make_result):
r = make_result(stderr=b"test")
assert r.stderr == "test"
def test_stderr_fail(make_result):
r = make_result(stderr=None)
with pytest.raises(ValueError) as e:
r.stderr
assert str(e.value) == "stderr not captured"
def test_json(make_result):
r = make_result(stdout=b'{"a": 1}')
assert isinstance(r.json, dict)
assert r.json["a"] == 1
def test_yaml(make_result):
r = make_result(stdout=b"a: 1")
assert isinstance(r.yaml, dict)
assert r.yaml["a"] == 1
def test_yaml_all(make_result):
r = make_result(
stdout=b"""---
a: 1
---
b: 2
"""
)
d = [y for y in r.yaml_all]
assert isinstance(d, list)
assert len(d) == 2
assert d[0]["a"] == 1
assert d[1]["b"] == 2 | packages/pegasus-common/test/client/test_client.py | import shutil
import subprocess
from collections import namedtuple
from subprocess import CompletedProcess
from textwrap import dedent
import pytest
from Pegasus.client._client import Client, PegasusClientError, Result, from_env
def test_PegasusClientError():
return_value = namedtuple("return_value", ["stdout", "stderr"])
rv = return_value("stdout", "stderr")
try:
raise PegasusClientError("pegasus command failed", rv)
except PegasusClientError as e:
assert e.output == "stdout\nstderr"
assert e.result == rv
def test_from_env(mocker):
mocker.patch("shutil.which", return_value="/usr/bin/pegasus-version")
try:
from_env()
shutil.which.assert_called_once_with("pegasus-version")
except ValueError as e:
pytest.fail("should not have thrown {}".format(e))
def test_from_env_no_pegasus_home(monkeypatch):
monkeypatch.setenv("PATH", "/tmp")
with pytest.raises(ValueError) as e:
from_env()
assert "PEGASUS_HOME not found" in str(e)
@pytest.fixture(scope="function")
def mock_subprocess(mocker):
cp = CompletedProcess(None, returncode=0, stdout=b" ", stderr=b" ")
mocker.patch("subprocess.run", return_value=cp)
@pytest.fixture(scope="function")
def client():
return Client("/path")
class TestClient:
def test_plan(self, mock_subprocess, client):
client.plan(
"dax.yml",
conf="pegasus.conf",
sites=["site1", "site2"],
output_sites=["local", "other_site"],
staging_sites={"es1": "ss1", "es2": "ss2"},
input_dirs=["/input_dir1", "/input_dir2"],
output_dir="/output_dir",
dir="/dir",
relative_dir="/relative_dir",
cleanup="leaf",
verbose=3,
force=True,
submit=True,
env=123,
)
subprocess.run.assert_called_once_with(
[
"/path/bin/pegasus-plan",
"-Denv=123",
"--conf",
"pegasus.conf",
"--sites",
"site1,site2",
"--output-sites",
"local,other_site",
"--staging-site",
"es1=ss1,es2=ss2",
"--input-dir",
"/input_dir1,/input_dir2",
"--output-dir",
"/output_dir",
"--dir",
"/dir",
"--relative-dir",
"/relative_dir",
"--cleanup",
"leaf",
"-vvv",
"--force",
"--submit",
"--dax",
"dax.yml",
],
stderr=-1,
stdout=-1,
)
def test_plan_invalid_sites(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", sites="local")
assert "invalid sites: local" in str(e)
def test_plan_invalid_staging_sites(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", staging_sites="condorpool=origin")
assert "invalid staging_sites: condorpool=origin" in str(e)
def test_plan_invalid_output_sites(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", output_sites="site1,site2")
assert "invalid output_sites: site1,site2" in str(e)
def test_plan_invalid_input_dirs(self, client):
with pytest.raises(TypeError) as e:
client.plan("wf.yml", input_dirs="/input_dir")
assert "invalid input_dirs: /input_dir" in str(e)
def test_run(self, mock_subprocess, client):
client.run("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-run", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test_status(self, mock_subprocess, client):
client.status("submit_dir", long=True, verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-status", "--long", "-vvv", "submit_dir"],
stderr=-1,
stdout=-1,
)
@pytest.mark.parametrize(
"pegasus_status_out, expected_wait_out",
[
(
dedent(
"""
(no matching jobs found in Condor Q)
UNRDY READY PRE IN_Q POST DONE FAIL %DONE STATE DAGNAME
0 0 0 0 0 8 0 100.0 Success *appends-0.dag
Summary: 1 DAG total (Success:1)
"""
).encode("utf8"),
"\r[\x1b[1;32m##################################################\x1b[0m] 100.0% ..Success (\x1b[1;32mCompleted: 8\x1b[0m, \x1b[1;33mQueued: 0\x1b[0m, \x1b[1;36mRunning: 0\x1b[0m, \x1b[1;31mFailed: 0\x1b[0m)\n",
),
(
dedent(
"""
STAT IN_STATE JOB
Run 01:10 appends-0 ( /nas/home/tanaka/workflows/test-workflow-1583372721 )
Summary: 1 Condor job total (R:1)
UNRDY READY PRE IN_Q POST DONE FAIL %DONE STATE DAGNAME
4 0 0 0 0 3 1 37.5 Failure *appends-0.dag
Summary: 1 DAG total (Failure:1)
"""
).encode("utf8"),
"\r[\x1b[1;32m###################\x1b[0m-------------------------------] 37.5% ..Failure (\x1b[1;32mCompleted: 3\x1b[0m, \x1b[1;33mQueued: 0\x1b[0m, \x1b[1;36mRunning: 0\x1b[0m, \x1b[1;31mFailed: 1\x1b[0m)\n",
),
],
)
def test_wait(self, mocker, capsys, client, pegasus_status_out, expected_wait_out):
mocker.patch(
"subprocess.run",
return_value=CompletedProcess(
None, returncode=0, stdout=pegasus_status_out, stderr=""
),
)
client.wait("submit_dir")
out, _ = capsys.readouterr()
assert out == expected_wait_out
def test_remove(self, mock_subprocess, client):
client.remove("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-remove", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test_analyzer(self, mock_subprocess, client):
client.analyzer("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-analyzer", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test_statistics(self, mock_subprocess, client):
client.statistics("submit_dir", verbose=3)
subprocess.run.assert_called_once_with(
["/path/bin/pegasus-statistics", "-vvv", "submit_dir"], stderr=-1, stdout=-1
)
def test__exec(self, mock_subprocess):
Client._exec("ls")
with pytest.raises(ValueError) as e:
Client._exec(None)
assert str(e.value) == "cmd is required"
def test__make_result(self):
with pytest.raises(ValueError) as e:
Client._make_result(None)
assert str(e.value) == "rv is required"
def test__get_submit_dir(self):
plan_output_with_direct_submit = dedent(
"""
2020.02.11 15:39:42.958 PST:
2020.02.11 15:39:42.963 PST: -----------------------------------------------------------------------
2020.02.11 15:39:42.969 PST: File for submitting this DAG to HTCondor : appends-0.dag.condor.sub
2020.02.11 15:39:42.974 PST: Log of DAGMan debugging messages : appends-0.dag.dagman.out
2020.02.11 15:39:42.979 PST: Log of HTCondor library output : appends-0.dag.lib.out
2020.02.11 15:39:42.984 PST: Log of HTCondor library error messages : appends-0.dag.lib.err
2020.02.11 15:39:42.990 PST: Log of the life of condor_dagman itself : appends-0.dag.dagman.log
2020.02.11 15:39:42.995 PST:
2020.02.11 15:39:43.000 PST: -no_submit given, not submitting DAG to HTCondor. You can do this with:
2020.02.11 15:39:43.010 PST: -----------------------------------------------------------------------
2020.02.11 15:39:43.820 PST: Your database is compatible with Pegasus version: 4.9.3
2020.02.11 15:39:43.912 PST: Submitting to condor appends-0.dag.condor.sub
2020.02.11 15:39:43.940 PST: Submitting job(s).
2020.02.11 15:39:43.945 PST: 1 job(s) submitted to cluster 1533083.
2020.02.11 15:39:43.950 PST:
2020.02.11 15:39:43.956 PST: Your workflow has been started and is running in the base directory:
2020.02.11 15:39:43.961 PST:
2020.02.11 15:39:43.966 PST: /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
2020.02.11 15:39:43.971 PST:
2020.02.11 15:39:43.977 PST: *** To monitor the workflow you can run ***
2020.02.11 15:39:43.982 PST:
2020.02.11 15:39:43.987 PST: pegasus-status -l /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
2020.02.11 15:39:43.992 PST:
2020.02.11 15:39:43.998 PST: *** To remove your workflow run ***
2020.02.11 15:39:44.003 PST:
2020.02.11 15:39:44.008 PST: pegasus-remove /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
2020.02.11 15:39:44.013 PST:
2020.02.11 15:39:44.069 PST: Time taken to execute is 2.117 seconds
"""
)
assert (
Client._get_submit_dir(plan_output_with_direct_submit)
== "/local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()"
)
plan_output_without_direct_submit = dedent(
"""
2020.02.11 15:42:04.236 PST:
2020.02.11 15:42:04.242 PST: -----------------------------------------------------------------------
2020.02.11 15:42:04.247 PST: File for submitting this DAG to HTCondor : appends-0.dag.condor.sub
2020.02.11 15:42:04.252 PST: Log of DAGMan debugging messages : appends-0.dag.dagman.out
2020.02.11 15:42:04.258 PST: Log of HTCondor library output : appends-0.dag.lib.out
2020.02.11 15:42:04.263 PST: Log of HTCondor library error messages : appends-0.dag.lib.err
2020.02.11 15:42:04.268 PST: Log of the life of condor_dagman itself : appends-0.dag.dagman.log
2020.02.11 15:42:04.273 PST:
2020.02.11 15:42:04.279 PST: -no_submit given, not submitting DAG to HTCondor. You can do this with:
2020.02.11 15:42:04.289 PST: -----------------------------------------------------------------------
2020.02.11 15:42:05.120 PST: Your database is compatible with Pegasus version: 4.9.3
2020.02.11 15:42:05.126 PST:
I have concretized your abstract workflow. The workflow has been entered
into the workflow database with a state of "planned". The next step is
to start or execute your workflow. The invocation required is
pegasus-run /local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()
"""
)
assert (
Client._get_submit_dir(plan_output_without_direct_submit)
== "/local-scratch/tanaka/workflows/test-workflow-THIS-SHOULD-BE-FOUND-BY-Client._get_submit_dir()"
)
@pytest.fixture(scope="function")
def make_result():
def _make_result(cmd="command", exit_code=0, stdout=b"", stderr=b""):
r = Result(cmd, exit_code, stdout, stderr)
return r
return _make_result
def test_raise_exit_code(make_result):
r = make_result()
assert r.raise_exit_code() is None
with pytest.raises(ValueError) as e:
r = make_result(exit_code=1)
r.raise_exit_code()
assert e.value.args[1] == r
def test_empty(make_result):
r = make_result()
assert r.output == ""
assert r.stdout == ""
assert r.stderr == ""
assert r.json is None
assert r.yaml is None
assert r.yaml_all is None
def test_output(make_result):
r = make_result(stdout=b"test")
assert r.output == "test"
def test_output_fail(make_result):
r = make_result(stdout=None)
with pytest.raises(ValueError) as e:
r.stdout
assert str(e.value) == "stdout not captured"
def test_stdout(make_result):
r = make_result(stdout=b"test")
assert r.stdout == "test"
def test_stdout_fail(make_result):
r = make_result(stdout=None)
with pytest.raises(ValueError) as e:
r.stdout
assert str(e.value) == "stdout not captured"
def test_stderr(make_result):
r = make_result(stderr=b"test")
assert r.stderr == "test"
def test_stderr_fail(make_result):
r = make_result(stderr=None)
with pytest.raises(ValueError) as e:
r.stderr
assert str(e.value) == "stderr not captured"
def test_json(make_result):
r = make_result(stdout=b'{"a": 1}')
assert isinstance(r.json, dict)
assert r.json["a"] == 1
def test_yaml(make_result):
r = make_result(stdout=b"a: 1")
assert isinstance(r.yaml, dict)
assert r.yaml["a"] == 1
def test_yaml_all(make_result):
r = make_result(
stdout=b"""---
a: 1
---
b: 2
"""
)
d = [y for y in r.yaml_all]
assert isinstance(d, list)
assert len(d) == 2
assert d[0]["a"] == 1
assert d[1]["b"] == 2 | 0.42179 | 0.272965 |
import itertools
import numpy as np
import pandas as pd
import urllib.request
import matplotlib.pyplot as plt
def download_data(url, filename):
"""
Download the dataset from the url
:param url: url of file to be downloaded
    :param filename: filename to save to
:return:
"""
urllib.request.urlretrieve(url, filename)
def load_data(filename, class_col='class', rm_nan_by_axis=0):
"""
Load the dataset from file and return X, y
:param filename: name of xls file
:param class_col: column name of class
:param rm_nan_by_axis: remove empty values by axis row=0, column=1
:return: X: features y:labels
"""
xls_file = pd.read_excel(filename, index_col=0)
# remove missing values by row: axis=0, column: axis=1
xls_file = xls_file.dropna(axis=rm_nan_by_axis)
X = xls_file[xls_file.columns[0:-4]].values
y = xls_file[class_col].astype('category').cat.codes.values
return X, y
def write_metadata(filename, indices, labels):
    """
    Create a metadata file consisting of sample indices and labels
    :param filename: name of the file to save on disk
    :param indices: iterable of sample indices
    :param labels: iterable of sample labels
    """
with open(filename, 'w') as f:
f.write("Index\tLabel\n")
for index, label in zip(indices, labels):
f.write("{}\t{}\n".format(index, label))
def plot_confusion_matrix(cm, classes=None,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
:param cm: confusion matrix
:param classes: list of class names
:param normalize: normalize to 0-1
:param title: plot title
:param cmap: colormap
:return:
"""
if classes is None:
classes = ['class_{}'.format(i) for i in range(1, len(cm) + 1)]
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.tight_layout()
    plt.show()

# file: 2_Neural_Network/2_AutoEncoder/utils.py
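
# Hypothetical usage of plot_confusion_matrix() above, with the matrix
# computed by scikit-learn from toy labels; the class names are invented.
import numpy as np
from sklearn.metrics import confusion_matrix

y_true = np.array([0, 0, 1, 1, 2, 2, 2])
y_pred = np.array([0, 1, 1, 1, 2, 0, 2])
cm = confusion_matrix(y_true, y_pred)
plot_confusion_matrix(cm, classes=["class_a", "class_b", "class_c"],
                      normalize=True, title="Normalized confusion matrix")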
from __future__ import print_function, division
import argparse
import torch
import torch.nn as nn
import torch.optim as optim
import math
import pickle
import numpy as np
import matplotlib.pyplot as plt
from teacher_student import *
from teacher_dataset import *
def get_Q(path_to_mask_list, path_to_teacher, input_dim):
unpruned_MLP, mask_list = pickle.load(open(path_to_mask_list, 'rb'))
print('student w2:', unpruned_MLP.w2.weight.data)
mask_num = len(mask_list)
w1 = unpruned_MLP.w1.weight.data.cpu().numpy() # hid_dim * inp_dim
hid_dim, inp_dim = w1.shape[0], w1.shape[1]
print('student w1 size:', w1.shape, 'mask size:', mask_list[0].T.shape)
# get the expected Q
expected_Q = np.zeros((hid_dim, hid_dim))
for mask in mask_list:
        pruned_w = w1 * mask.T
        expected_Q += np.dot(pruned_w, pruned_w.T)
expected_Q = expected_Q / mask_num
expected_Q = expected_Q / input_dim
# get the unpruned Q
unpruned_Q = np.dot(w1, w1.T) / input_dim
# get the teacher net
teacher = pickle.load(open(path_to_teacher, 'rb'))
    teacher_w1 = teacher.w1.data.cpu().numpy() # teacher_hid_dim * input_dim
    print('teacher w1 size:', teacher_w1.shape)
    teacher_Q = np.dot(teacher_w1, teacher_w1.T) / input_dim
return expected_Q, unpruned_Q, teacher_Q
def plot_Q(expected_Q, unpruned_Q, teacher_Q):
plt.figure(1)
fig, ax = plt.subplots()
expected_Q = abs(expected_Q)
im = ax.imshow(expected_Q)
# Loop over data dimensions and create text annotations.
for i in range(len(expected_Q)):
for j in range(len(expected_Q)):
text = ax.text(j, i, '%.3f'%expected_Q[i, j],
ha="center", va="center", color="w")
ax.set_title("expected_Q")
fig.tight_layout()
plt.savefig('expected_Q.png')
plt.figure(2)
fig, ax = plt.subplots()
unpruned_Q = abs(unpruned_Q)
im = ax.imshow(unpruned_Q)
# Loop over data dimensions and create text annotations.
for i in range(len(unpruned_Q)):
for j in range(len(unpruned_Q)):
text = ax.text(j, i, '%.3f'%unpruned_Q[i, j],
ha="center", va="center", color="w")
ax.set_title("unpruned_Q")
fig.tight_layout()
plt.savefig('unpruned_Q.png')
plt.figure(3)
fig, ax = plt.subplots()
teacher_Q = abs(teacher_Q)
im = ax.imshow(teacher_Q)
# Loop over data dimensions and create text annotations.
for i in range(len(teacher_Q)):
for j in range(len(teacher_Q)):
text = ax.text(j, i, '%.3f'%teacher_Q[i, j],
ha="center", va="center", color="w")
ax.set_title("teacher_Q")
fig.tight_layout()
plt.savefig('teacher_Q.png')
#
def get_R(path_to_student_mask, path_to_teacher, input_dim):
# get the student net
unpruned_MLP, mask_list = pickle.load(open(path_to_student_mask, 'rb'))
mask_num = len(mask_list)
student_w1 = unpruned_MLP.w1.weight.data.cpu().numpy() # student_hid_dim * inp_dim
student_hid_dim, inp_dim = student_w1.shape[0], student_w1.shape[1]
print('student w1 size:', student_w1.shape, 'mask size:', mask_list[0].T.shape)
# get the teacher net
teacher = pickle.load(open(path_to_teacher, 'rb'))
    teacher_w1 = teacher.w1.data.cpu().numpy().T # input_dim * teacher_hid_dim
    teacher_hid_dim = teacher_w1.shape[1]
    print('teacher w1 size:', teacher_w1.shape)
    # get the expected R on pruned student_w1
    # student_hid_dim * teacher_hid_dim
    expected_R = np.zeros((student_hid_dim, teacher_hid_dim))
    for mask in mask_list:
        expected_R += np.dot(student_w1 * mask.T, teacher_w1)
    expected_R = expected_R / mask_num
    # get the expected R on unpruned student_w1
    unpruned_R = np.dot(student_w1, teacher_w1)
# pickle.dump((expected_R, unpruned_R), open('expected_R', "wb"))
return expected_R / input_dim, unpruned_R / input_dim
def plot_R(expected_R, unpruned_R):
plt.figure(1)
fig, ax = plt.subplots()
expected_R = abs(expected_R)
im = ax.imshow(expected_R)
# Loop over data dimensions and create text annotations.
for i in range(len(expected_R)):
for j in range(len(expected_R[1])):
text = ax.text(j, i, '%.3f'%expected_R[i, j],
ha="center", va="center", color="w")
ax.set_title("expected_R")
fig.tight_layout()
plt.savefig('expected_R.png')
plt.figure(2)
fig, ax = plt.subplots()
unpruned_R = abs(unpruned_R)
im = ax.imshow(unpruned_R)
# Loop over data dimensions and create text annotations.
for i in range(len(unpruned_R)):
for j in range(len(unpruned_R[1])):
text = ax.text(j, i, '%.3f'%unpruned_R[i, j],
ha="center", va="center", color="w")
ax.set_title("unpruned_R")
fig.tight_layout()
plt.savefig('unpruned_R.png')
def main():
parser = argparse.ArgumentParser(description='Order Parameter')
parser.add_argument('--path_to_student_mask', type = str)
parser.add_argument('--path_to_teacher', type = str, default = 'place_holder')
parser.add_argument('--input_dim', type = int, help='The input dimension for each data point.')
args = parser.parse_args()
expected_Q, unpruned_Q, teacher_Q = get_Q(args.path_to_student_mask, args.path_to_teacher, args.input_dim)
expected_R, unpruned_R = get_R(args.path_to_student_mask, args.path_to_teacher, args.input_dim)
# Permute the matrix to make it block diagonal
student_hid_dim, teacher_hid_dim = unpruned_R.shape
z = int(student_hid_dim/teacher_hid_dim)
unpruned_R_dash, unpruned_Q_dash, expected_R_dash ,expected_Q_dash = np.zeros((student_hid_dim,teacher_hid_dim)), np.zeros((student_hid_dim,student_hid_dim)), np.zeros((student_hid_dim,teacher_hid_dim)), np.zeros((student_hid_dim,student_hid_dim))
dic = [[] for x in range(teacher_hid_dim)]
for i in range(teacher_hid_dim):
for j in range(student_hid_dim):
if abs(unpruned_R[j][i])>=0.7:
dic[i].append(j)
print(dic,"hello")
for x in range(teacher_hid_dim):
for y in range(len(dic[x])):
new_row = x*z+y
cur = dic[x][y]
            print('remapping row', cur, '->', new_row)
unpruned_R_dash[new_row,:] = unpruned_R[cur,:]
expected_R_dash[new_row,:] = expected_R[cur,:]
for x in range(student_hid_dim):
for y in range(x+1):
i = dic[int(x/z)][x%z]
j = dic[int(y/z)][y%z]
if x==y:
unpruned_Q_dash[x][x] = unpruned_Q[i][i]
expected_Q_dash[x][x] = expected_Q[i][i]
else:
unpruned_Q_dash[x][y] = unpruned_Q[i][j]
unpruned_Q_dash[y][x] = unpruned_Q[i][j]
expected_Q_dash[x][y] = expected_Q[i][j]
expected_Q_dash[y][x] = expected_Q[i][j]
# unpruned_Q[[new_row,cur],:] = unpruned_Q[[cur,new_row],:]
# expected_Q[[new_row,cur],:] = expected_Q[[cur,new_row],:]
# unpruned_Q[:,[new_row,cur]] = unpruned_Q[:,[cur,new_row]]
# expected_Q[:,[new_row,cur]] = expected_Q[:,[cur,new_row]]
plot_Q(expected_Q_dash, unpruned_Q_dash, teacher_Q)
plot_R(expected_R_dash, unpruned_R_dash)
if __name__ == '__main__':
    main()

# file: evaluate.py
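
# Toy illustration of the order parameters computed above: for first-layer
# weights W_s (student) and W_t (teacher), Q = W_s W_s^T / input_dim holds
# the student-student overlaps and R = W_s W_t^T / input_dim the
# student-teacher overlaps. All dimensions here are made up for illustration.
import numpy as np

input_dim, student_hid_dim, teacher_hid_dim = 500, 8, 2
w_student = np.random.randn(student_hid_dim, input_dim)
w_teacher = np.random.randn(teacher_hid_dim, input_dim)

Q = w_student @ w_student.T / input_dim
R = w_student @ w_teacher.T / input_dim
print(Q.shape, R.shape)  # (8, 8) (8, 2)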
import json
import os
import re
import time
from aqt import mw
from aqt.utils import showInfo, chooseList
from aqt.qt import *
from anki.storage import Collection
def add_pitch_dialog():
# environment
collection_path = mw.col.path
plugin_dir_name = __name__
user_dir_path = os.path.split(collection_path)[0]
anki_dir_path = os.path.split(user_dir_path)[0]
plugin_dir_path = os.path.join(anki_dir_path, 'addons21', plugin_dir_name)
# plugin utils import
    pa_util = __import__('{}.util'.format(plugin_dir_name), fromlist=('foo',))
# load pitch dict
pitch_csv_path = os.path.join(plugin_dir_path, 'wadoku_pitchdb.csv')
acc_dict = pa_util.get_accent_dict(pitch_csv_path)
# figure out collection structure
deck_id = pa_util.select_deck_id('Which deck would you like to extend?')
note_type_ids = pa_util.get_note_type_ids(deck_id)
if len(note_type_ids) > 1:
note_type_id = pa_util.select_note_type(note_type_ids)
elif len(note_type_ids) < 1:
showInfo('No cards found.')
return
else:
note_type_id = note_type_ids[0]
note_ids = pa_util.get_note_ids(deck_id, note_type_id)
expr_idx, rdng_idx, out_idx = pa_util.select_note_fields_all(note_ids[0])
# extend notes
nf_lst, n_updt, n_adone, n_sfail = pa_util.add_pitch(
acc_dict, plugin_dir_name, note_ids, expr_idx, rdng_idx, out_idx
)
showInfo(('done :)\n'
'skipped {} already annotated notes\n'
'updated {} notes\n'
'failed to generate {} annotations\n'
'could not find {} expressions').format(
n_adone, n_updt, n_sfail, len(nf_lst)
)
)
def remove_pitch_dialog():
# environment
collection_path = mw.col.path
plugin_dir_name = __name__
user_dir_path = os.path.split(collection_path)[0]
anki_dir_path = os.path.split(user_dir_path)[0]
plugin_dir_path = os.path.join(anki_dir_path, 'addons21', plugin_dir_name)
# plugin utils import
    pa_util = __import__('{}.util'.format(plugin_dir_name), fromlist=('foo',))
# figure out collection structure
deck_id = pa_util.select_deck_id(
'From which deck would you like to remove?'
)
note_type_ids = pa_util.get_note_type_ids(deck_id)
if len(note_type_ids) > 1:
note_type_id = pa_util.select_note_type(note_type_ids)
elif len(note_type_ids) < 1:
showInfo('No cards found.')
return
else:
note_type_id = note_type_ids[0]
note_ids = pa_util.get_note_ids(deck_id, note_type_id)
del_idx = pa_util.select_note_fields_del(note_ids[0])
# remove from notes
n_adone, n_updt = pa_util.remove_pitch(note_ids, del_idx)
showInfo(('done :)\n'
'skipped {} notes w/o accent annotation\n'
'updated {} notes').format(
n_adone, n_updt
)
)
# add menu items
pa_menu = QMenu('Pitch Accent', mw)
pa_menu_add = pa_menu.addAction('add')
pa_menu_remove = pa_menu.addAction('remove')
# add triggers
pa_menu_add.triggered.connect(add_pitch_dialog)
pa_menu_remove.triggered.connect(remove_pitch_dialog)
# and add it to the tools menu
mw.form.menuTools.addMenu(pa_menu)
# mw.col.db.execute("update cards set ivl = ? where id = ?", newIvl, cardId)

# file: __init__.py
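
# The __import__('<pkg>.util', fromlist=('foo',)) calls above can be written
# more directly with importlib; this is a sketch of the equivalent call, not
# a tested drop-in change to the add-on.
import importlib

pa_util = importlib.import_module('{}.util'.format(__name__))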
import logging
import math
import os
import json
from urllib.parse import urlparse
from flask import g as flask_g, abort
from flask import (request, current_app, Response,
stream_with_context)
from flask_babel import gettext
from flask_restful import Resource
from py4j.protocol import Py4JJavaError
from sqlalchemy import or_, and_
from flask.views import MethodView
from marshmallow.exceptions import ValidationError
from limonero.py4j_init import create_gateway
from limonero.util import (upload, parse_hdfs_extra_params,
get_hdfs_conf)
from .app_auth import requires_auth
from .schema import *
_ = gettext
log = logging.getLogger(__name__)
def apply_filter(query, args, name, transform=None, transform_name=None):
    """Apply an equality filter to ``query`` when ``name`` is present in
    ``args``, optionally transforming the value and the field name first."""
    result = query
if name in args and args[name].strip() != '':
v = transform(args[name]) if transform else args[name]
f = transform_name(name) if transform_name else name
result = query.filter_by(**{f: v})
return result
def _filter_by_permissions(models, permissions):
    if flask_g.user.id != 0:  # It is not an inter-service call
conditions = or_(
Model.user_id == flask_g.user.id,
flask_g.user.id == 1,
and_(
ModelPermission.user_id == flask_g.user.id,
ModelPermission.permission.in_(permissions)
)
)
models = models.join(
Model.permissions, isouter=True).filter(conditions)
return models
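
# Sketch of how the two helpers above compose inside a handler: narrow the
# base query by a request argument, then by the caller's permissions. The
# function name and the args dict are illustrative only; a real handler
# passes request.args.
def _list_enabled_models_example():
    models = Model.query
    models = apply_filter(models, {'enabled': 'true'}, 'enabled',
                          transform=bool)
    models = _filter_by_permissions(models, list(PermissionType.values()))
    return models.all()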
class ModelListApi(Resource):
""" REST API for listing class Model """
@staticmethod
@requires_auth
def get():
result, result_code = {'status': 'ERROR',
'message': 'Internal error'}, 500
# noinspection PyBroadException
try:
if request.args.get('simple') != 'true':
only = None
else:
only = ('id', 'name', 'created',
'user_name', 'user_id')
if request.args.get('fields'):
only = tuple(
[x.strip() for x in request.args.get('fields').split(',')])
possible_filters = {'enabled': bool, 'type': None, 'user_id': int}
models = Model.query
for f, transform in list(possible_filters.items()):
models = apply_filter(models, request.args, f,
transform, lambda field: field)
models = _filter_by_permissions(
models, list(PermissionType.values())).filter(Model.enabled)
q = request.args.get('query')
if q:
                models = models.filter(or_(
                    Model.name.like('%{}%'.format(q)),
                    Model.type.like('%{}%'.format(q))
                ))
t = request.args.get('type')
if t:
models = models.filter(Model.type.in_(t.split(',')))
sort = request.args.get('sort', 'name')
if sort not in ['name', 'id', 'user_id', 'user_name', 'type']:
sort = 'id'
sort_option = getattr(Model, sort)
if request.args.get('asc', 'true') == 'false':
sort_option = sort_option.desc()
models = models.order_by(sort_option)
page = request.args.get('page') or '1'
if page is not None and page.isdigit():
page_size = int(request.args.get('size', 20))
page = int(page)
pagination = models.paginate(page, page_size, True)
result = {
'data': ModelListResponseSchema(
many=True, only=only).dump(pagination.items),
'pagination': {
'page': page, 'size': page_size,
'total': pagination.total,
'pages': int(
                            math.ceil(1.0 * pagination.total / page_size))}
}
else:
result = {
'data': ModelListResponseSchema(
many=True, only=only).dump(models)}
db.session.commit()
result_code = 200
except Exception as ex:
log.exception(str(ex))
return result, result_code
@staticmethod
@requires_auth
def post():
result, result_code = dict(
status="ERROR", message="Missing json in the request body"), 400
if request.json is not None:
overwrite = request.json.pop('overwrite', False)
request_schema = ModelCreateRequestSchema()
response_schema = ModelItemResponseSchema()
try:
model = request_schema.load(request.json)
if overwrite:
original = Model.query.filter(
Model.task_id==request.json['task_id']).first()
if original:
model.id = original.id
db.session.merge(model)
else:
db.session.add(model)
else:
db.session.add(model)
db.session.commit()
result, result_code = response_schema.dump(model), 200
except ValidationError as e:
result = dict(status="ERROR", message=gettext('Invalid data'),
errors=e.messages)
result_code = 400
except Exception as e:
log.exception('Error in POST')
result, result_code = dict(status="ERROR",
message=_("Internal error")), 500
if current_app.debug:
result['debug_detail'] = str(e)
db.session.rollback()
return result, result_code
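
# Hypothetical client-side call against the create endpoint above; the URL,
# port and payload fields are illustrative and not taken from this codebase.
# With overwrite=True the handler replaces any model carrying the same
# task_id instead of inserting a new row.
import requests

resp = requests.post(
    "http://localhost:5000/models",
    json={"name": "my-model", "task_id": "task-01", "overwrite": True},
)
print(resp.status_code, resp.json())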
class ModelDetailApi(Resource):
""" REST API for a single instance of class Model """
@staticmethod
@requires_auth
def get(model_id):
filtered = _filter_by_permissions(Model.query,
list(PermissionType.values()))
model = filtered.filter(Model.id == model_id).first()
if model is not None:
return ModelItemResponseSchema().dump(model)
else:
return dict(status="ERROR", message=_("%(type)s not found.",
type=_('Model'))), 404
@staticmethod
@requires_auth
def delete(model_id):
result, result_code = dict(
status="ERROR",
message=_("%(type)s not found.", type=_('Model'))), 404
filtered = _filter_by_permissions(
Model.query, [PermissionType.MANAGE, PermissionType.WRITE])
model = filtered.filter(Model.id == model_id).first()
if model is not None:
try:
model.enabled = False
db.session.add(model)
db.session.commit()
result, result_code = dict(
status="OK",
message=_("%(what)s was successfuly deleted",
what=_('Model'))), 200
except Exception as e:
log.exception('Error in DELETE')
result, result_code = dict(status="ERROR",
message=_("Internal error")), 500
if current_app.debug:
result['debug_detail'] = str(e)
db.session.rollback()
return result, result_code
@staticmethod
@requires_auth
def patch(model_id):
result = dict(status="ERROR", message=_("Insufficient data"))
result_code = 404
if request.json:
request_schema = partial_schema_factory(
ModelCreateRequestSchema)
# Ignore missing fields to allow partial updates
response_schema = ModelItemResponseSchema()
try:
model = request_schema.load(request.json, partial=True)
model.id = model_id
model = db.session.merge(model)
db.session.commit()
if model is not None:
result, result_code = dict(
status="OK",
message=_("%(what)s was successfuly updated",
what=_('Model')),
data=response_schema.dump(model)), 200
else:
result = dict(status="ERROR",
message=_("%(type)s not found.",
type=_('Model')))
except ValidationError as e:
result = dict(status="ERROR", message=gettext('Invalid data'),
errors=e.messages)
result_code = 400
except Exception as e:
log.exception('Error in PATCH')
result, result_code = dict(status="ERROR",
message=_("Internal error")), 500
if current_app.debug:
result['debug_detail'] = str(e)
db.session.rollback()
return result, result_code
class ModelPermissionApi(Resource):
""" REST API for sharing a Model """
@staticmethod
@requires_auth
def post(model_id, user_id):
result, result_code = dict(
status="ERROR", message=_("Missing json in the request body")), 400
if request.json is not None:
form = request.json
to_validate = ['permission', 'user_name', 'user_login']
error = False
for check in to_validate:
if check not in form or form.get(check, '').strip() == '':
result, result_code = dict(
status="ERROR", message=_("Validation error"),
errors={'Missing': check}), 400
error = True
break
if check == 'permission' and form.get(
'permission') not in list(PermissionType.values()):
result, result_code = dict(
status="ERROR", message=_("Validation error"),
errors={'Invalid': check}), 400
error = True
break
if not error:
try:
filtered = _filter_by_permissions(
Model.query, [PermissionType.MANAGE])
model = filtered.filter(
Model.id == model_id).first()
if model is not None:
conditions = [ModelPermission.model_id ==
model_id,
ModelPermission.user_id == user_id]
permission = ModelPermission.query.filter(
*conditions).first()
                        action_performed = _('%(what)s saved with success',
                                             what=_('Permission'))
if permission is not None:
permission.permission = form['permission']
else:
permission = ModelPermission(
model=model, user_id=user_id,
user_name=form['user_name'],
user_login=form['user_login'],
permission=form['permission'])
db.session.add(permission)
db.session.commit()
result, result_code = {'message': action_performed,
'status': 'OK'}, 200
else:
result, result_code = dict(
status="ERROR", message=_("%(type)s not found.",
type=_('Model'))), 404
except Exception as e:
log.exception('Error in POST')
result, result_code = dict(status="ERROR",
message=_("Internal error")), 500
if current_app.debug:
result['debug_detail'] = str(e)
db.session.rollback()
return result, result_code
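    # --- Illustrative payload sketch (editor's addition). The share endpoint
    # above validates three non-empty fields and requires "permission" to be
    # one of PermissionType.values(); "READ" below is an assumed member.
    #
    #   share_payload = {
    #       'permission': 'READ',
    #       'user_name': 'Jane Doe',
    #       'user_login': 'jane',
    #   }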
@staticmethod
@requires_auth
def delete(model_id, user_id):
result, result_code = dict(
status="ERROR",
message=_("%(type)s not found.", type=_('Model'))), 404
filtered = _filter_by_permissions(Model.query,
[PermissionType.MANAGE])
model = filtered.filter(Model.id == model_id).first()
if model is not None:
permission = ModelPermission.query.filter(
ModelPermission.model_id == model_id,
ModelPermission.user_id == user_id).first()
if permission is not None:
try:
db.session.delete(permission)
db.session.commit()
                    result, result_code = dict(
                        status="OK",
                        message=_("%(what)s was successfully deleted",
                                  what=_('Permission'))), 200
except Exception as e:
log.exception(
_('Error deleting %(what)s.', what=_('Model')))
result, result_code = dict(
status="ERROR", message=_("Internal error")), 500
if current_app.debug:
result['debug_detail'] = str(e)
db.session.rollback()
return result, result_code
class ModelUploadApi(Resource):
""" REST API for upload a Model """
@staticmethod
@requires_auth
def get():
# noinspection PyBroadException
try:
result, result_code = 'OK', 200
identifier = request.args.get('resumableIdentifier', type=str)
filename = request.args.get('resumableFilename', type=str)
chunk_number = request.args.get('resumableChunkNumber', type=int)
storage_id = request.args.get('storage_id', type=int)
if not all([storage_id, identifier, filename, chunk_number]):
result, result_code = {'status': 'ERROR', 'message': gettext(
'Missing required parameters')}, 400
else:
use_hostname = current_app.config.get(
'dfs.client.use.datanode.hostname', True)
chunk_path, hdfs = upload.create_hdfs_chunk(
chunk_number, filename,
Storage.query.get(storage_id),
use_hostname, current_app.gateway_port)
current_app.logger.debug('Creating chunk: %s', chunk_path)
if not hdfs.exists(chunk_path):
                    # The chunk does not exist yet and needs to be uploaded
                    # by resumable.js
result, result_code = {'status': 'OK',
'message': gettext('Not found')}, 404
return result, result_code
except Py4JJavaError as java_ex:
return ModelUploadApi.handle_jvm_error(java_ex)
@staticmethod
def handle_jvm_error(java_ex):
log.exception('Java error')
        if 'Could not obtain block' in java_ex.java_exception.getMessage():
            result, status = {'status': 'ERROR',
                              'message': upload.WRONG_HDFS_CONFIG}, 400
else:
result, status = {'status': 'ERROR',
'message': gettext('Internal error')}, 400
return result, status
@staticmethod
@requires_auth
def post():
try:
result, result_code = 'OK', 200
identifier = request.args.get('resumableIdentifier', type=str)
filename = request.args.get('resumableFilename', type=str)
chunk_number = request.args.get('resumableChunkNumber', type=int)
total_chunks = request.args.get('resumableTotalChunks', type=int)
total_size = request.args.get('resumableTotalSize', type=int)
storage_id = request.args.get('storage_id', type=int)
if not all([identifier, filename, chunk_number]):
result, result_code = {'status': 'ERROR', 'message': gettext(
'Missing required parameters')}, 400
else:
use_hostname = current_app.config.get(
'dfs.client.use.datanode.hostname', True)
conf, jvm = upload.create_gateway_and_hdfs_conf(
use_hostname, current_app.gateway_port)
storage = Storage.query.get(storage_id)
file_data, hdfs, uri, full_path, counter = upload.write_chunk(
jvm, chunk_number, filename, storage, request.get_data(),
conf)
current_app.logger.debug('Wrote chunk: %s', full_path)
if counter == total_chunks:
result_code, result, target_path = upload.merge_chunks(
conf, filename, full_path, hdfs, jvm, uri,
current_app.config.get('instance', 'unnamed'))
if result_code != 500:
user = getattr(flask_g, 'user')
ds, response_schema = ModelUploadApi.after_merge_chunk(
user, storage, filename, target_path, file_data,
total_size)
result = {'status': 'OK',
'data': response_schema.dump(ds)}
return result, result_code, {
'Content-Type': 'application/json; charset=utf-8'}
except Py4JJavaError as java_ex:
return ModelUploadApi.handle_jvm_error(java_ex)
@staticmethod
def after_merge_chunk(user, storage, filename, target_path,
file_data=None, total_size=None):
model = Model(
name=filename,
enabled=True,
created=datetime.datetime.now(),
path=target_path.toString(),
class_name=None,
type=ModelType.UNSPECIFIED,
user_id=user.id,
user_login=user.login,
            # str.format on bytes would embed "b'...'" in the name, so keep
            # the values as text here
            user_name='{} {}'.format(
                user.first_name, user.last_name).strip(),
storage_id=storage.id
)
db.session.add(model)
db.session.commit()
response_schema = ModelItemResponseSchema()
return model, response_schema
class ModelDownloadApi(MethodView):
""" Entry point for downloading a Model """
# noinspection PyUnresolvedReferences
@staticmethod
@requires_auth
def get(model_id):
        if 'ADMINISTRATOR' in flask_g.user.permissions:
            model = Model.query.filter(Model.id == model_id).first()
        else:
            model = Model.query.filter(
                Model.id == model_id,
                Model.user_id == flask_g.user.id).first()
if not model:
abort(404)
url = model.storage.url
if url[-1] == '/':
url = url[:-1]
parsed = urlparse(f'{url}{model.path}')
gateway = create_gateway(log, current_app.gateway_port or 18001)
jvm = gateway.jvm
content_type = ('application/zip' if model.type == ModelType.MLEAP
else 'application/x-binary')
if parsed.scheme == 'file':
            def do_download():
                # stream the local file in 4 KiB chunks
                with open(parsed.path, 'rb') as f:
                    while True:
                        read_data = f.read(4096)
                        if not read_data:
                            break
                        yield read_data
name = parsed.path.split('/')[-1]
result = Response(stream_with_context(
do_download()), mimetype=content_type)
result.headers[
'Cache-Control'] = 'no-cache, no-store, must-revalidate'
result.headers['Pragma'] = 'no-cache'
result.headers["Content-Disposition"] = \
"attachment; filename={}".format(name)
result_code = 200
else:
if parsed.port:
str_uri = '{proto}://{host}:{port}'.format(
proto=parsed.scheme, host=parsed.hostname, port=parsed.port)
else:
str_uri = '{proto}://{host}'.format(
proto=parsed.scheme, host=parsed.hostname)
try:
uri = jvm.java.net.URI(str_uri)
extra_params = parse_hdfs_extra_params(
model.storage.extra_params)
conf = get_hdfs_conf(jvm, extra_params, current_app.config)
hdfs = jvm.org.apache.hadoop.fs.FileSystem.get(uri, conf)
chunk_path = jvm.org.apache.hadoop.fs.Path(parsed.path)
if not hdfs.exists(chunk_path):
                    result, result_code = gettext("%(type)s not found.",
                                                  type=gettext('Model')), 404
else:
buf = jvm.java.nio.ByteBuffer.allocate(4096)
input_in = hdfs.open(chunk_path)
                    def do_download():
                        done = False
                        while not done:
                            # Java InputStream.read() returns -1 at EOF
                            read_count = input_in.read(buf)
                            buf.position(0)
                            if read_count < 4096:
                                done = True
                                # yield only the bytes actually read on the
                                # final (possibly empty) buffer
                                yield bytes(buf.array())[:max(read_count, 0)]
                            else:
                                yield bytes(buf.array())
name = parsed.path.split('/')[-1]
result = Response(stream_with_context(
do_download()), mimetype=content_type)
result.headers[
'Cache-Control'] = 'no-cache, no-store, must-revalidate'
result.headers['Pragma'] = 'no-cache'
result.headers["Content-Disposition"] = \
"attachment; filename={}".format(name)
result_code = 200
            except Py4JJavaError as java_ex:
                if 'Could not obtain block' in \
                        java_ex.java_exception.getMessage():
                    return {'status': 'ERROR',
                            'message': upload.WRONG_HDFS_CONFIG}, 400
                log.exception('Java error')
                result = json.dumps(
                    {'status': 'ERROR', 'message': gettext('Internal error')})
                result_code = 500
except Exception as e:
result = json.dumps(
{'status': 'ERROR', 'message': gettext('Internal error')})
result_code = 500
log.exception(str(e))
        return result, result_code | limonero/model_api.py | 0.373533 | 0.086864 |
import argparse
import hashlib
import os
import sys
from functools import partial
from pathlib import Path
from .utils import ApplicationError
def dhash(string):
m = hashlib.sha1()
m.update(string.encode())
return int(m.hexdigest(), base=16)
def autoport(path):
lower = 2000
upper = 65535 + 1
path = os.path.abspath(path)
return dhash(path) % (upper - lower) + lower
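# Illustrative sketch (editor's addition): autoport() is deterministic, so a
# given directory always maps to the same port in [2000, 65536). For example:
#
#   p1 = autoport("/tmp/site")
#   p2 = autoport("/tmp/site")
#   assert p1 == p2 and 2000 <= p1 < 65536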
async def command_serve_impl(url, open_browser, open_url_delay, **kwargs):
root = Path(kwargs["root"])
if not root.exists():
raise ApplicationError(f"{root} does not exist")
elif not root.is_dir():
raise ApplicationError(f"{root} is not a directory")
import trio
from .server import start_server
async with trio.open_nursery() as nursery:
nursery.start_soon(partial(start_server, **kwargs))
if open_browser:
await trio.sleep(open_url_delay)
command_browse(url)
def command_serve(**kwargs):
"""
Serve files in `root`. This is the default behavior.
"""
import trio
trio.run(partial(command_serve_impl, **kwargs))
def command_browse(url, port=None, root=None):
"""
Open browser without starting the server.
"""
import webbrowser
print("Opening:", url, file=sys.stderr)
webbrowser.open(url)
def command_print_url(url, port=None, root=None):
"""
Print URL from which files in `root` would be served.
"""
print(url)
def preprocess_kwargs(*, port, **kwargs):
root = kwargs["root"]
if port.lower() == "auto":
port_num = autoport(root)
    else:
        port_num = int(port)  # an explicit port number was given
kwargs["port"] = port_num
kwargs["url"] = f"http://localhost:{port_num}"
return kwargs
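# Illustrative sketch (editor's addition): both port modes resolved through
# preprocess_kwargs(). "auto" derives the port from the root path; anything
# else must parse as an integer.
#
#   kw = preprocess_kwargs(port="auto", root=".")
#   assert kw["url"] == f"http://localhost:{kw['port']}"
#   kw = preprocess_kwargs(port="8000", root=".")
#   assert kw["port"] == 8000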
def run_cli(*, command, **kwargs):
import logging
from rich.logging import RichHandler
if kwargs.get("debug", False):
level = logging.DEBUG
else:
level = logging.INFO
logging.basicConfig(
level=level,
format="%(message)s",
datefmt="[%X]",
handlers=[RichHandler(rich_tracebacks=True)],
)
# https://rich.readthedocs.io/en/latest/logging.html
command(**preprocess_kwargs(**kwargs))
def parse_args(args=None):
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter, description=__doc__
)
subparsers = parser.add_subparsers()
def subp(argument, command):
doc = command.__doc__
try:
title = next(filter(None, map(str.strip, (doc or "").splitlines())))
except StopIteration:
title = None
p = subparsers.add_parser(
argument,
formatter_class=argparse.RawDescriptionHelpFormatter,
help=title,
description=doc,
)
p.set_defaults(command=command)
return p
def add_common_arguments(p):
p.add_argument(
"root",
nargs="?",
default=".",
help="""
Directory to serve.
(default: %(default)s)
""",
)
p.add_argument(
"--port",
default="auto",
help="""
Port number to use. "auto" (default) means to decide it based on
`root`.
""",
)
parser.set_defaults(command=command_serve)
p = subp("serve", command_serve)
add_common_arguments(p)
p.add_argument(
"--debug",
action="store_true",
help="""
Enable debugging.
""",
)
p.add_argument(
"--open-url-delay",
default=0.5,
type=float,
help="""
        Number of seconds to wait before the URL is opened.
""",
)
p.add_argument(
"--open",
dest="open_browser",
action="store_const",
const=True,
default=True,
help="""
Open the page after starting the server (default).
""",
)
p.add_argument(
"--no-open",
dest="open_browser",
action="store_const",
const=False,
help="""
Don't open the page after starting the server.
""",
)
p = subp("browse", command_browse)
add_common_arguments(p)
p = subp("print-url", command_print_url)
add_common_arguments(p)
'''
p = subp("pause", command_pause)
add_common_arguments(p)
p.add_argument(
"--restart-after",
type=float,
metavar="SECONDS",
help="""
    Restart after given number of seconds. -1 (default) means to
never automatically restart.
""",
)
p = subp("restart", command_restart)
add_common_arguments(p)
'''
if args is None:
args = sys.argv[1:]
if not args:
args = ["serve"] # so that default above are used
return parser.parse_args(args)
def main(args=None):
try:
run_cli(**vars(parse_args(args)))
except ApplicationError as err:
print(err, file=sys.stderr)
sys.exit(1)
except KeyboardInterrupt:
        pass | src/refresher/cli.py | 0.380644 | 0.101233 |
from . import domainresource
# Submodules referenced by the elementProperties() tables below. In some
# generated fhirclient-style packages these imports live at the bottom of the
# module; they are gathered here so the references resolve when read top-down.
from . import (codeableconcept, coding, contactdetail, fhirdate,
               fhirprimitive, fhirreference, identifier, usagecontext)
class TestScript(domainresource.DomainResource):
""" Describes a set of tests.
A structured set of tests against a FHIR server or client implementation to
determine compliance against the FHIR specification.
"""
resource_type = "TestScript"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.contact = None
""" Contact details for the publisher.
List of `ContactDetail` items (represented as `dict` in JSON). """
self.copyright = None
""" Use and/or publishing restrictions.
Type `str`. """
self._copyright = None
""" extension for fhir primitive copyright"""
self.date = None
""" Date last changed.
Type `FHIRDate` (represented as `str` in JSON). """
self.description = None
""" Natural language description of the test script.
Type `str`. """
self._description = None
""" extension for fhir primitive description"""
self.destination = None
""" An abstract server representing a destination or receiver in a
message exchange.
List of `TestScriptDestination` items (represented as `dict` in JSON). """
self.experimental = None
""" For testing purposes, not real usage.
Type `bool`. """
self._experimental = None
""" extension for fhir primitive experimental"""
self.fixture = None
""" Fixture in the test script - by reference (uri).
List of `TestScriptFixture` items (represented as `dict` in JSON). """
self.identifier = None
""" Additional identifier for the test script.
Type `Identifier` (represented as `dict` in JSON). """
self.jurisdiction = None
""" Intended jurisdiction for test script (if applicable).
List of `CodeableConcept` items (represented as `dict` in JSON). """
self.metadata = None
""" Required capability that is assumed to function correctly on the
FHIR server being tested.
Type `TestScriptMetadata` (represented as `dict` in JSON). """
self.name = None
""" Name for this test script (computer friendly).
Type `str`. """
self._name = None
""" extension for fhir primitive name"""
self.origin = None
""" An abstract server representing a client or sender in a message
exchange.
List of `TestScriptOrigin` items (represented as `dict` in JSON). """
self.profile = None
""" Reference of the validation profile.
List of `FHIRReference` items (represented as `dict` in JSON). """
self.publisher = None
""" Name of the publisher (organization or individual).
Type `str`. """
self._publisher = None
""" extension for fhir primitive publisher"""
self.purpose = None
""" Why this test script is defined.
Type `str`. """
self._purpose = None
""" extension for fhir primitive purpose"""
self.setup = None
""" A series of required setup operations before tests are executed.
Type `TestScriptSetup` (represented as `dict` in JSON). """
self.status = None
""" draft | active | retired | unknown.
Type `str`. """
self._status = None
""" extension for fhir primitive status"""
self.teardown = None
""" A series of required clean up steps.
Type `TestScriptTeardown` (represented as `dict` in JSON). """
self.test = None
""" A test in this script.
List of `TestScriptTest` items (represented as `dict` in JSON). """
self.title = None
""" Name for this test script (human friendly).
Type `str`. """
self._title = None
""" extension for fhir primitive title"""
self.url = None
""" Canonical identifier for this test script, represented as a URI
(globally unique).
Type `str`. """
self._url = None
""" extension for fhir primitive url"""
self.useContext = None
""" The context that the content is intended to support.
List of `UsageContext` items (represented as `dict` in JSON). """
self.variable = None
""" Placeholder for evaluated elements.
List of `TestScriptVariable` items (represented as `dict` in JSON). """
self.version = None
""" Business version of the test script.
Type `str`. """
self._version = None
""" extension for fhir primitive version"""
super(TestScript, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScript, self).elementProperties()
        js.extend([
            ("contact", "contact", contactdetail.ContactDetail, True, None, False),
            ("copyright", "copyright", str, False, None, False),
            ("_copyright", "_copyright", fhirprimitive.FHIRPrimitive, False, None, False),
            ("date", "date", fhirdate.FHIRDate, False, None, False),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("destination", "destination", TestScriptDestination, True, None, False),
            ("experimental", "experimental", bool, False, None, False),
            ("_experimental", "_experimental", fhirprimitive.FHIRPrimitive, False, None, False),
            ("fixture", "fixture", TestScriptFixture, True, None, False),
            ("identifier", "identifier", identifier.Identifier, False, None, False),
            ("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
            ("metadata", "metadata", TestScriptMetadata, False, None, False),
            ("name", "name", str, False, None, True),
            ("_name", "_name", fhirprimitive.FHIRPrimitive, False, None, False),
            ("origin", "origin", TestScriptOrigin, True, None, False),
            ("profile", "profile", fhirreference.FHIRReference, True, None, False),
            ("publisher", "publisher", str, False, None, False),
            ("_publisher", "_publisher", fhirprimitive.FHIRPrimitive, False, None, False),
            ("purpose", "purpose", str, False, None, False),
            ("_purpose", "_purpose", fhirprimitive.FHIRPrimitive, False, None, False),
            ("setup", "setup", TestScriptSetup, False, None, False),
            ("status", "status", str, False, None, True),
            ("_status", "_status", fhirprimitive.FHIRPrimitive, False, None, False),
            ("teardown", "teardown", TestScriptTeardown, False, None, False),
            ("test", "test", TestScriptTest, True, None, False),
            ("title", "title", str, False, None, False),
            ("_title", "_title", fhirprimitive.FHIRPrimitive, False, None, False),
            ("url", "url", str, False, None, True),
            ("_url", "_url", fhirprimitive.FHIRPrimitive, False, None, False),
            ("useContext", "useContext", usagecontext.UsageContext, True, None, False),
            ("variable", "variable", TestScriptVariable, True, None, False),
            ("version", "version", str, False, None, False),
            ("_version", "_version", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
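    # --- Illustrative sketch (editor's addition): constructing a minimal
    # TestScript. "name", "status" and "url" are the required fields (the
    # entries flagged True above); field names follow the FHIR resource.
    #
    #   ts = TestScript({
    #       'resourceType': 'TestScript',
    #       'url': 'http://example.org/fhir/TestScript/example',
    #       'name': 'ExampleScript',
    #       'status': 'draft',
    #   })
    #   assert ts.name == 'ExampleScript'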
from . import backboneelement
class TestScriptDestination(backboneelement.BackboneElement):
""" An abstract server representing a destination or receiver in a message
exchange.
An abstract server used in operations within this test script in the
destination element.
"""
resource_type = "TestScriptDestination"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.index = None
""" The index of the abstract destination server starting at 1.
Type `int`. """
self._index = None
""" extension for fhir primitive index"""
self.profile = None
""" FHIR-Server | FHIR-SDC-FormManager | FHIR-SDC-FormReceiver | FHIR-
SDC-FormProcessor.
Type `Coding` (represented as `dict` in JSON). """
super(TestScriptDestination, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptDestination, self).elementProperties()
        js.extend([
            ("index", "index", int, False, None, True),
            ("_index", "_index", fhirprimitive.FHIRPrimitive, False, None, False),
            ("profile", "profile", coding.Coding, False, None, True),
        ])
return js
class TestScriptFixture(backboneelement.BackboneElement):
""" Fixture in the test script - by reference (uri).
Fixture in the test script - by reference (uri). All fixtures are required
for the test script to execute.
"""
resource_type = "TestScriptFixture"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.autocreate = None
""" Whether or not to implicitly create the fixture during setup.
Type `bool`. """
self._autocreate = None
""" extension for fhir primitive autocreate"""
self.autodelete = None
""" Whether or not to implicitly delete the fixture during teardown.
Type `bool`. """
self._autodelete = None
""" extension for fhir primitive autodelete"""
self.resource = None
""" Reference of the resource.
Type `FHIRReference` (represented as `dict` in JSON). """
super(TestScriptFixture, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptFixture, self).elementProperties()
        js.extend([
            ("autocreate", "autocreate", bool, False, None, True),
            ("_autocreate", "_autocreate", fhirprimitive.FHIRPrimitive, False, None, False),
            ("autodelete", "autodelete", bool, False, None, True),
            ("_autodelete", "_autodelete", fhirprimitive.FHIRPrimitive, False, None, False),
            ("resource", "resource", fhirreference.FHIRReference, False, None, False),
        ])
return js
class TestScriptMetadata(backboneelement.BackboneElement):
""" Required capability that is assumed to function correctly on the FHIR
server being tested.
    The required capabilities must exist and are assumed to function
    correctly on the FHIR server being tested.
"""
resource_type = "TestScriptMetadata"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.capability = None
""" Capabilities that are assumed to function correctly on the FHIR
server being tested.
List of `TestScriptMetadataCapability` items (represented as `dict` in JSON). """
self.link = None
""" Links to the FHIR specification.
List of `TestScriptMetadataLink` items (represented as `dict` in JSON). """
super(TestScriptMetadata, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptMetadata, self).elementProperties()
js.extend([
("capability", "capability", TestScriptMetadataCapability, True, None, True),
("link", "link", TestScriptMetadataLink, True, None, False),
])
return js
class TestScriptMetadataCapability(backboneelement.BackboneElement):
""" Capabilities that are assumed to function correctly on the FHIR server
being tested.
Capabilities that must exist and are assumed to function correctly on the
FHIR server being tested.
"""
resource_type = "TestScriptMetadataCapability"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.capabilities = None
""" Required Capability Statement.
Type `str`. """
self._capabilities = None
""" extension for fhir primitive capabilities"""
self.description = None
""" The expected capabilities of the server.
Type `str`. """
self._description = None
""" extension for fhir primitive description"""
self.destination = None
""" Which server these requirements apply to.
Type `int`. """
self._destination = None
""" extension for fhir primitive destination"""
self.link = None
""" Links to the FHIR specification.
List of `str` items. """
self._link = None
""" extension for fhir primitive link"""
self.origin = None
""" Which origin server these requirements apply to.
List of `int` items. """
self._origin = None
""" extension for fhir primitive origin"""
self.required = None
""" Are the capabilities required?.
Type `bool`. """
self._required = None
""" extension for fhir primitive required"""
self.validated = None
""" Are the capabilities validated?.
Type `bool`. """
self._validated = None
""" extension for fhir primitive validated"""
super(TestScriptMetadataCapability, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptMetadataCapability, self).elementProperties()
        js.extend([
            ("capabilities", "capabilities", str, False, None, True),
            ("_capabilities", "_capabilities", fhirprimitive.FHIRPrimitive, False, None, False),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("destination", "destination", int, False, None, False),
            ("_destination", "_destination", fhirprimitive.FHIRPrimitive, False, None, False),
            ("link", "link", str, True, None, False),
            ("_link", "_link", fhirprimitive.FHIRPrimitive, False, None, False),
            ("origin", "origin", int, True, None, False),
            ("_origin", "_origin", fhirprimitive.FHIRPrimitive, False, None, False),
            ("required", "required", bool, False, None, True),
            ("_required", "_required", fhirprimitive.FHIRPrimitive, False, None, False),
            ("validated", "validated", bool, False, None, True),
            ("_validated", "_validated", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
class TestScriptMetadataLink(backboneelement.BackboneElement):
""" Links to the FHIR specification.
A link to the FHIR specification that this test is covering.
"""
resource_type = "TestScriptMetadataLink"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.description = None
""" Short description.
Type `str`. """
self._description = None
""" extension for fhir primitive description"""
self.url = None
""" URL to the specification.
Type `str`. """
self._url = None
""" extension for fhir primitive url"""
super(TestScriptMetadataLink, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptMetadataLink, self).elementProperties()
        js.extend([
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("url", "url", str, False, None, True),
            ("_url", "_url", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
class TestScriptOrigin(backboneelement.BackboneElement):
""" An abstract server representing a client or sender in a message exchange.
An abstract server used in operations within this test script in the origin
element.
"""
resource_type = "TestScriptOrigin"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.index = None
""" The index of the abstract origin server starting at 1.
Type `int`. """
self._index = None
""" extension for fhir primitive index"""
self.profile = None
""" FHIR-Client | FHIR-SDC-FormFiller.
Type `Coding` (represented as `dict` in JSON). """
super(TestScriptOrigin, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptOrigin, self).elementProperties()
        js.extend([
            ("index", "index", int, False, None, True),
            ("_index", "_index", fhirprimitive.FHIRPrimitive, False, None, False),
            ("profile", "profile", coding.Coding, False, None, True),
        ])
return js
class TestScriptSetup(backboneelement.BackboneElement):
""" A series of required setup operations before tests are executed.
"""
resource_type = "TestScriptSetup"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.action = None
""" A setup operation or assert to perform.
List of `TestScriptSetupAction` items (represented as `dict` in JSON). """
super(TestScriptSetup, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptSetup, self).elementProperties()
js.extend([
("action", "action", TestScriptSetupAction, True, None, True),
])
return js
class TestScriptSetupAction(backboneelement.BackboneElement):
""" A setup operation or assert to perform.
Action would contain either an operation or an assertion.
"""
resource_type = "TestScriptSetupAction"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.assert_fhir = None
""" The assertion to perform.
Type `TestScriptSetupActionAssert` (represented as `dict` in JSON). """
self.operation = None
""" The setup operation to perform.
Type `TestScriptSetupActionOperation` (represented as `dict` in JSON). """
super(TestScriptSetupAction, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptSetupAction, self).elementProperties()
js.extend([
("assert_fhir", "assert", TestScriptSetupActionAssert, False, None, False),
("operation", "operation", TestScriptSetupActionOperation, False, None, False),
])
return js
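    # --- Illustrative sketch (editor's addition): a setup action carries
    # either an operation or an assert, not both. The JSON key "assert" maps
    # to the Python attribute assert_fhir (see elementProperties above).
    #
    #   action = TestScriptSetupAction({
    #       'operation': {'method': 'get', 'encodeRequestUrl': True}
    #   })
    #   assert action.operation is not None and action.assert_fhir is None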
class TestScriptSetupActionAssert(backboneelement.BackboneElement):
""" The assertion to perform.
Evaluates the results of previous operations to determine if the server
under test behaves appropriately.
"""
resource_type = "TestScriptSetupActionAssert"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.compareToSourceExpression = None
""" The FHIRPath expression to evaluate against the source fixture.
Type `str`. """
self._compareToSourceExpression = None
""" extension for fhir primitive compareToSourceExpression"""
self.compareToSourceId = None
""" Id of the source fixture to be evaluated.
Type `str`. """
self._compareToSourceId = None
""" extension for fhir primitive compareToSourceId"""
self.compareToSourcePath = None
""" XPath or JSONPath expression to evaluate against the source fixture.
Type `str`. """
self._compareToSourcePath = None
""" extension for fhir primitive compareToSourcePath"""
self.contentType = None
""" Mime type to compare against the 'Content-Type' header.
Type `str`. """
self._contentType = None
""" extension for fhir primitive contentType"""
self.description = None
""" Tracking/reporting assertion description.
Type `str`. """
self._description = None
""" extension for fhir primitive description"""
self.direction = None
""" response | request.
Type `str`. """
self._direction = None
""" extension for fhir primitive direction"""
self.expression = None
""" The FHIRPath expression to be evaluated.
Type `str`. """
self._expression = None
""" extension for fhir primitive expression"""
self.headerField = None
""" HTTP header field name.
Type `str`. """
self._headerField = None
""" extension for fhir primitive headerField"""
self.label = None
""" Tracking/logging assertion label.
Type `str`. """
self._label = None
""" extension for fhir primitive label"""
self.minimumId = None
""" Fixture Id of minimum content resource.
Type `str`. """
self._minimumId = None
""" extension for fhir primitive minimumId"""
self.navigationLinks = None
""" Perform validation on navigation links?.
Type `bool`. """
self._navigationLinks = None
""" extension for fhir primitive navigationLinks"""
self.operator = None
""" equals | notEquals | in | notIn | greaterThan | lessThan | empty |
notEmpty | contains | notContains | eval.
Type `str`. """
self._operator = None
""" extension for fhir primitive operator"""
self.path = None
""" XPath or JSONPath expression.
Type `str`. """
self._path = None
""" extension for fhir primitive path"""
self.requestMethod = None
""" delete | get | options | patch | post | put | head.
Type `str`. """
self._requestMethod = None
""" extension for fhir primitive requestMethod"""
self.requestURL = None
""" Request URL comparison value.
Type `str`. """
self._requestURL = None
""" extension for fhir primitive requestURL"""
self.resource = None
""" Resource type.
Type `str`. """
self._resource = None
""" extension for fhir primitive resource"""
self.response = None
""" okay | created | noContent | notModified | bad | forbidden |
notFound | methodNotAllowed | conflict | gone | preconditionFailed
| unprocessable.
Type `str`. """
self._response = None
""" extension for fhir primitive response"""
self.responseCode = None
""" HTTP response code to test.
Type `str`. """
self._responseCode = None
""" extension for fhir primitive responseCode"""
self.sourceId = None
""" Fixture Id of source expression or headerField.
Type `str`. """
self._sourceId = None
""" extension for fhir primitive sourceId"""
self.validateProfileId = None
""" Profile Id of validation profile reference.
Type `str`. """
self._validateProfileId = None
""" extension for fhir primitive validateProfileId"""
self.value = None
""" The value to compare to.
Type `str`. """
self._value = None
""" extension for fhir primitive value"""
self.warningOnly = None
""" Will this assert produce a warning only on error?.
Type `bool`. """
self._warningOnly = None
""" extension for fhir primitive warningOnly"""
super(TestScriptSetupActionAssert, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptSetupActionAssert, self).elementProperties()
        js.extend([
            ("compareToSourceExpression", "compareToSourceExpression", str, False, None, False),
            ("_compareToSourceExpression", "_compareToSourceExpression", fhirprimitive.FHIRPrimitive, False, None, False),
            ("compareToSourceId", "compareToSourceId", str, False, None, False),
            ("_compareToSourceId", "_compareToSourceId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("compareToSourcePath", "compareToSourcePath", str, False, None, False),
            ("_compareToSourcePath", "_compareToSourcePath", fhirprimitive.FHIRPrimitive, False, None, False),
            ("contentType", "contentType", str, False, None, False),
            ("_contentType", "_contentType", fhirprimitive.FHIRPrimitive, False, None, False),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("direction", "direction", str, False, None, False),
            ("_direction", "_direction", fhirprimitive.FHIRPrimitive, False, None, False),
            ("expression", "expression", str, False, None, False),
            ("_expression", "_expression", fhirprimitive.FHIRPrimitive, False, None, False),
            ("headerField", "headerField", str, False, None, False),
            ("_headerField", "_headerField", fhirprimitive.FHIRPrimitive, False, None, False),
            ("label", "label", str, False, None, False),
            ("_label", "_label", fhirprimitive.FHIRPrimitive, False, None, False),
            ("minimumId", "minimumId", str, False, None, False),
            ("_minimumId", "_minimumId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("navigationLinks", "navigationLinks", bool, False, None, False),
            ("_navigationLinks", "_navigationLinks", fhirprimitive.FHIRPrimitive, False, None, False),
            ("operator", "operator", str, False, None, False),
            ("_operator", "_operator", fhirprimitive.FHIRPrimitive, False, None, False),
            ("path", "path", str, False, None, False),
            ("_path", "_path", fhirprimitive.FHIRPrimitive, False, None, False),
            ("requestMethod", "requestMethod", str, False, None, False),
            ("_requestMethod", "_requestMethod", fhirprimitive.FHIRPrimitive, False, None, False),
            ("requestURL", "requestURL", str, False, None, False),
            ("_requestURL", "_requestURL", fhirprimitive.FHIRPrimitive, False, None, False),
            ("resource", "resource", str, False, None, False),
            ("_resource", "_resource", fhirprimitive.FHIRPrimitive, False, None, False),
            ("response", "response", str, False, None, False),
            ("_response", "_response", fhirprimitive.FHIRPrimitive, False, None, False),
            ("responseCode", "responseCode", str, False, None, False),
            ("_responseCode", "_responseCode", fhirprimitive.FHIRPrimitive, False, None, False),
            ("sourceId", "sourceId", str, False, None, False),
            ("_sourceId", "_sourceId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("validateProfileId", "validateProfileId", str, False, None, False),
            ("_validateProfileId", "_validateProfileId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("value", "value", str, False, None, False),
            ("_value", "_value", fhirprimitive.FHIRPrimitive, False, None, False),
            ("warningOnly", "warningOnly", bool, False, None, True),
            ("_warningOnly", "_warningOnly", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
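# Note (inferred from usage in this file, not stated explicitly by the source):
# each 6-tuple passed to js.extend() follows the fhirclient elementProperties
# convention of
#     (attribute_name, json_name, type, is_list, of_many_group, required).
# For example, ("warningOnly", "warningOnly", bool, False, None, True) declares
# a required scalar bool, while ("requestHeader", "requestHeader",
# TestScriptSetupActionOperationRequestHeader, True, None, False) declares an
# optional list of backbone elements.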
class TestScriptSetupActionOperation(backboneelement.BackboneElement):
""" The setup operation to perform.
The operation to perform.
"""
resource_type = "TestScriptSetupActionOperation"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.accept = None
""" Mime type to accept in the payload of the response, with charset
        etc.
Type `str`. """
self._accept = None
""" extension for fhir primitive accept"""
self.contentType = None
""" Mime type of the request payload contents, with charset etc..
Type `str`. """
self._contentType = None
""" extension for fhir primitive contentType"""
self.description = None
""" Tracking/reporting operation description.
Type `str`. """
self._description = None
""" extension for fhir primitive description"""
self.destination = None
""" Server responding to the request.
Type `int`. """
self._destination = None
""" extension for fhir primitive destination"""
self.encodeRequestUrl = None
""" Whether or not to send the request url in encoded format.
Type `bool`. """
self._encodeRequestUrl = None
""" extension for fhir primitive encodeRequestUrl"""
self.label = None
""" Tracking/logging operation label.
Type `str`. """
self._label = None
""" extension for fhir primitive label"""
self.method = None
""" delete | get | options | patch | post | put | head.
Type `str`. """
self._method = None
""" extension for fhir primitive method"""
self.origin = None
""" Server initiating the request.
Type `int`. """
self._origin = None
""" extension for fhir primitive origin"""
self.params = None
""" Explicitly defined path parameters.
Type `str`. """
self._params = None
""" extension for fhir primitive params"""
self.requestHeader = None
""" Each operation can have one or more header elements.
List of `TestScriptSetupActionOperationRequestHeader` items (represented as `dict` in JSON). """
self.requestId = None
""" Fixture Id of mapped request.
Type `str`. """
self._requestId = None
""" extension for fhir primitive requestId"""
self.resource = None
""" Resource type.
Type `str`. """
self._resource = None
""" extension for fhir primitive resource"""
self.responseId = None
""" Fixture Id of mapped response.
Type `str`. """
self._responseId = None
""" extension for fhir primitive responseId"""
self.sourceId = None
""" Fixture Id of body for PUT and POST requests.
Type `str`. """
self._sourceId = None
""" extension for fhir primitive sourceId"""
self.targetId = None
""" Id of fixture used for extracting the [id], [type], and [vid] for
GET requests.
Type `str`. """
self._targetId = None
""" extension for fhir primitive targetId"""
self.type = None
""" The operation code type that will be executed.
Type `Coding` (represented as `dict` in JSON). """
self.url = None
""" Request URL.
Type `str`. """
self._url = None
""" extension for fhir primitive url"""
super(TestScriptSetupActionOperation, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptSetupActionOperation, self).elementProperties()
        js.extend([
            ("accept", "accept", str, False, None, False),
            ("_accept", "_accept", fhirprimitive.FHIRPrimitive, False, None, False),
            ("contentType", "contentType", str, False, None, False),
            ("_contentType", "_contentType", fhirprimitive.FHIRPrimitive, False, None, False),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("destination", "destination", int, False, None, False),
            ("_destination", "_destination", fhirprimitive.FHIRPrimitive, False, None, False),
            ("encodeRequestUrl", "encodeRequestUrl", bool, False, None, True),
            ("_encodeRequestUrl", "_encodeRequestUrl", fhirprimitive.FHIRPrimitive, False, None, False),
            ("label", "label", str, False, None, False),
            ("_label", "_label", fhirprimitive.FHIRPrimitive, False, None, False),
            ("method", "method", str, False, None, False),
            ("_method", "_method", fhirprimitive.FHIRPrimitive, False, None, False),
            ("origin", "origin", int, False, None, False),
            ("_origin", "_origin", fhirprimitive.FHIRPrimitive, False, None, False),
            ("params", "params", str, False, None, False),
            ("_params", "_params", fhirprimitive.FHIRPrimitive, False, None, False),
            ("requestHeader", "requestHeader", TestScriptSetupActionOperationRequestHeader, True, None, False),
            ("requestId", "requestId", str, False, None, False),
            ("_requestId", "_requestId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("resource", "resource", str, False, None, False),
            ("_resource", "_resource", fhirprimitive.FHIRPrimitive, False, None, False),
            ("responseId", "responseId", str, False, None, False),
            ("_responseId", "_responseId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("sourceId", "sourceId", str, False, None, False),
            ("_sourceId", "_sourceId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("targetId", "targetId", str, False, None, False),
            ("_targetId", "_targetId", fhirprimitive.FHIRPrimitive, False, None, False),
            ("type", "type", coding.Coding, False, None, False),
            ("url", "url", str, False, None, False),
            ("_url", "_url", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
class TestScriptSetupActionOperationRequestHeader(backboneelement.BackboneElement):
""" Each operation can have one or more header elements.
Header elements would be used to set HTTP headers.
"""
resource_type = "TestScriptSetupActionOperationRequestHeader"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.field = None
""" HTTP header field name.
Type `str`. """
self._field = None
""" extension for fhir primitive field"""
self.value = None
""" HTTP headerfield value.
Type `str`. """
self._value = None
""" extension for fhir primitive value"""
super(TestScriptSetupActionOperationRequestHeader, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptSetupActionOperationRequestHeader, self).elementProperties()
        js.extend([
            ("field", "field", str, False, None, True),
            ("_field", "_field", fhirprimitive.FHIRPrimitive, False, None, False),
            ("value", "value", str, False, None, True),
            ("_value", "_value", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
class TestScriptTeardown(backboneelement.BackboneElement):
""" A series of required clean up steps.
A series of operations required to clean up after all the tests are
executed (successfully or otherwise).
"""
resource_type = "TestScriptTeardown"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.action = None
""" One or more teardown operations to perform.
List of `TestScriptTeardownAction` items (represented as `dict` in JSON). """
super(TestScriptTeardown, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptTeardown, self).elementProperties()
js.extend([
("action", "action", TestScriptTeardownAction, True, None, True),
])
return js
class TestScriptTeardownAction(backboneelement.BackboneElement):
""" One or more teardown operations to perform.
The teardown action will only contain an operation.
"""
resource_type = "TestScriptTeardownAction"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.operation = None
""" The teardown operation to perform.
Type `TestScriptSetupActionOperation` (represented as `dict` in JSON). """
super(TestScriptTeardownAction, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptTeardownAction, self).elementProperties()
js.extend([
("operation", "operation", TestScriptSetupActionOperation, False, None, True),
])
return js
class TestScriptTest(backboneelement.BackboneElement):
""" A test in this script.
"""
resource_type = "TestScriptTest"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.action = None
""" A test operation or assert to perform.
List of `TestScriptTestAction` items (represented as `dict` in JSON). """
self.description = None
""" Tracking/reporting short description of the test.
Type `str`. """
self._description = None
""" extension for fhir primitive description"""
self.name = None
""" Tracking/logging name of this test.
Type `str`. """
self._name = None
""" extension for fhir primitive name"""
super(TestScriptTest, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptTest, self).elementProperties()
        js.extend([
            ("action", "action", TestScriptTestAction, True, None, True),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("name", "name", str, False, None, False),
            ("_name", "_name", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
class TestScriptTestAction(backboneelement.BackboneElement):
""" A test operation or assert to perform.
Action would contain either an operation or an assertion.
"""
resource_type = "TestScriptTestAction"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.assert_fhir = None
""" The setup assertion to perform.
Type `TestScriptSetupActionAssert` (represented as `dict` in JSON). """
self.operation = None
""" The setup operation to perform.
Type `TestScriptSetupActionOperation` (represented as `dict` in JSON). """
super(TestScriptTestAction, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptTestAction, self).elementProperties()
js.extend([
("assert_fhir", "assert", TestScriptSetupActionAssert, False, None, False),
("operation", "operation", TestScriptSetupActionOperation, False, None, False),
])
return js
class TestScriptVariable(backboneelement.BackboneElement):
""" Placeholder for evaluated elements.
Variable is set based either on element value in response body or on header
field value in the response headers.
"""
resource_type = "TestScriptVariable"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.defaultValue = None
""" Default, hard-coded, or user-defined value for this variable.
Type `str`. """
self._defaultValue = None
""" extension for fhir primitive defaultValue"""
self.description = None
""" Natural language description of the variable.
Type `str`. """
self._description = None
""" extension for fhir primitive description"""
self.expression = None
""" The FHIRPath expression against the fixture body.
Type `str`. """
self._expression = None
""" extension for fhir primitive expression"""
self.headerField = None
""" HTTP header field name for source.
Type `str`. """
self._headerField = None
""" extension for fhir primitive headerField"""
self.hint = None
""" Hint help text for default value to enter.
Type `str`. """
self._hint = None
""" extension for fhir primitive hint"""
self.name = None
""" Descriptive name for this variable.
Type `str`. """
self._name = None
""" extension for fhir primitive name"""
self.path = None
""" XPath or JSONPath against the fixture body.
Type `str`. """
self._path = None
""" extension for fhir primitive path"""
self.sourceId = None
""" Fixture Id of source expression or headerField within this variable.
Type `str`. """
self._sourceId = None
""" extension for fhir primitive sourceId"""
super(TestScriptVariable, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(TestScriptVariable, self).elementProperties()
        js.extend([
            ("defaultValue", "defaultValue", str, False, None, False),
            ("_defaultValue", "_defaultValue", fhirprimitive.FHIRPrimitive, False, None, False),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("expression", "expression", str, False, None, False),
            ("_expression", "_expression", fhirprimitive.FHIRPrimitive, False, None, False),
            ("headerField", "headerField", str, False, None, False),
            ("_headerField", "_headerField", fhirprimitive.FHIRPrimitive, False, None, False),
            ("hint", "hint", str, False, None, False),
            ("_hint", "_hint", fhirprimitive.FHIRPrimitive, False, None, False),
            ("name", "name", str, False, None, True),
            ("_name", "_name", fhirprimitive.FHIRPrimitive, False, None, False),
            ("path", "path", str, False, None, False),
            ("_path", "_path", fhirprimitive.FHIRPrimitive, False, None, False),
            ("sourceId", "sourceId", str, False, None, False),
            ("_sourceId", "_sourceId", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
return js
from . import codeableconcept
from . import coding
from . import contactdetail
from . import fhirdate
from . import fhirreference
from . import identifier
from . import usagecontext
from . import fhirprimitive

# file: fhirclient/r4models/testscript.py
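# A minimal usage sketch, not part of testscript.py. It assumes the package is
# importable as fhirclient.r4models (as the repo path above suggests) and uses
# only behaviour defined in this file: the jsondict/strict constructor, and the
# required fields url, name, status on TestScript plus warningOnly on each
# assert.
from fhirclient.r4models.testscript import TestScript

example = TestScript(jsondict={
    "resourceType": "TestScript",
    "url": "http://example.org/fhir/TestScript/example",
    "name": "ExampleScript",
    "status": "draft",
    "test": [{"action": [{"assert": {
        "response": "okay",
        "warningOnly": False,
    }}]}],
}, strict=True)

print(example.name, example.status)                    # ExampleScript draft
print(example.test[0].action[0].assert_fhir.response)  # okay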
from __future__ import annotations
import unittest
from typing import List
from causets.embeddedcauset import EmbeddedCauset
from causets.sprinkledcauset import SprinkledCauset
from causets.spacetimes import BlackHoleSpacetime
from causets.shapes import CoordinateShape
from matplotlib import pyplot as plt
import causets.causetplotting as cplt
class TestCausetplotting(unittest.TestCase):
def setUp(self):
cplt.setDefaultColors('UniYork')
plt.figure(figsize=(8.0, 8.0))
def tearDown(self):
pass
def test_plotGeneric(self):
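        # Hand-embed four events in a 3D cuboid and plot them with unfilled
        # past and future light cones; the plot axes are coordinate dims
        # [1, 2, 0].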
C: EmbeddedCauset = EmbeddedCauset(
shape=CoordinateShape(3, 'cuboid', edges=[4.0, 2.0, 2.0]),
coordinates=[[-1.5, 0.0, -0.3], [-0.5, 0.0, -0.1],
[0.5, 0.0, 0.1], [1.5, 0.0, 0.3]])
dims: List[int] = [1, 2, 0]
P = cplt.Plotter(C, dims=dims,
pastcones={'facecolor': 'none', 'alpha': 0.8},
futurecones={'facecolor': 'none', 'alpha': 0.8})
P([0.7])
ax = plt.gca()
if len(dims) > 2:
ax.xaxis.pane.fill = False
ax.yaxis.pane.fill = False
ax.zaxis.pane.fill = False
plt.show()
def test_plotSprinkle(self):
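        # Sprinkle 200 events into a 2D black-hole spacetime, then highlight
        # one past-infinity event (red) and its causal cone (dark blue) on
        # top of the faintly drawn full causet.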
C: SprinkledCauset = SprinkledCauset(card=200,
spacetime=BlackHoleSpacetime(2))
dims = [1, 0]
e = C.PastInf.copy().pop()
events_Cone = e.Cone
cplt.plot(C, dims=dims,
events={'alpha': 0.05},
links=False, labels=False)
cplt.plot(list(events_Cone), dims=dims, spacetime=C.Spacetime,
events={'markerfacecolor': 'cs:darkblue'},
links=True, labels=False)
cplt.plot(e, dims=dims, spacetime=C.Spacetime,
events={'markerfacecolor': 'cs:red'},
pastcones={'alpha': 1.0,
'linewidth': 2.0},
futurecones={'alpha': 1.0,
'linewidth': 2.0},
time=[-1.0, 1.0])
C.Shape.plot(dims)
plt.show()
if __name__ == '__main__':
    unittest.main()

# file: test_causetplotting.py
import os
import tempfile
from pathlib import Path
from xeda import Design
from xeda.flow_runner import DefaultRunner
from xeda.flows import VivadoSynth
from xeda.flows.flow import FPGA
TESTS_DIR = Path(__file__).parent.absolute()
RESOURCES_DIR = TESTS_DIR / "resources"
EXAMPLES_DIR = TESTS_DIR.parent / "examples"
def test_vivado_synth_template() -> None:
path = RESOURCES_DIR / "design0/design0.toml"
assert path.exists()
design = Design.from_toml(RESOURCES_DIR / "design0/design0.toml")
settings = VivadoSynth.Settings(fpga=FPGA(part="abcd"), clock_period=5.5) # type: ignore
run_dir = Path.cwd() / "vivado_synth_run"
run_dir.mkdir(exist_ok=True)
flow = VivadoSynth(settings, design, run_dir) # type: ignore
tcl_file = flow.copy_from_template(
"vivado_synth.tcl", xdc_files=[], reports_tcl="reports_tcl"
)
with open(run_dir / tcl_file) as f:
vivado_tcl = f.read()
expected_lines = [
"set_property generic {G_IN_WIDTH=32} [current_fileset]",
"set_property generic {G_ITERATIVE=1'b1} [current_fileset]",
'set_property generic {G_STR=\\"abcd\\"} [current_fileset]',
"set_property generic {G_BITVECTOR=7'b0101001} [current_fileset]",
]
for line in expected_lines:
assert line in vivado_tcl
def test_vivado_synth_py() -> None:
path = RESOURCES_DIR / "design0/design0.toml"
# Append to PATH so that if the actual tool exists, it takes precedence.
os.environ["PATH"] += os.pathsep + os.path.join(TESTS_DIR, "fake_tools")
assert path.exists()
design = Design.from_toml(EXAMPLES_DIR / "vhdl" / "sqrt" / "sqrt.toml")
settings = dict(fpga=FPGA("xc7a12tcsg325-1"), clock_period=5.5)
with tempfile.TemporaryDirectory() as run_dir:
print("Xeda run dir: ", run_dir)
xeda_runner = DefaultRunner(run_dir, debug=True)
flow = xeda_runner.run_flow(VivadoSynth, design, settings)
settings_json = flow.run_path / "settings.json"
results_json = flow.run_path / "results.json"
assert settings_json.exists()
assert results_json.exists()
assert flow.succeeded
assert 0.3 < flow.results.runtime # type: ignore
if __name__ == "__main__":
test_vivado_synth_py()
| tests/test_vivado.py | 0.422386 | 0.269341 |
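The expected_lines in test_vivado_synth_template above double as a reference for how Xeda renders design generics into Vivado Tcl; the source-side kinds below are inferred from the rendered literals, not confirmed by this excerpt:

# integer     32        -> G_IN_WIDTH=32
# boolean     true      -> G_ITERATIVE=1'b1
# string      "abcd"    -> G_STR=\"abcd\"
# bit vector  0101001   -> G_BITVECTOR=7'b0101001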
import app.main.config as config
import Adyen
from random import randint
from flask import json
'''
Send Payment Request to Adyen
'''
def adyen_payments(frontend_request):
adyen = Adyen.Adyen()
adyen.client.platform = 'test'
adyen.client.xapikey = config.checkout_apikey
payment_info = frontend_request.get_json()
txvariant = payment_info["paymentMethod"]["type"]
payments_request = {
'amount': {
'value': 12500,
'currency': choose_currency(txvariant)
},
'channel': 'Web',
'reference': "Aditya's Test Shop" + str(randint(0, 10000)),
'shopperReference': "Aditya's Test Shop Shopper",
'returnUrl': "http://localhost:8080/api/handleShopperRedirect",
'countryCode': 'NL',
'shopperLocale': "en_US",
'merchantAccount': config.merchant_account
}
payments_request.update(payment_info)
if txvariant == 'alipay':
payments_request['countryCode'] = 'CN'
elif 'klarna' in txvariant:
payments_request['shopperEmail'] = "<EMAIL>"
payments_request['lineItems'] = [
{
'quantity': "1",
'amountExcludingTax': "450",
'taxPercentage': "1111",
'description': "Sunglasses",
'id': "Item #1",
'taxAmount': "50",
'amountIncludingTax': "500",
'taxCategory': "High"
},
{
'quantity': "1",
'amountExcludingTax': "450",
'taxPercentage': "1111",
'description': "Headphones",
'id': "Item #2",
'taxAmount': "50",
'amountIncludingTax': "500",
'taxCategory': "High"
}]
elif txvariant == 'directEbanking' or txvariant == 'giropay':
payments_request['countryCode'] = "DE"
elif txvariant == 'dotpay':
payments_request['countryCode'] = "PL"
elif txvariant == 'scheme':
payments_request['additionalData'] = {"allow3DS2": "true"}
payments_request['origin'] = "http://localhost:8080"
elif txvariant == 'ach' or txvariant == 'paypal':
payments_request['countryCode'] = 'US'
print("/payments request:\n" + str(payments_request))
payments_response = adyen.checkout.payments(payments_request)
print("/payments response:\n" + payments_response.raw_response.decode("UTF-8"))
return remove_unnecessary_data(payments_response.raw_response)
def choose_currency(payment_method):
if payment_method == "alipay":
return "CNY"
elif payment_method == "dotpay":
return "PLN"
elif payment_method == "boletobancario":
return "BRL"
elif payment_method == "ach" or payment_method == "paypal":
return "USD"
else:
return "EUR"
# Custom payment error class
class PaymentError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
# Format response being passed back to frontend. Only leave resultCode and action
def remove_unnecessary_data(response):
dict_response = json.loads(response)
if "resultCode" in dict_response:
new_response = {"resultCode": dict_response["resultCode"]}
if "action" in dict_response:
new_response["action"] = dict_response["action"]
return json.dumps(new_response)
else:
raise PaymentError(response)
| app/main/payments.py | 0.273866 | 0.119717 |
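A few illustrative sanity checks for choose_currency, read straight off its branches:

# choose_currency("alipay") -> "CNY"
# choose_currency("dotpay") -> "PLN"
# choose_currency("scheme") -> "EUR"  (default branch)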
import os
import sys
import time
import datetime
from tkinter import Tk
from tkinter.filedialog import askdirectory
from PIL import Image
from pathlib import Path
def resource_path(relative_path):
try:
base_path = sys._MEIPASS
except Exception:
base_path = os.path.abspath(".")
return os.path.join(base_path, relative_path)
def apply_logo(path:str, image_name:str, logo_number, resize_multi:float = 0.1):
input_image = Image.open(path + '/' + image_name)
resized_width = input_image.size[0]*resize_multi
logo = Image.open(resource_path('logo{}.png'.format(str(logo_number)))).resize((
int(resized_width+(resized_width*0.2)),
int(resized_width/input_image.size[0]*input_image.size[1])
)
)
input_image.paste(logo, (
input_image.size[0] - int(logo.size[0]*1.1),
input_image.size[1] - int(logo.size[1]*1.1)
), logo)
pn = str(Path(__file__).parent.parent.parent.resolve()) + '/output'
if not os.path.exists(pn):
os.makedirs(pn)
input_image.save('{}/{}'.format(
pn,
str().join(image_name.split('.')[:-1]) + '.JPEG'
), 'JPEG')
def progressBar(current:float, total, barLength = 25):
percent = current * 100 / total
arrow = '-' * int(percent/100 * barLength - 1) + '>'
spaces = ' ' * (barLength - len(arrow))
print(
('Progress: [%s%s] %d %%' % (arrow, spaces, percent)) +
'(finished ' + str(current) + ' out of ' + str(total) + ')', end='\r')
Tk().withdraw()
path = askdirectory()
n,st = 1,time.time()
errorLog = {}
print('\n')
print('Process started! Watermarking all images from path: " ' + str(path) + ' "')
for image in os.listdir(path):
try:
apply_logo(path, image, 2, 0.1)
progressBar(n, len(os.listdir(path)))
n += 1
except Exception as e:
errorLog[image] = e
print(
'Process completed! Elapsed: ' +
str(int(time.time()-st)) + ' seconds or '
+ str((time.time()-st)/len(os.listdir(path))) + ' per image, at '
+ str(datetime.datetime.now())
)
if errorLog != {}:
print('\n\nHowever, some images were not able to be processed:\n')
for error in errorLog:
print(str(error) + ' : ' + str(errorLog.get(error)))
| main.py | 0.115063 | 0.158826 |
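For context, resource_path above targets PyInstaller one-file bundles: sys._MEIPASS is the temporary directory the bundle is extracted into, and plain source runs fall back to the current working directory.

# Frozen with PyInstaller: resource_path('logo2.png') -> <_MEIPASS>/logo2.png
# Run from source:         resource_path('logo2.png') -> <cwd>/logo2.png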
import sys
from win32com.client import gencache, constants
DEBUG = False
class MSOutlook(object):
def __init__(self):
try:
self.oOutlookApp = gencache.EnsureDispatch("Outlook.Application")
self.outlookFound = True
except:
print "MSOutlook: unable to load Outlook"
self.outlookFound = False
self.records = []
def loadContacts(self, keys=None):
if not self.outlookFound: return
onMAPI = self.oOutlookApp.GetNamespace("MAPI")
ofContacts = onMAPI.GetDefaultFolder(constants.olFolderContacts)
if DEBUG: print "number of contacts:", len(ofContacts.Items)
for oc in range(len(ofContacts.Items)):
contact = ofContacts.Items.Item(oc + 1)
if contact.Class == constants.olContact:
if keys is None:
# no keys were specified, so build up a list of all keys
# that belong to some types we know we can deal with
good_types = int, str, unicode
keys = [key for key in contact._prop_map_get_
if isinstance(getattr(contact, key), good_types) ]
if DEBUG:
print "Fields\n=================================="
keys.sort()
for key in keys: print key
record = {}
for key in keys:
record[key] = getattr(contact, key)
self.records.append(record)
if DEBUG:
print oc, contact.FullName
if __name__ == '__main__':
if '-d' in sys.argv:
DEBUG = True
if DEBUG:
print "attempting to load Outlook"
oOutlook = MSOutlook()
if not oOutlook.outlookFound:
print "Outlook not found"
sys.exit(1)
fields = ['FullName', 'CompanyName',
'MailingAddressStreet', 'MailingAddressCity',
'MailingAddressState', 'MailingAddressPostalCode',
'HomeTelephoneNumber', 'BusinessTelephoneNumber',
'MobileTelephoneNumber', 'Email1Address', 'Body',
]
if DEBUG:
import time
print "loading records..."
startTime = time.time()
# to get all fields just call oOutlook.loadContacts()
# but getting a specific set of fields is much faster
oOutlook.loadContacts(fields)
if DEBUG:
print "loading took %f seconds" % (time.time() - startTime)
print "Number of contacts: %d" % len(oOutlook.records)
print "Contact: %s" % oOutlook.records[0]['FullName']
print "Body:\n%s" % oOutlook.records[0]['Body'] | lang/py/cookbook/v2/source/cb2_10_16_sol_1.py | from win32com.client import gencache, constants
DEBUG = False
class MSOutlook(object):
def __init__(self):
try:
self.oOutlookApp = gencache.EnsureDispatch("Outlook.Application")
self.outlookFound = True
except:
print "MSOutlook: unable to load Outlook"
self.outlookFound = False
self.records = []
def loadContacts(self, keys=None):
if not self.outlookFound: return
onMAPI = self.oOutlookApp.GetNamespace("MAPI")
ofContacts = onMAPI.GetDefaultFolder(constants.olFolderContacts)
if DEBUG: print "number of contacts:", len(ofContacts.Items)
for oc in range(len(ofContacts.Items)):
contact = ofContacts.Items.Item(oc + 1)
if contact.Class == constants.olContact:
if keys is None:
# no keys were specified, so build up a list of all keys
# that belong to some types we know we can deal with
good_types = int, str, unicode
keys = [key for key in contact._prop_map_get_
if isinstance(getattr(contact, key), good_types) ]
if DEBUG:
print "Fields\n=================================="
keys.sort()
for key in keys: print key
record = {}
for key in keys:
record[key] = getattr(contact, key)
self.records.append(record)
if DEBUG:
print oc, contact.FullName
if __name__ == '__main__':
if '-d' in sys.argv:
DEBUG = True
if DEBUG:
print "attempting to load Outlook"
oOutlook = MSOutlook()
if not oOutlook.outlookFound:
print "Outlook not found"
sys.exit(1)
fields = ['FullName', 'CompanyName',
'MailingAddressStreet', 'MailingAddressCity',
'MailingAddressState', 'MailingAddressPostalCode',
'HomeTelephoneNumber', 'BusinessTelephoneNumber',
'MobileTelephoneNumber', 'Email1Address', 'Body',
]
if DEBUG:
import time
print "loading records..."
startTime = time.time()
# to get all fields just call oOutlook.loadContacts()
# but getting a specific set of fields is much faster
oOutlook.loadContacts(fields)
if DEBUG:
print "loading took %f seconds" % (time.time() - startTime)
print "Number of contacts: %d" % len(oOutlook.records)
print "Contact: %s" % oOutlook.records[0]['FullName']
print "Body:\n%s" % oOutlook.records[0]['Body'] | 0.222278 | 0.050401 |
import scrapy
from scrapy.item import Field
class BooksItem(scrapy.Item):
"""A scrapy item with 4 fields for books.toscrape.com - Title, Price, Image URL and Details URL"""
title = Field()
price = Field()
image_url = Field()
details_url = Field()
class BooksSpider(scrapy.Spider):
"""Scrapy spider that will extract titles, prices, image URLs and detail URLs from all categories from books.toscrape.com"""
### Instantiates the spider with a name, allowed domains, and start URL
name = 'books_spider'
allowed_domains = ['books.toscrape.com']
start_urls = ['http://books.toscrape.com/']
### Will start by collecting all categories
### Then, for each category, it will call a function to scrape books from the category itself
### This is a good practice as it adapts to changes in categories in every run, as long as the page layout remains the same
### In general, it's best to avoid hard-coding a list of categories or URLs - Instead, getting them dynamically on the go
def parse(self, response):
for category in response.css('.nav-list > li > ul > li > a::attr(href)').getall():
yield scrapy.Request(url=response.urljoin(category), callback=self.parse_data)
### Will grab all elements of class .product_pod (which contain the products themselves)
### Will then grab and yield info for each element (title, price, image URL, details URL)
### If there is a next page, the function is called again in the next page
### It's also a good practice to try and make the selectors as subtle (or at least not as nested) as possible (ex: not something like "div > div > div > div > div:last > span")
def parse_data(self, response):
books = response.css('.product_pod')
next_page = response.css('.next > a::attr(href)').extract_first()
for book in books:
item = BooksItem()
item['title'] = book.css('h3 a::attr(title)').extract_first()
item['price'] = book.css('.price_color::text').extract_first()
item['image_url'] = response.urljoin(book.css('img::attr(src)').extract_first())
item['details_url'] = response.urljoin(book.css('h3 a::attr(href)').extract_first())
yield item
if next_page:
yield scrapy.Request(url=response.urljoin(next_page), callback=self.parse_data)
| books.py | 0.523908 | 0.262245 |
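A minimal sketch of running this spider programmatically and exporting items to JSON; it assumes Scrapy >= 2.1 (for the FEEDS setting), and the output file name is illustrative:

from scrapy.crawler import CrawlerProcess

process = CrawlerProcess(settings={"FEEDS": {"books.json": {"format": "json"}}})
process.crawl(BooksSpider)
process.start()  # blocks until the crawl finishes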
from PIL import Image, ImageDraw, ImageColor
import random
import colorsys
from . import color
class Generator:
'''
Generator for a random background image.
Args:
width: The image width.
height: The image height.
columns: The number of shapes to fit along the x-axis.
rows: The number of shapes to fit along the y-axis.
offset: The internal offset of each shape.
background: The color of the image background.
foreground: The colors of the shapes in the image.
blanks: Whether shapes may randomly be omitted, leaving empty cells.
variation: The amount to vary the color of the shapes.
'''
def __init__(self, width, height, columns, rows,
offset=0, background='white', foreground='black',
blanks=True, variation=0):
self.width = width
self.height = height
self.columns = columns
self.rows = rows
self.cwidth = width / columns
self.rheight = height / rows
self.offset = offset
self.background = color.parse_color(background)
self.foreground = color.parse_colors(foreground)
self.blanks = blanks
try:
self.hvariation, self.svariation, self.lvariation = variation
except TypeError:
self.hvariation = self.svariation = self.lvariation = variation
def generate(self, seed=None):
'''
Generate an image.
Args:
seed: The initial internal state of the random generator.
Returns:
The image.
'''
if seed:
random.seed(seed)
else:
random.seed()
img = Image.new('RGB', (self.width, self.height), self.background)
drw = ImageDraw.Draw(img, 'RGBA')
for i in range(self.columns):
for j in range(self.rows):
poly = self.make_shape(i, j)
if poly:
color = self.make_color()
drw.polygon(poly, fill=color)
return img
def make_shape(self, *args):
'''
Generate the vertices of a randomly chosen shape (rectangle or
triangle).
Args: (see make_square)
Returns:
A list of the vertices of the shape or None for no shape.
'''
if self.blanks:
choice = random.randint(0, 6)
else:
choice = random.randint(1, 6)
if choice == 0:
return None
elif choice in [1, 2]:
return self.make_square(*args)
else:
return self.make_triangle(*args)
def make_square(self, x, y):
'''
Generate the vertices of a square.
Args:
x: The localized x-coordinate of the square to generate.
y: The localized y-coordinate of the square to generate.
Returns:
A list of the vertices of the square.
'''
x1 = x * self.cwidth + self.offset
y1 = y * self.rheight + self.offset
x2 = (x + 1) * self.cwidth - self.offset
y2 = (y + 1) * self.rheight - self.offset
return [(x1, y1),
(x2, y1),
(x2, y2),
(x1, y2)]
def make_triangle(self, *args):
'''
Generate the vertices of a randomly-oriented triangle.
Args: (see make_square)
Returns:
A list of the vertices of the triangle.
'''
points = self.make_square(*args)
points.remove(random.choice(points))
return points
def make_color(self):
'''
Generate a random foreground color using the provided foreground colors
and variation amounts.
Returns:
The altered color as an RGB tuple.
'''
red, green, blue = random.choice(self.foreground)
hue, lit, sat = colorsys.rgb_to_hls(red / 255, green / 255, blue / 255)
hue = int(hue * 360)
hue += random.randint(-int(self.hvariation / 2), int(self.hvariation / 2))
hue = max(0, min(hue, 360))
sat = int(sat * 100)
sat += random.randint(-int(self.svariation / 2), int(self.svariation / 2))
sat = max(0, min(sat, 100))
lit = int(lit * 100)
lit += random.randint(-int(self.lvariation / 2), int(self.lvariation / 2))
lit = max(0, min(lit, 100))
return ImageColor.getrgb(f'hsl({hue}, {sat}%, {lit}%)')
| rngback/generator.py | 0.9292 | 0.656961 |
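A minimal usage sketch for Generator, assuming color.parse_color and color.parse_colors (not shown in this excerpt) accept common color names:

# gen = Generator(800, 600, columns=8, rows=6, offset=4,
#                 background='white', foreground='black',
#                 variation=(30, 10, 10))
# img = gen.generate(seed=42)   # same seed -> same layout
# img.save('background.png')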
import re
from collections import defaultdict
from math import ceil
path = []
max_workers = 5
min_cost = 60
diff = ord('A')-1
infinite = 99999
def parseLine(line):
# Pulls the two single capital letters into a tuple
return re.search("\s([A-Z])\s.*\s([A-Z])\s", line).groups()
def requirementsMet(task_id, pre_reqs, c):
if (task_id in path + tasks) or (c not in pre_reqs):
return False # already seen it or not valid
# check all requirements have already been met
return all([(req in path) for req in pre_reqs])
def stepTime(step):
return min_cost + (ord(step) - diff)
def newTime(worker, time):
if worker[0] == infinite:
return worker
return (worker[0] - time, worker[1])
with open('./input.txt') as input:
rules = [parseLine(l) for l in input.readlines()]
dag = defaultdict(list)
for r in rules:
dag[r[1]].append(r[0])
dag[r[0]] # make sure we create an empty list for all nodes
# start with the nodes that have no pre-reqs
tasks = sorted([k for k,v in dag.items() if v == []])
while tasks:
c = tasks[0]
path.append(c)
tasks = sorted(tasks[1:] + [k for k,v in dag.items() if requirementsMet(k, v, c)])
print("Sorted Instructions: ", "".join(path))
time = 0
# Use an infinite time for workers who are not busy to make sorting easier.
workers = [(infinite, None) for _ in range(max_workers)]
# start with the nodes that have no pre-reqs
tasks = sorted([k for k,v in dag.items() if v == []])
path = []
while tasks or any(0 < w[0] < infinite for w in workers):
available_workers = [w for w in workers if w[0] == infinite]
busy_workers = [w for w in workers if w[0] != infinite]
# assign tasks to workers
span = min(len(tasks), len(available_workers))
for i in range(span):
available_workers[i] = (stepTime(tasks[i]), tasks[i])
# remove the assigned tasks from the 'ready' list
tasks = tasks[span:]
workers = sorted(busy_workers + available_workers)
# take the next complete task
completed = workers[0]
this_time = completed[0]
time += this_time
# reduce all 'in progress' tasks by the time it took to complete this one
workers = [newTime(w, this_time) for w in workers[1:]] + [(infinite, None)] # add the worker that just finished
c = completed[1]
path.append(c)
tasks = sorted(tasks + [k for k,v in dag.items() if requirementsMet(k, v, c)])
print("Time to assemble:", time) | day7/sol.py |
import re
from collections import defaultdict
from math import ceil
path = []
max_workers = 5
min_cost = 60
diff = ord('A')-1
infinite = 99999
def parseLine(line):
# Pulls the two single capital letters into a tuple
return re.search("\s([A-Z])\s.*\s([A-Z])\s", line).groups()
def requirementsMet(task_id, pre_reqs, c):
if (task_id in path + tasks) or (c not in pre_reqs):
return False # already seen it or not valid
# check all requirements have already been met
return all([(req in path) for req in pre_reqs])
def stepTime(step):
return min_cost + (ord(step) - diff)
def newTime(worker, time):
if worker[0] == infinite:
return worker
return (worker[0] - time, worker[1])
with open('./input.txt') as input:
rules = [parseLine(l) for l in input.readlines()]
dag = defaultdict(list)
for r in rules:
dag[r[1]].append(r[0])
dag[r[0]] # make sure we create an empty list for all nodes
# start with the nodes that have no pre-reqs
tasks = sorted([k for k,v in dag.items() if v == []])
while tasks:
c = tasks[0]
path.append(c)
tasks = sorted(tasks[1:] + [k for k,v in dag.items() if requirementsMet(k, v, c)])
print("Sorted Instructions: ", "".join(path))
time = 0
# Use an infinite time for workers who are not busy to make sorting easier.
workers = [(infinite, None) for _ in range(max_workers)]
# start with the nodes that have no pre-reqs
tasks = sorted([k for k,v in dag.items() if v == []])
path = []
while tasks or any(0 < w[0] < infinite for w in workers):
available_workers = [w for w in workers if w[0] == infinite]
busy_workers = [w for w in workers if w[0] != infinite]
# assign tasks to workers
span = min(len(tasks), len(available_workers))
for i in range(span):
available_workers[i] = (stepTime(tasks[i]), tasks[i])
# remove the assigned tasks from the 'ready' list
tasks = tasks[span:]
workers = sorted(busy_workers + available_workers)
# take the next complete task
completed = workers[0]
this_time = completed[0]
time += this_time
# reduce all 'in progress' tasks by the time it took to complete this one
workers = [newTime(w, this_time) for w in workers[1:]] + [(infinite, None)] # add the worker that just finished
c = completed[1]
path.append(c)
tasks = sorted(tasks + [k for k,v in dag.items() if requirementsMet(k, v, c)])
print("Time to assemble:", time) | 0.317638 | 0.241579 |
import json
import vogen
from typing import List
import argparse
Parser=argparse.ArgumentParser
def main():
# show the default help
def pyvogen_default(args):
print("PyVogen command-line tool\n\npm       package manager\nversion  show version info\n\nMore help: https://gitee.com/oxygendioxide/vogen")
parser = Parser(prog='pyvogen')
#print(parser)
parser.set_defaults(func=pyvogen_default)
subparsers = parser.add_subparsers(help='sub-command help')
# show version info
def showversion(args):
import sys
import onnxruntime
print("pyvogen version: {}".format(vogen.__version__))
print("onnxruntime version: {}".format(onnxruntime.__version__))
print("python version: {}".format(sys.version))
parser_version=subparsers.add_parser("version",help="show version info")
parser_version.set_defaults(func=showversion)
# package manager
parser_pm=subparsers.add_parser("pm",help="package manager")
subparsers_pm=parser_pm.add_subparsers(help='')
# install
def pm_install(args):
from vogen import pm
install_func=pm.install
if(args.local):
install_func=pm.install_local
elif(args.online):
install_func=pm.install_online
for i in args.name:
install_func(i,force=args.force)
parser_pm_install=subparsers_pm.add_parser("install",help="install")
parser_pm_install.add_argument('name',type=str,nargs='+')
parser_pm_install.add_argument('-l',"--local",action='store_true',help='install from a local package')
parser_pm_install.add_argument('-o',"--online",action='store_true',help="download an online package and install it")
parser_pm_install.add_argument('-F',"--force",action='store_true',help="force overwrite of existing files")
parser_pm_install.set_defaults(func=pm_install)
# list installed voicebanks
def pm_list(args):
from vogen import pm
pkglist=pm.list()
if(args.json):
print(json.dumps([{"name":i} for i in pkglist]))
else:
print("\n".join(pkglist))
parser_pm_list=subparsers_pm.add_parser("list",help="list installed voicebanks")
parser_pm_list.set_defaults(func=pm_list)
parser_pm_list.add_argument("-j","--json",action='store_true',help="output as JSON")
# uninstall
def pm_uninstall(args):
from vogen import pm
pm.uninstall(args.id)
parser_pm_uninstall=subparsers_pm.add_parser("uninstall",help="uninstall")
parser_pm_uninstall.add_argument("id")
parser_pm_uninstall.set_defaults(func=pm_uninstall)
# config
def config(args):  # print the current config
from vogen import config
from tabulate import tabulate
if(args.json):
print(json.dumps(config.config))
else:
print(tabulate(config.config.items(),headers=["Key","Value"]))
parser_config=subparsers.add_parser("config",help="config")
parser_config.set_defaults(func=config)
parser_config.add_argument("-j","--json",action='store_true',help="output as JSON")
subparsers_config=parser_config.add_subparsers(help='')
# modify a setting
def config_set(args):
from vogen import config
config.set(args.key,args.value)
parser_config_set=subparsers_config.add_parser("set",help="modify a setting")
parser_config_set.set_defaults(func=config_set)
parser_config_set.add_argument('key',type=str)
parser_config_set.add_argument('value',type=str)
# synthesize
def synth(args):
import os
import wavio
from vogen import synth
from vogen.synth import utils
infile=args.infile
if(args.outfile==""):
outfile=infile[:-4]+".wav"
else:
outfile=args.outfile
# if the output file already exists
if(os.path.isfile(outfile)):
print(outfile+" already exists. Overwrite?\ny: overwrite and synthesize  n: keep it and abort")
instr=input()
while(len(instr)==0 or not(instr[0] in ("y","n","Y","N"))):
print("y: overwrite and synthesize  n: keep it and abort")
instr=input()
if(instr[0] in ("n","N")):
return
wavio.write(outfile,synth.synth(vogen.loadfile(infile,False)),utils.Params.fs)
parser_synth=subparsers.add_parser("synth",help="synthesize")
parser_synth.set_defaults(func=synth)
parser_synth.add_argument("infile",type=str,help="input file")
parser_synth.add_argument("outfile",type=str,nargs='?',default="",help="output file")
parser_synth.add_argument("-F","--force",action="store_true",help="force overwrite of existing files")
args = parser.parse_args()
#print(args)
args.func(args)
if(__name__=="__main__"):
main()
| vogen/__main__.py | 0.07832 | 0.050518 |
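Example invocations through the module entry point (package and file names are hypothetical):

# python -m vogen version
# python -m vogen pm install somevoice --local
# python -m vogen pm list --json
# python -m vogen config set some_key some_value
# python -m vogen synth song.vog song.wav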
from oslo_utils import importutils
import testtools
from oslo_messaging.tests import utils as test_utils
# NOTE(jamespage) matchmaker tied directly to eventlet
# which is not yet py3 compatible - skip if import fails
matchmaker_ring = (
importutils.try_import('oslo_messaging._drivers.matchmaker_ring'))
@testtools.skipIf(not matchmaker_ring, "matchmaker/eventlet unavailable")
class MatchmakerRingTest(test_utils.BaseTestCase):
def setUp(self):
super(MatchmakerRingTest, self).setUp()
self.ring_data = {
"conductor": ["controller1", "node1", "node2", "node3"],
"scheduler": ["controller1", "node1", "node2", "node3"],
"network": ["controller1", "node1", "node2", "node3"],
"cert": ["controller1"],
"console": ["controller1"],
"consoleauth": ["controller1"]}
self.matcher = matchmaker_ring.MatchMakerRing(self.ring_data)
def test_direct(self):
self.assertEqual(
self.matcher.queues('cert.controller1'),
[('cert.controller1', 'controller1')])
self.assertEqual(
self.matcher.queues('conductor.node1'),
[('conductor.node1', 'node1')])
def test_fanout(self):
self.assertEqual(
self.matcher.queues('fanout~conductor'),
[('fanout~conductor.controller1', 'controller1'),
('fanout~conductor.node1', 'node1'),
('fanout~conductor.node2', 'node2'),
('fanout~conductor.node3', 'node3')])
def test_bare_topic(self):
# Round robins through the hosts on the topic
self.assertEqual(
self.matcher.queues('scheduler'),
[('scheduler.controller1', 'controller1')])
self.assertEqual(
self.matcher.queues('scheduler'),
[('scheduler.node1', 'node1')])
self.assertEqual(
self.matcher.queues('scheduler'),
[('scheduler.node2', 'node2')])
self.assertEqual(
self.matcher.queues('scheduler'),
[('scheduler.node3', 'node3')])
# Cycles loop
self.assertEqual(
self.matcher.queues('scheduler'),
[('scheduler.controller1', 'controller1')])
| tools/dockerize/webportal/usr/lib/python2.7/site-packages/oslo_messaging/tests/drivers/test_matchmaker_ring.py | 0.451327 | 0.300598 |
import matplotlib.pyplot as plt
import numpy as np
import os
import torch
from data_process import label_to_init_vector
from plotting import image_fancy
from settings import BETA, GAUSSIAN_STDEV, DIR_OUTPUT
import matplotlib as mpl
mpl.rcParams["mathtext.default"]
mpl.rcParams["text.usetex"] = False
mpl.rcParams["text.latex.preamble"] = [r'\usepackage{bm}', r'\usepackage{amsmath}']
print(mpl.rcParams["text.usetex"])
class RBM_custom():
# modified class structure from git repo: https://github.com/GabrielBianconi/pytorch-rbm
def __init__(self, num_visible, num_hidden, k, learning_rate=1e-3, weight_decay=1e-4):
self.num_visible = num_visible
self.num_hidden = num_hidden
self.k = k
self.learning_rate = learning_rate
self.weight_decay = weight_decay
self.weights = torch.randn(num_visible, num_hidden) * 0.1
self.visible_bias = torch.ones(num_visible) * 0.5
self.hidden_bias = torch.zeros(num_hidden)
def sample_hidden(self, visible_state):
hidden_activations = torch.matmul(visible_state, self.weights) + self.hidden_bias
hidden_probabilities = self._sigmoid(hidden_activations)
hidden_sampled = torch.bernoulli(hidden_probabilities)
hidden_sampled_phys = -1 + hidden_sampled * 2
return hidden_sampled_phys
def sample_visible(self, hidden_state):
visible_activations = torch.matmul(hidden_state, self.weights.t()) + self.visible_bias
visible_probabilities = self._sigmoid(visible_activations)
visible_sampled = torch.bernoulli(visible_probabilities)
visible_sampled_phys = -1 + visible_sampled * 2
return visible_sampled_phys
def contrastive_divergence(self, input_data):
# Positive phase (WIKI: Steps 1, 2)
positive_hidden_sampled = self.sample_hidden(input_data)
positive_gradient = torch.matmul(input_data.t(), positive_hidden_sampled) # WIKI 2: v dot h
# Negative phase (WIKI: Steps 3, 4)
hidden_sampled = positive_hidden_sampled
for step in range(self.k):
visible_sampled = self.sample_visible(hidden_sampled)
hidden_sampled = self.sample_hidden(visible_sampled)
negative_visible_sampled = visible_sampled
negative_hidden_sampled = hidden_sampled
negative_gradient = torch.matmul(negative_visible_sampled.t(), negative_hidden_sampled) # WIKI 4: v' dot h'
# Update parameters (WIKI: Steps 5, 6) -- note no momentum OR batch size affected learning rate
# batch_size = input_data.size(0)
self.weights += (positive_gradient - negative_gradient) * self.learning_rate
self.visible_bias += torch.sum(input_data - negative_visible_sampled, dim=0) * self.learning_rate
self.hidden_bias += torch.sum(positive_hidden_sampled - negative_hidden_sampled, dim=0) * self.learning_rate
self.weights -= self.weights * self.weight_decay # L2 weight decay
# Compute reconstruction error
error = torch.sum((input_data - negative_visible_sampled)**2)
return error
def _sigmoid(self, x):
return 1 / (1 + torch.exp(-x))
def _random_probabilities(self, num):
random_probabilities = torch.rand(num)
return random_probabilities
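# A minimal usage sketch for RBM_custom (shapes illustrative; inputs are the
# +/-1 "spin" encoding the sampling methods above produce and expect):
# rbm = RBM_custom(num_visible=784, num_hidden=64, k=1)
# batch = torch.bernoulli(0.5 * torch.ones(100, 784)) * 2 - 1
# err = rbm.contrastive_divergence(batch)  # one CD-1 update; returns squared reconstruction error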
class RBM_gaussian_custom():
def __init__(self, num_visible, num_hidden, k, learning_rate=1e-3, weight_decay=1e-4, init_weights=None, use_fields=False):
self.num_visible = num_visible
self.num_hidden = num_hidden
self.k = k
self.learning_rate = learning_rate
self.weight_decay = weight_decay
if init_weights is not None:
self.weights = init_weights
else:
use_normal = True
#print("Setting random weights: use_normal=%s" % use_normal)
if use_normal:
self.weights = 0.1 * torch.randn(num_visible, num_hidden).float()
else:
scale = np.sqrt(6) / np.sqrt(num_visible + num_hidden) # gaussian-binary ref cites bengio and glorot [40] on this choice
self.weights = 2 * scale * torch.rand(num_visible, num_hidden).float() - scale
print(scale, torch.min(self.weights), torch.max(self.weights))
self.visible_bias = torch.zeros(num_visible).float()
self.hidden_bias = torch.zeros(num_hidden).float()
self.use_fields = use_fields
if not self.use_fields:
self.visible_bias = torch.zeros(num_visible).float()
self.hidden_bias = torch.zeros(num_hidden).float()
def sample_hidden(self, visible_state, stdev=GAUSSIAN_STDEV):
hidden_activations = torch.matmul(visible_state, self.weights) + self.hidden_bias
if stdev > 0:
hidden_sampled = torch.normal(hidden_activations, stdev) # ***************** NEW
else:
hidden_sampled = hidden_activations
return hidden_sampled
def sample_hidden_forcebinary(self, visible_state):
visible_activations = torch.matmul(visible_state, self.weights) # + self.hidden_bias
hidden_probabilities = self._sigmoid(2 * BETA * visible_activations) # self._sigmoid(visible_activations)
hidden_sampled = torch.bernoulli(hidden_probabilities) # **************************** NEW
return hidden_sampled
def sample_visible(self, hidden_state, beta=BETA):
visible_activations = torch.matmul(hidden_state, self.weights.t()) + self.visible_bias
visible_probabilities = self._sigmoid(2 * beta * visible_activations) # self._sigmoid(visible_activations)
visible_sampled = torch.bernoulli(visible_probabilities) # ***************** NEW
visible_sampled_phys = -1 + visible_sampled * 2
return visible_sampled_phys
def contrastive_divergence_orig(self, input_data):
# Positive phase (WIKI: Steps 1, 2)
positive_hidden_sampled = self.sample_hidden(input_data)
positive_gradient = torch.matmul(input_data.t(), positive_hidden_sampled) # WIKI 2: v dot h
# Negative phase (WIKI: Steps 3, 4)
hidden_sampled = positive_hidden_sampled
for step in range(self.k):
visible_sampled = self.sample_visible(hidden_sampled)
hidden_sampled = self.sample_hidden(visible_sampled)
negative_visible_sampled = visible_sampled
negative_hidden_sampled = hidden_sampled
negative_gradient = torch.matmul(negative_visible_sampled.t(), negative_hidden_sampled) # WIKI 4: v' dot h'
# Update parameters (WIKI: Steps 5, 6) -- note no momentum OR batch size affected learning rate
# batch_size = input_data.size(0)
self.weights += (positive_gradient - negative_gradient) * self.learning_rate # / batch_size
self.visible_bias += torch.sum(input_data - negative_visible_sampled, dim=0) * self.learning_rate # / batch_size
self.hidden_bias += torch.sum(positive_hidden_sampled - negative_hidden_sampled, dim=0) * self.learning_rate # / batch_size
self.weights -= self.weights * self.weight_decay # L2 weight decay
# Compute reconstruction error
error = torch.sum((input_data - negative_visible_sampled)**2)
return error
def contrastive_divergence(self, input_data):
learning_rate_scaled = self.learning_rate / input_data.shape[0]  # per-sample step: dividing by batch size keeps the summed gradients batch-size independent
# Positive phase (WIKI: Steps 1, 2)
positive_hidden_sampled = self.sample_hidden(input_data, stdev=0) # math says it should be the mean
positive_gradient = torch.matmul(input_data.t(), positive_hidden_sampled) # WIKI 2: v dot h
# Negative phase (WIKI: Steps 3, 4) - use CD-k
visible_sampled = input_data
for step in range(self.k):
hidden_sampled = self.sample_hidden(visible_sampled)
visible_sampled = self.sample_visible(hidden_sampled)
negative_hidden_sampled = hidden_sampled
negative_visible_sampled = visible_sampled
negative_gradient = torch.matmul(negative_visible_sampled.t(), negative_hidden_sampled) # WIKI 4: v' dot h'
# Update parameters (WIKI: Steps 5, 6) -- note no momentum OR batch size affected learning rate
# batch_size = input_data.size(0)
self.weights += (positive_gradient - negative_gradient) * learning_rate_scaled
if self.use_fields:
self.visible_bias += torch.sum(input_data - negative_visible_sampled, dim=0) * learning_rate_scaled
self.hidden_bias += torch.sum(positive_hidden_sampled - negative_hidden_sampled, dim=0) * 2 * learning_rate_scaled
#self.weights -= self.weights * self.weight_decay # L2 weight decay
# Compute reconstruction error
error = torch.sum((input_data - negative_visible_sampled)**2)
return error
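# CD-k summary for the method above: the weight update is proportional to
# <v h>_data - <v h>_model, where the data term clamps visibles to the batch
# and the model term comes from k alternating Gibbs steps; the bias updates
# apply only when use_fields is True, and every step is scaled by 1/batch_size.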
def _sigmoid(self, x):
return 1 / (1 + torch.exp(-x))
def _random_probabilities(self, num):
random_probabilities = torch.rand(num)
return random_probabilities
def load_rbm_trained(self, fpath, weights_id='Q'):
with open(fpath, 'rb') as f:
arr = np.load(fpath)[weights_id]
self.weights = torch.from_numpy(arr).float()
return
def plot_model(self, title='def', outdir=None):
"""
Makes K + 2 plots
where K = self.num_hidden
"""
if outdir is None:
outdir = DIR_OUTPUT + os.sep + 'training'
for col in range(self.num_hidden):
plt.imshow(self.weights[:, col].view(28, 28), interpolation='none')
plt.colorbar()
plot_title = 'trained_weights_col%d_%s' % (col, title)
plt.title(plot_title)
plt.savefig(outdir + os.sep + plot_title + '.pdf')
plt.close()
plt.title(title)
plt.imshow(self.visible_bias.view(28, 28), interpolation='none')
plt.colorbar()
plot_title = 'trained_visibleField_col%d_%s' % (col, title)
plt.title(plot_title)
plt.savefig(outdir + os.sep + plot_title + '.pdf')
plt.close()
plt.plot(self.hidden_bias)
plt.hlines(0.0, 0.0, self.num_hidden, linestyles='--', colors='k')
plot_title = 'trained_hiddenField_col%d_%s' % (col, title)
plt.title(plot_title)
plt.savefig(outdir + os.sep + plot_title + '.pdf')
plt.close()
return
def get_sample_images(self, num_images, k=20):
# do k steps of annealing on random initial state to arrive at final sampled images
# output shape: (k steps X num images X visible dimension)
switch_A = k/4
switch_B = 3 * k/4
def anneal_schedule(step):
# TODO note such beta schedules have unclear interpretations at later epochs
"""
if step > switch:
beta_step = 20
else:
beta_step = 0.5
return beta_step
"""
if step > switch_B:
beta_step = 20
elif step > switch_A:
beta_step = 2
else:
beta_step = 0.1
return beta_step
# initial states are coin flips up/down
a = 0.5 * torch.ones(num_images, self.num_visible)
visible_block = torch.bernoulli(a) * 2 - 1
# track timeseries of the chain
visible_timeseries = torch.zeros(k, num_images, self.num_visible)
visible_timeseries[0, :, :] = visible_block
for step in range(k):
beta_step = anneal_schedule(step)
hidden_block = self.sample_hidden(visible_block, stdev=1/np.sqrt(beta_step))
visible_block = self.sample_visible(hidden_block, beta=beta_step)
visible_timeseries[step, :, :] = visible_block
return visible_timeseries
def get_sample_images_targetted(self, num_images, init_vector, k=20):
# do k steps of annealing on random initial state to arrive at final sampled images
# init_vector: init_vector of digit to generate similar examples of
# output shape: (k steps X num images X visible dimension)
switch_A = k/4
switch_B = k - 2 # i.e. 18 if k = 20, gives one step of cleaning
def anneal_schedule(step):
# TODO note such beta schedules have unclear interpretations at later epochs
"""
if step > switch:
beta_step = 20
else:
beta_step = 0.5
return beta_step
"""
if step > switch_B:
beta_step = 20 # was 20, can set instead to 2 to keep static
elif step > switch_A:
beta_step = 2
else:
beta_step = 2
return beta_step
# initial states are based on class
init_cond_tiled = np.tile(init_vector, (num_images,1))
visible_block = torch.from_numpy(init_cond_tiled).float()
# track timeseries of the chain
visible_timeseries = torch.zeros(k, num_images, self.num_visible)
visible_timeseries[0, :, :] = visible_block
for step in range(k):
beta_step = anneal_schedule(step)
hidden_block = self.sample_hidden(visible_block, stdev=1/np.sqrt(beta_step))
visible_block = self.sample_visible(hidden_block, beta=beta_step)
visible_timeseries[step, :, :] = visible_block
return visible_timeseries
def plot_sample_images(self, visible_timeseries, outdir, only_last=True):
visible_timeseries_numpy = visible_timeseries.numpy()
num_steps = visible_timeseries_numpy.shape[0]
num_images = visible_timeseries_numpy.shape[1]
steps_to_plot = list(range(0, num_steps, 5)) + [num_steps - 1]
if only_last:
steps_to_plot = [-1]
for idx in range(num_images):
for k in steps_to_plot:
image = visible_timeseries_numpy[k, idx, :].reshape((28, 28))
plt.figure()
image_fancy(image, ax=plt.gca(), show_labels=False)
plt.gca().grid(False)
                plt.title('Trajectory: %d | Step: %d' % (idx, k))
                plt.savefig(outdir + os.sep + 'traj%d_step%d.pdf' % (idx, k))
                plt.savefig(outdir + os.sep + 'traj%d_step%d.png' % (idx, k))
plt.close()
return
if __name__ == '__main__':
sample_trained_rbm = True
mode = 'targetted'
assert mode in ['targetted', 'blanket']
if sample_trained_rbm:
# pick data to load
runtype = 'hopfield'
run = 0
num_hidden = 50
total_epochs = 100
batch = 100
cdk = 20
use_fields = False
ais_steps = 200
beta = 2
assert beta == 2
epoch_idx = [5, 15] #[5, 10, 15, 20, 49]
# load data
custompath = False
bigruns = DIR_OUTPUT + os.sep + 'archive' + os.sep + 'big_runs' + os.sep + 'rbm'
if custompath:
subdir = '%s_%dhidden_0fields_2.00beta_100batch_%depochs_20cdk_1.00E-04eta_200ais' % (runtype, num_hidden, total_epochs)
fname = 'weights_%dhidden_0fields_20cdk_0stepsAIS_2.00beta.npz' % num_hidden
weightsobj = np.load(bigruns + os.sep + subdir + os.sep + '%d_' % run + fname)
assert not use_fields
else:
subdir = '%s_%dhidden_%dfields_%.2fbeta_%dbatch_%depochs_%dcdk_1.00E-04eta_%dais' % (runtype, num_hidden, use_fields, beta, batch, total_epochs, cdk, ais_steps)
fmod = '_%dhidden_%dfields_%dcdk_%dstepsAIS_%.2fbeta.npz' % (num_hidden, use_fields, cdk, ais_steps, beta)
weightsobj = np.load(bigruns + os.sep + subdir + os.sep + 'run%d' % run + os.sep + 'weights' + fmod)
if use_fields:
visibleobj = np.load(bigruns + os.sep + subdir + os.sep + 'run%d' % run + os.sep + 'visiblefield' + fmod)
hiddenobj = np.load(bigruns + os.sep + subdir + os.sep + 'run%d' % run + os.sep + 'hiddenfield' + fmod)
weights_timeseries = weightsobj['weights']
if use_fields:
visiblefield_timeseries = visibleobj['visiblefield']
hiddenfield_timeseries = hiddenobj['hiddenfield']
if mode == 'blanket':
for idx in epoch_idx:
if idx in [0] and runtype == 'normal':
continue
outdir = DIR_OUTPUT + os.sep + 'samples' + os.sep + 'blanket' + os.sep + \
'%s_%dhidden_%dfields' % (runtype, num_hidden, use_fields) + os.sep + 'epoch%d' % idx
if not os.path.exists(outdir):
os.makedirs(outdir)
# re-build rbm
rbm = RBM_gaussian_custom(28**2, num_hidden, 0, init_weights=None, use_fields=False, learning_rate=0)
rbm.weights = torch.from_numpy(weights_timeseries[:, :, idx]).float()
if use_fields:
rbm.visible_bias = torch.from_numpy(visiblefield_timeseries[:, idx]).float()
rbm.hidden_bias = torch.from_numpy(hiddenfield_timeseries[:, idx]).float()
# generate samples
num_images = 10
k_steps = 40
visible_block = rbm.get_sample_images(num_images, k=k_steps)
rbm.plot_sample_images(visible_block, outdir, only_last=False)
else:
basedir = DIR_OUTPUT + os.sep + 'samples' + os.sep + 'targetted' + os.sep + \
'%s_%dhidden_%dfields' % (runtype, num_hidden, use_fields)
if not os.path.exists(basedir):
os.makedirs(basedir)
for digit in range(10):
# pick and save image of init vector
init_vector = label_to_init_vector(digit, prespecified=True)
plt.figure()
image_fancy(init_vector.reshape((28, 28)), ax=plt.gca(), show_labels=False)
plt.gca().grid(False)
plt.title('Sample for class: %d' % (digit))
plt.savefig(basedir + os.sep + 'init_example_%d.pdf' % (digit));
plt.close()
for idx in epoch_idx:
if idx in [0] and runtype == 'normal':
continue
outdir = basedir + os.sep + 'epoch%d_digit%d' % (idx, digit)
if not os.path.exists(outdir):
os.makedirs(outdir)
# re-build rbm
rbm = RBM_gaussian_custom(28 ** 2, num_hidden, 0, init_weights=None, use_fields=False,
learning_rate=0)
rbm.weights = torch.from_numpy(weights_timeseries[:, :, idx]).float()
if use_fields:
rbm.visible_bias = torch.from_numpy(visiblefield_timeseries[:, idx]).float()
rbm.hidden_bias = torch.from_numpy(hiddenfield_timeseries[:, idx]).float()
# generate samples
num_images = 80
k_steps = 20
visible_block = rbm.get_sample_images_targetted(num_images, init_vector, k=k_steps)
                rbm.plot_sample_images(visible_block, outdir, only_last=True) | RBM/custom_rbm.py | (parsed_code omitted: verbatim duplicate of code) | 0.67854 | 0.409516 |
import datetime
import hashlib
from termcolor import colored
#Create a 'Block' class
class Block :
#Initialize values relating to the blockchain
index = 0
data = None
hash = None
timestamp = datetime.datetime.now()
previousHash = 0x0
next = None
    #Number used only once (the nonce), or as I like to call it, 'incrementing variable'
nonce = 0
def __init__(self, data) :
self.data = data
    #Convert all the values to a hash using the hashlib library
    @staticmethod
    def hashBlock():
hash = hashlib.sha256()
hash.update (
str(Block.nonce).encode('utf-8') +
str(Block.index).encode('utf-8') +
str(Block.data).encode('utf-8') +
str(Block.previousHash).encode('utf-8') +
str(Block.timestamp).encode('utf-8')
)
#Return the hash
return hash.hexdigest()
#Create a new blockchain class
class Blockchain :
#The maximum nonce
maxNonce = 2**32
#mining difficulty variable
diff = 14
#Target hash
target = 2**(256-diff)
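    #A block is accepted when int(hashBlock(), 16) <= target; with target = 2**(256-diff),
    #each nonce succeeds with probability about 2**-diff, i.e. roughly 1 in 16384 for diff = 14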
#This is the Genesis block
block = Block("Genesis")
#Stores the newest addition to the block
head = block
    #Create a new block in the blockchain
    @staticmethod
    def generateBlock(block):
#Change some values
Block.previousHash = Block.hashBlock()
Block.index = Block.index + 1
Block.next = block
Blockchain.block = Block.next
def mine (self, block) :
#Repeat maxNonce times
for n in range(blockchain.maxNonce) :
#Check if the proposed block is to be accepted
if int(Block.hashBlock(), 16) <= blockchain.target :
Blockchain.generateBlock(block)
                #Store the result in a txt file, closing it when done
                with open('mineHistory.txt', 'a') as file:
                    file.write("Hash: " + Block.hashBlock() + "\nNonce: " + str(Block.nonce) + "\n")
break
else :
#Increase the nonce
Block.nonce = Block.nonce + 1
blockchain = Blockchain()
value = 0
#Repeat forever
while True:
    response = input(colored("Enter m to mine a new block\nPress t to view all hashes generated\nPress v to see the simulated value of each block: ", "green"))
if response == 't' :
        with open('mineHistory.txt', 'r') as file:
            print (colored("Reading responses: ", "green"))
            print (colored(file.read(), "red"))
elif response == 'm' :
block = input("What data do you want the block to hold? ")
times = int(input("How many times do you want to mine? "))
for n in range(times) :
blockchain.mine(block)
print ("Block mined")
value = Block.nonce/2
elif response == 'v' :
print ("Relative value of blocks: " + str(value) + "\nThe value is based on the difficulty to mine")
else :
print(colored("Sorry, please type that again", "red")) | main.py | import datetime
import hashlib
from termcolor import colored
#Create a 'Block' class
class Block :
#Initialize values relating to the blockchain
index = 0
data = None
hash = None
timestamp = datetime.datetime.now()
previousHash = 0x0
next = None
#Number only sored once, or as I like to call it, 'incrementing variable'
nonce = 0
def __init__(self, data) :
self.data = data
#Convert all the values to a hash using the hashlib library
def hashBlock () :
hash = hashlib.sha256()
hash.update (
str(Block.nonce).encode('utf-8') +
str(Block.index).encode('utf-8') +
str(Block.data).encode('utf-8') +
str(Block.previousHash).encode('utf-8') +
str(Block.timestamp).encode('utf-8')
)
#Return the hash
return hash.hexdigest()
#Create a new blockchain class
class Blockchain :
#The maximum nonce
maxNonce = 2**32
#mining difficulty variable
diff = 14
#Target hash
target = 2**(256-diff)
#This is the Genesis block
block = Block("Genesis")
#Stores the newest addition to the block
head = block
#Create a new block in the blockchain
def generateBlock (block) :
#Change some values
Block.previousHash = Block.hashBlock()
Block.index = Block.index + 1
Block.next = block
Blockchain.block = Block.next
def mine (self, block) :
#Repeat maxNonce times
for n in range(blockchain.maxNonce) :
#Check if the proposed block is to be accepted
if int(Block.hashBlock(), 16) <= blockchain.target :
Blockchain.generateBlock(block)
#Store in txt file
file = open('mineHistory.txt', 'a')
file.write("Hash: " + Block.hashBlock() + "\nNonce: " + str(Block.nonce) + "\n")
break
else :
#Increase the nonce
Block.nonce = Block.nonce + 1
blockchain = Blockchain()
value = 0
#Repeat forever
while True:
response = input(colored("Enter m to mine a new block\n Press t to view all hashes generated\nPress v to see the simulated value of each block: ", "green"))
if response == 't' :
file = open('mineHistory.txt', 'r')
print (colored("Reading responses: ", "green"))
print (colored(file.read(), "red"))
elif response == 'm' :
block = input("What data do you want the block to hold? ")
times = int(input("How many times do you want to mine? "))
for n in range(times) :
blockchain.mine(block)
print ("Block mined")
value = Block.nonce/2
elif response == 'v' :
print ("Relative value of blocks: " + str(value) + "\nThe value is based on the difficulty to mine")
else :
print(colored("Sorry, please type that again", "red")) | 0.561215 | 0.379666 |
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
def focal_loss(input_values, gamma):
"""Computes the focal loss"""
p = torch.exp(-input_values)
loss = (1 - p) ** gamma * input_values
return loss.mean()
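# With p = exp(-cross_entropy), the modulating factor (1 - p)**gamma down-weights
# easy examples (p near 1) and focuses the loss on hard ones, following
# Lin et al., "Focal Loss for Dense Object Detection".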
class FocalLoss(nn.Module):
def __init__(self, weight=None, gamma=0.):
super(FocalLoss, self).__init__()
assert gamma >= 0
self.gamma = gamma
self.weight = weight
def forward(self, input, target):
return focal_loss(F.cross_entropy(input, target, reduction='none', weight=self.weight), self.gamma)
def cb_focal_loss(class_num_list):
beta = 0.99
effective_num = 1.0 - np.power(beta, class_num_list)
per_cls_weights = (1.0 - beta) / np.array(effective_num)
per_cls_weights = per_cls_weights / np.sum(per_cls_weights) * len(class_num_list)
per_cls_weights = torch.FloatTensor(per_cls_weights).cuda()
return FocalLoss(weight=per_cls_weights, gamma=0.5).cuda()
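# Usage sketch (hypothetical counts): for a 3-class set with 5000/500/50 samples,
#   criterion = cb_focal_loss([5000, 500, 50])
#   loss = criterion(logits, targets)  # CUDA tensors: logits (N, 3), targets (N,)
# Weights are inversely proportional to the "effective number of samples"
# (1 - beta**n) / (1 - beta), renormalized to sum to the number of classes,
# so the 50-sample class receives by far the largest weight.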
def focal_loss_weighted(input_values, gamma, path_weights):
"""Computes the focal loss"""
p = torch.exp(-input_values)
loss = path_weights * (1 - p) ** gamma * input_values
return loss.mean()
class FocalLoss_weighted(nn.Module):
def __init__(self, weight=None, gamma=0.):
super(FocalLoss_weighted, self).__init__()
assert gamma >= 0
self.gamma = gamma
self.weight = weight
def forward(self, input, target, path_weights):
if type(path_weights) is list:
path_weights = torch.FloatTensor(path_weights).cuda()
return focal_loss_weighted(F.cross_entropy(input, target, reduction='none', weight=self.weight), self.gamma, path_weights)
def cb_focal_loss_weighted(class_num_list):
beta = 0.99
effective_num = 1.0 - np.power(beta, class_num_list)
per_cls_weights = (1.0 - beta) / np.array(effective_num)
per_cls_weights = per_cls_weights / np.sum(per_cls_weights) * len(class_num_list)
per_cls_weights = torch.FloatTensor(per_cls_weights).cuda()
    return FocalLoss_weighted(weight=per_cls_weights, gamma=0.5).cuda() | ldam.py | (parsed_code omitted: verbatim duplicate of code) | 0.946855 | 0.67688 |
from construct.core import *
from construct.lib import *
from construct.expr import this  # `this` lives in construct.expr and is needed by CertificateEntry below
""" TLS 1.3 related strcutres """
## RFC8446 section 4.2
ExtensionType = Enum( BytesInteger(2),
server_name = 0,
max_fragment_length = 1,
status_request = 5,
supported_groups = 10,
signature_algorithms = 13,
use_srtp = 14,
heartbeat = 15,
application_layer_protocol_negotiation = 16,
signed_certificate_timestamp = 18,
client_certificate_type = 19,
server_certificate_type = 20,
padding = 21,
pre_shared_key = 41,
early_data = 42,
supported_versions = 43,
cookie = 44,
psk_key_exchange_modes = 45,
certificate_authorities = 47,
oid_filters = 48,
post_handshake_auth = 49,
signature_algorithms_cert = 50,
key_share = 51,
)
Extension = Struct(
'extension_type' / ExtensionType,
'extension_data' / Prefixed(BytesInteger(2), GreedyBytes)
)
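# Quick sanity check (sketch), using construct's parse() on hand-built bytes:
# 0x002b (supported_versions) followed by a 2-byte length and two data bytes.
#   Extension.parse(b'\x00\x2b\x00\x02\x03\x04')
#   -> extension_type == 'supported_versions', extension_data == b'\x03\x04'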
## RFC8446 section 4.2.11
PskIdentity = Struct(
'identity' / Prefixed( BytesInteger(2), GreedyBytes),
'obfuscated_ticket_age' / Bytes(4)
)
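# Per RFC8446 section 4.2.11, obfuscated_ticket_age is the ticket age in
# milliseconds, masked by adding the ticket's age_add value modulo 2**32.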
## RFC8446 section 4.4.2
CertificateType = Enum( BytesInteger(1),
X509 = 0,
RawPublicKey = 1
)
CertificateEntry = Struct(
    # RFC8446 names this field cert_data; both variants are 3-byte length-prefixed bytes,
    # and Prefixed needs a length-field construct rather than a bare integer
    'cert_data' / Switch( this._.certificate_type, {
        'RawPublicKey': Prefixed( BytesInteger(3), GreedyBytes),
        'X509': Prefixed( BytesInteger(3), GreedyBytes)
        }
    ),
    'extensions' / Prefixed( BytesInteger(2), GreedyRange(Extension))
)
Certificate = Struct(
'certificate_request_context' / Prefixed( BytesInteger(1), GreedyBytes),
    'certificate_list' / Prefixed( BytesInteger(3), GreedyRange(CertificateEntry))
)
## RFC8446 4.2.3
SignatureScheme = Enum( Bytes(2),
rsa_pkcs1_sha256 = b'\x04\x01',
rsa_pkcs1_sha384 = b'\x05\x01',
rsa_pkcs1_sha512 = b'\x06\x01',
ecdsa_secp256r1_sha256 = b'\x04\x03',
ecdsa_secp384r1_sha384 = b'\x05\x03',
ecdsa_secp521r1_sha512 = b'\x06\x03',
rsa_pss_rsae_sha256 = b'\x08\x04',
rsa_pss_rsae_sha384 = b'\x08\x05',
rsa_pss_rsae_sha512 = b'\x08\x06',
ed25519 = b'\x08\x07',
ed448 = b'\x08\x08',
rsa_pss_pss_sha256 = b'\x08\x09',
rsa_pss_pss_sha384 = b'\x08\x0a',
rsa_pss_pss_sha512 = b'\x08\x0b',
rsa_pkcs1_sha1 = b'\x02\x01',
ecdsa_sha1 = b'\x02\x03',
) | pylurk/extensions/tls13_tls13_struct.py | (parsed_code omitted: verbatim duplicate of code) | 0.591251 | 0.179064 |
from pygame import *
from random import randint
mixer.init()
music=mixer.Sound('space.ogg')
music.play()
font.init()
font2 = font.SysFont('Arial',36)
font1= font.SysFont('Arial',80)
lose=font1.render("You Lose",True,(255,69,0))
win1=font1.render("You win!",True,(0,200,0))
score = 0
lost= 0
max_lost=3
goal=10
max_speed=3
ttt=0
class GameSprite(sprite.Sprite):
def __init__(self,player_image,player_x,player_y,size_x,size_y,player_speed):
super().__init__()
self.image=transform.scale(image.load(player_image),(size_x,size_y))
self.speed=player_speed
        #Hitbox for the sprites
self.rect=self.image.get_rect()
self.rect.x=player_x
self.rect.y=player_y
def reset(self):
window.blit(self.image,(self.rect.x,self.rect.y))
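    # Note: despite its name, reset() does not reset any state; it simply draws the
    # sprite at its current position and is called once per frame for the player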
ww=700
wh=500
class Player(GameSprite):
def update(self):
keys= key.get_pressed()
if keys[K_LEFT] and self.rect.x >5:
self.rect.x -= self.speed
if keys[K_RIGHT] and self.rect.x < ww -70:
self.rect.x += self.speed
def fire(self):
bullet= Bullet("bullet.png",self.rect.centerx,self.rect.top,15,20,-15)
bullets.add(bullet)
class Enemy(GameSprite):
def update(self):
self.rect.y += self.speed
global lost
if self.rect.y > wh:
self.rect.x= randint(80,ww-80)
self.rect.y=0
lost=lost+1
class Bullet(GameSprite):
def update(self):
self.rect.y +=self.speed
if self.rect.y < 0:
self.kill()
window=display.set_mode((ww,wh))
display.set_caption("shooter")
background=transform.scale(image.load("galaxy.jpg"),(ww,wh))
clock=time.Clock()
FPS=30
player = Player("rocket.png",250,430,80,50,7)
#ufo= Enemy("ufo.png",200,0,2)
monsters= sprite.Group()
for i in range(1,6):
ufo= Enemy("ufo.png",randint(80,ww-80),-40,80,50,randint(1,max_speed))
monsters.add(ufo)
bullets= sprite.Group()
fire=mixer.Sound('fire.ogg')
finish=False
game= True
while game:
for e in event.get():
if e.type == QUIT:
game= False
elif e.type == KEYDOWN:
if e.key == K_SPACE:
fire.play()
player.fire()
if not finish:
        window.blit(background,(0,0))
        player.update()
        # the monsters group updates and draws every ufo; separate ufo.update()/ufo.reset()
        # calls would process the newest one twice per frame
        monsters.update()
text=font2.render("Счет: "+str(score),1,(255,255,255))
propyck=font2.render("Пропущено: "+str(lost),True,(255,255,255))
player.reset()
ufo.reset()
bullets.update()
monsters.draw(window)
bullets.draw(window)
window.blit(text,(10,20))
window.blit(propyck,(10,50))
collides= sprite.groupcollide(monsters,bullets,True,True)
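        # groupcollide(..., True, True) removes both the hit ufo and the bullet from
        # their groups and returns a dict of collisions, so each key is one downed enemy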
for c in collides:
score = score+1
ufo= Enemy("ufo.png",randint(80,ww-80),-40,80,50,randint(1,2))
monsters.add(ufo)
if sprite.spritecollide(player,monsters,False) or lost >= max_lost:
finish = True
window.blit(lose,(200,200))
ttt=1
if score >= goal:
finish = True
window.blit(win1,(200,200))
display.update()
else:
finish=False
score=0
lost=0
for b in bullets:
b.kill()
for m in monsters:
m.kill()
time.delay(3000)
for i in range(1,6):
ufo= Enemy("ufo.png",randint(80,ww-80),-40,80,50,randint(1,3))
monsters.add(ufo)
if ttt == 1:
max_speed=max_speed+1
ttt=0
    clock.tick(FPS) | shooter_game.py | (parsed_code omitted: verbatim duplicate of code) | 0.119113 | 0.165998 |
from user import User
from credential import Info
def create_account(F_name,M_name,S_name,E_mail):
    new_user = User(F_name,M_name,S_name,E_mail)
    return new_user
def create_credential(instagram,E_mail):
new_cred = Info(instagram,E_mail)
return new_cred
def save_account(user):
user.save_user()
def save_credentials(credentials):
    credentials.save_info()
def display_users():
return User.display_users()
def display_creds():
return Info.display_info()
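# User and Info live in user.py / credential.py (not shown here); the thin
# wrappers above let main() call them without touching the classes directly.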
def main():
print(" ")
print("Hey there, Welcome to password generator!!")
print(" ")
print("To get started type in a short code as below!!")
print("")
print("")
print("-")
print("""Here to guide you!!
cc - Create new _account
ex - Exit password locker
dac - Display Accoounts
gs - Generate passwords""")
print(" ")
    short_code = input().lower()
if short_code =='cc':
print(" ")
print("-")
print("CREATE A NEW ACCOUNT!")
print(" ")
print(" ")
print("what is your first name?..")
print(" ")
F_name =input()
print("What is your middle name?..")
print(" ")
M_name= input()
print("What is your surname name?..")
print(" ")
S_name= input()
print("What is your email address?..")
print(" ")
E_mail= input()
print ("what is your instagram password?..")
print(" ")
instagram =input()
print("What is your email password?..")
print(" ")
E_mail= input()
save_account(create_account(F_name,M_name,S_name,E_mail))
print('\n')
save_credentials(create_credential(instagram,E_mail))
print('\n')
print("-")
print(f"New Account {F_name } { M_name} { instagram } has been created")
print('\n')
elif short_code =='dac':
if display_users():
print(" ")
print("The user name")
print(" ")
print('\n')
for user in display_users():
print(f"{user.F_name}{user.M_name}{user.S_name}")
            for credential in display_creds():
                print (f"{credential.instagram}")  # assumes Info exposes its first constructor argument as .instagram
print(" ")
else:
print('\n')
print("-")
print(" ")
print("PLEASE CREATE AN ACCOUNT ")
print("You have not created an account yet :( ")
print(" ")
elif short_code == 'gs':
print(" ")
print(" ")
print("TO GENERATE A PASSWORD ADD IN YOUR FIRST NAME AND INSTAGRAM BELOW!!")
print(" ")
list_of_inputs = [c for c in input()]
list_of_inputs.reverse()
print (list_of_inputs)
elif short_code == "ex":
print("-")
print(" ")
print("Thank You")
print("Chao Now")
print(" ")
print("-")
else:
print("-")
print(" ")
print("Retry!!")
print(" ")
print("Please choose from the option provided")
print(" ")
if __name__ == '__main__':
    main() | run.py | (parsed_code omitted: verbatim duplicate of code) | 0.135518 | 0.046834 |
import gtk
import gtk.glade
import subprocess
import shlex
from service import environment
from model.setting import Setting
from service.settings import Settings
from service.config import Config
from gui.entrydlg import EntryDlg
class TMUXCSSHGUI:
# Constant: Name of main window
__NAME_MAIN_WINDOW_GLADE_XML = 'main.glade'
__NAME_MAIN_WINDOW = 'windowMain'
__NAME_BEDITSETTING = 'bEditSetting'
__NAME_BRUNSETTING = 'bRunSetting'
__NAME_BREMOVESETTING = 'bRemoveSetting'
__NAME_NONE_SELECTED_LABEL = 'lChooseInfo'
__NAME_SETTINGS_FORM = 'vboxSettingsForm'
__NAME_VBOX_SC = 'vboxSC'
__NAME_VBOX_FILENAME = 'vboxFilename'
__NAME_VBOX_CONFIGSETTINGS = 'vboxConfigSettings'
# Constant: Name of treeview list holding the settings
__NAME_TREEVIEW_SETTINGS_LIST = 'treeviewSettings'
__windowStateMaximized=False
def __init__(self):
"""Constructor"""
# Initialize configs
self.__config=Config()
# Read glade file
self.__gladeXML=gtk.glade.XML(environment.NAME_GLADE_XML_PATH+self.__NAME_MAIN_WINDOW_GLADE_XML)
# Connect signals to self
self.__gladeXML.signal_autoconnect(self)
# Get treeview list for settings
self.__treeviewSettings=self.__gladeXML.get_widget(self.__NAME_TREEVIEW_SETTINGS_LIST)
# Create render for displaying the text
renderer=gtk.CellRendererText()
# Create column
column=gtk.TreeViewColumn('Settings name/key', renderer)
column.set_attributes(renderer, text=0)
# Add column to list
self.__treeviewSettings.append_column(column)
# Create list
self.__liststoreSettings=gtk.ListStore(str)
# Add list to treeview list
self.__treeviewSettings.set_model(self.__liststoreSettings)
# Show main window
self.__windowMain=self.__gladeXML.get_widget(self.__NAME_MAIN_WINDOW)
# Size
windowSizeX=self.__config.get(Config.SECTION_WINDOW, Config.OPTION_WINDOW_SIZE_X)
windowSizeY=self.__config.get(Config.SECTION_WINDOW, Config.OPTION_WINDOW_SIZE_Y)
if windowSizeX is not None and windowSizeY is not None:
            self.__windowMain.resize(int(windowSizeX), int(windowSizeY))
# End if
# Position
windowPositionX=self.__config.get(Config.SECTION_WINDOW, Config.OPTION_WINDOW_POSITION_X)
windowPositionY=self.__config.get(Config.SECTION_WINDOW, Config.OPTION_WINDOW_POSITION_Y)
if windowPositionX is not None and windowPositionY is not None:
self.__windowMain.move(int(windowPositionX), int(windowPositionY))
# End if
        # Check whether the window should be started minimized
startMinimized=self.__config.getBool(Config.SECTION_WINDOW, Config.OPTION_WINDOW_START_MINIMIZED)
if startMinimized is None or not startMinimized:
# Set default value
self.__config.set(Config.SECTION_WINDOW, Config.OPTION_WINDOW_START_MINIMIZED, False)
# Show window
self.__windowMain.show()
# End if
# Get several gui components
# Menu items
self.__miEdit=self.__gladeXML.get_widget('menuItemEdit')
self.__miRemove=self.__gladeXML.get_widget('menuItemRemove')
# Settings-list button area
self.__bEditSetting=self.__gladeXML.get_widget(self.__NAME_BEDITSETTING)
self.__bRemoveSetting=self.__gladeXML.get_widget(self.__NAME_BREMOVESETTING)
self.__bRunSetting=self.__gladeXML.get_widget(self.__NAME_BRUNSETTING)
# Form area
self.__noneSelectedLabel=self.__gladeXML.get_widget(self.__NAME_NONE_SELECTED_LABEL)
self.__settingsForm=self.__gladeXML.get_widget(self.__NAME_SETTINGS_FORM)
# Boxes for server connection string -sc
self.__vboxSC=self.__gladeXML.get_widget(self.__NAME_VBOX_SC)
self.__vboxFilename=self.__gladeXML.get_widget(self.__NAME_VBOX_FILENAME)
self.__vboxConfigSettings=self.__gladeXML.get_widget(self.__NAME_VBOX_CONFIGSETTINGS)
# Create and setup systray-icon/.menu
self.__systrayIcon=gtk.StatusIcon()
self.__systrayIcon.set_from_stock(gtk.STOCK_ABOUT)
self.__systrayIcon.connect('activate', self.on_systrayIcon_activate)
self.__systrayIcon.connect('popup-menu', self.on_systrayIcon_popup_menu)
self.__systrayIcon.set_title(environment.APP_NAME)
self.__systrayIcon.set_tooltip(environment.APP_NAME)
# Load settings
self.__loadSettings()
# End def
def __loadSettings(self):
# Initialize settings-service-object
self.__serviceSettings=Settings()
# Update settings list in gui
self.__updateGUISettingsList()
# End def
def __updateGUISettingsList(self):
# Reset internal selection
self.__currentSelectedSetting=None
# Clear list
self.__liststoreSettings.clear()
# Insert items
for settingEntry in self.__serviceSettings.yieldSettings():
self.__liststoreSettings.append([settingEntry.getKey()])
# End for
# End def
def __updateGUI(self):
# Update list
self.__updateGUISettingsList()
# Form area
self.__switchFormVisibility()
# End def
def on_destroy(self, object):
# Call quit
self.__quit()
# End def
def __quit(self):
# Actually quit
gtk.main_quit()
# End def
def __runSetting(self, position):
cmnd=self.__config.get(Config.SECTION_TERMINAL, Config.OPTION_TERMINAL_COMMAND)
params=self.__config.get(Config.SECTION_TERMINAL, Config.OPTION_TERMINAL_PARAMETER)
if cmnd is None and params is None:
cmnd=environment.TERMINAL_EMULATOR_CMND
self.__config.set(Config.SECTION_TERMINAL, Config.OPTION_TERMINAL_COMMAND, cmnd)
params=environment.TERMINAL_EMULATOR_PARAMETER
self.__config.set(Config.SECTION_TERMINAL, Config.OPTION_TERMINAL_PARAMETER, params)
            dlg=gtk.MessageDialog(self.__windowMain, gtk.DIALOG_MODAL, gtk.MESSAGE_INFO, gtk.BUTTONS_OK,
                u'No terminal emulator command was configured; default values have been set. You can adjust them in the config file ('+environment.APP_CONFIG_FILE+').')
dlg.run()
dlg.destroy()
# End if
# Command and parameters available
if cmnd and params:
        setting=self.__serviceSettings[position]
subprocess.Popen(cmnd+' '+params+' "'+environment.TMUX_CSSH_COMMAND+' '+setting.getValue()+'" ', shell=True)
# End if
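        # Caveat (observation, not a fix): the command line above is assembled by plain
        # string concatenation and run with shell=True, so setting values containing
        # quotes will break the call; the imported shlex module would be the natural
        # way to quote them safely.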
# End def
def on_treeviewSettings_cursor_changed(self, treeviewObject):
self.__settingSelected=True
# Get selected rows from treeview object
selectedRows=treeviewObject.get_selection().get_selected_rows()[1]
# Clear current settings selection
self.__currentSelectedSetting=None
if len(selectedRows)>0:
# Get selected row number for settings-object
selectedRow=selectedRows[0][0]
# Fill form
if selectedRow>=0 and selectedRow<len(self.__serviceSettings):
self.__currentSelectedSetting=selectedRow
self.__showFormFromSettingsObject(self.__serviceSettings[selectedRow])
else:
self.__showFormFromSettingsObject(None)
# End if
# End if
self.__settingSelected=False
# End def
def on_treeviewSettings_row_activated(self, *params):
self.__runSetting(self.__currentSelectedSetting)
# End def
def __switchFormVisibility(self):
# Setting activated in list
if self.__currentSelectedSetting is None:
# Menu items
self.__miEdit.set_sensitive(False)
self.__miRemove.set_sensitive(False)
# Button area in Settings-List
self.__bEditSetting.set_sensitive(False)
self.__bRemoveSetting.set_sensitive(False)
self.__bRunSetting.set_sensitive(False)
# Form area
self.__noneSelectedLabel.show()
self.__settingsForm.hide()
# No setting activated
else:
# Menu items
self.__miEdit.set_sensitive(True)
self.__miRemove.set_sensitive(True)
# Button area in Settings-List
self.__bEditSetting.set_sensitive(True)
self.__bRemoveSetting.set_sensitive(True)
self.__bRunSetting.set_sensitive(True)
# Form area
self.__noneSelectedLabel.hide()
self.__settingsForm.show()
# End if
# End def
def __showFormFromSettingsObject(self, settingObject):
# Switch form visibility
self.__switchFormVisibility()
# Get array with parsed settings from setting object
settingsFromObject=self.__serviceSettings.getParsedParametersFromSettingObject(settingObject=settingObject)
# Add user to form
fUser=self.__gladeXML.get_widget('fUser')
fUser.set_text(settingsFromObject.user[0].strip() if settingsFromObject.user is not None else '')
# Add certificate/identity to form
fIdentity=self.__gladeXML.get_widget('fCertificateIdentity')
if fIdentity.get_filename() is not None:
fIdentity.unselect_filename(fIdentity.get_filename())
# End if
if settingsFromObject.certificateIdentity is not None:
fIdentity.set_filename(settingsFromObject.certificateIdentity[0])
# End if
# Add server connection strings to form
# Remove old
for vboxSCChildren in self.__vboxSC.get_children():
self.__vboxSC.remove(vboxSCChildren)
# End for
# Add new
if settingsFromObject.sshConnectString is not None:
for serverConnectionString in settingsFromObject.sshConnectString:
self.__addSCItem(serverConnectionString[0])
# End for
# End if
# Add additional ssh args to form
fAdditionalSSHArguments=self.__gladeXML.get_widget('fAdditionalSSHArguments')
fAdditionalSSHArguments.set_text(settingsFromObject.additionalSSHArgs[0].strip() if settingsFromObject.additionalSSHArgs is not None else '')
# Add TMUX Session name to form
fTMUXSessionName=self.__gladeXML.get_widget('fTMUXSessionName')
fTMUXSessionName.set_text(settingsFromObject.tmuxSessionName[0].strip() if settingsFromObject.tmuxSessionName is not None else '')
# Add Epoch time to form
cbSetEpochTime=self.__gladeXML.get_widget('cbSetEpochTime')
cbSetEpochTime.set_active(settingsFromObject.setEpochTime)
# Add new session to form
cbNewSession=self.__gladeXML.get_widget('cbNewSession')
cbNewSession.set_active(settingsFromObject.newSession)
# Add quiet mode to form
cbQuietMode=self.__gladeXML.get_widget('cbQuietMode')
cbQuietMode.set_active(settingsFromObject.quietMode)
# Add dont-synchronization mode to form
cbDontSynchronizeMode=self.__gladeXML.get_widget('cbDontSynchronizeMode')
cbDontSynchronizeMode.set_active(not settingsFromObject.synchronizePanes)
# Add filenames to form
# Remove old
for vboxFilenameChildren in self.__vboxFilename.get_children():
self.__vboxFilename.remove(vboxFilenameChildren)
# End for
# Add new
if settingsFromObject.filename is not None:
for filename in settingsFromObject.filename:
self.__addFilenameItem(filename[0])
# End for
# End if
# Add config settings to form
# Remove old
for vboxConfigSettingsChildren in self.__vboxConfigSettings.get_children():
self.__vboxConfigSettings.remove(vboxConfigSettingsChildren)
# End for
# Add new
if settingsFromObject.configSetting is not None:
for configSetting in settingsFromObject.configSetting:
self.__addConfigSettingItem(configSetting[0])
# End for
# End if
# End def
def __saveFormData(self):
if not self.__settingSelected and self.__currentSelectedSetting is not None:
# Get current setting
setting=self.__serviceSettings[self.__currentSelectedSetting]
# Gather settings from form
valueArray=[]
# Set epoch time
f=self.__gladeXML.get_widget('cbSetEpochTime')
if f.get_active(): valueArray.append('-set')
# New session
f=self.__gladeXML.get_widget('cbNewSession')
if f.get_active(): valueArray.append('-ns')
# Quiet mode
f=self.__gladeXML.get_widget('cbQuietMode')
if f.get_active(): valueArray.append('-q')
# Quiet mode
f=self.__gladeXML.get_widget('cbDontSynchronizeMode')
if f.get_active(): valueArray.append('-ds')
# TMUX Session name
f=self.__gladeXML.get_widget('fTMUXSessionName')
if f.get_text().strip(): valueArray.append('-ts "'+f.get_text().strip()+'"')
# SSH user name
f=self.__gladeXML.get_widget('fUser')
if f.get_text().strip(): valueArray.append('-u "'+f.get_text().strip()+'"')
# Certificate / Identity
f=self.__gladeXML.get_widget('fCertificateIdentity')
if f.get_filename() is not None: valueArray.append('-c "'+f.get_filename()+'"')
# Additional SSH arguments
f=self.__gladeXML.get_widget('fAdditionalSSHArguments')
if f.get_text().strip(): valueArray.append('-sa "'+f.get_text().strip()+'"')
# Server connection strings, -sc
self.__addValuesFromVBox(valueArray, self.__vboxSC, '-sc')
# Filenames with server connection strings, -f
self.__addValuesFromVBox(valueArray, self.__vboxFilename, '-f')
# Config settings -cs
self.__addValuesFromVBox(valueArray, self.__vboxConfigSettings, '-cs')
# Store to settings
setting.setValue(' '.join(valueArray))
self.__serviceSettings[self.__currentSelectedSetting]=setting
# Save settings
# already saved with [...]-__setitem__
#self.__serviceSettings.save()
# End if
# End def
def __addValuesFromVBox(self, valueArray, vbox, parameterShortcut):
# Walk through all hbox children
for childHBOX in vbox.get_children():
# Get HBOX children
childHBOXChildren=childHBOX.get_children()
if len(childHBOXChildren)>0:
                # Get the second child of the hbox, as it holds the actual value
f=childHBOXChildren[1]
# Entry field
if type(f)==gtk.Entry and f.get_text().strip(): valueArray.append(parameterShortcut+' "'+f.get_text().strip()+'"')
# FileChooseButton
if type(f)==gtk.FileChooserButton and f.get_filename() is not None: valueArray.append(parameterShortcut+' "'+f.get_filename()+'"')
# Combobox field
if type(f)==gtk.ComboBoxEntry and f.get_active_text().strip(): valueArray.append(parameterShortcut+' "'+f.get_active_text().strip()+'"')
# End if
# End for
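        # A filled-in form thus serializes to a single tmux-cssh argument string, e.g.
        # (hypothetical values): -u "root" -sc "user@web01" -sc "user@web02" -ts "deploy"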
# End def
def on_bAddSetting_clicked(self, view):
settingName=EntryDlg(labeltext="Insert the name of the new setting:").Run()
# Not empty, add config setting
        if settingName:
# Create and add setting
setting=Setting()
setting.setKey(settingName)
setting.setValue('')
self.__serviceSettings.add(setting)
# Refresh list
self.__updateGUI()
# Set new item as selected
self.__treeviewSettings.set_cursor(len(self.__liststoreSettings)-1)
# End if
# End def
def on_bEditSetting_clicked(self, view):
# Get setting
        setting=self.__serviceSettings[self.__currentSelectedSetting]
dlg=EntryDlg(labeltext="Insert the new name of the setting:", defaultvalue=setting.getKey(), parentWindow=self.__windowMain)
settingName=dlg.Run()
del dlg
# Not empty, add config setting
        if settingName:
# Update setting
setting.setKey(settingName)
self.__serviceSettings[self.__currentSelectedSetting]=setting
# Refresh list
self.__updateGUI()
# End if
# End def
def on_bRemoveSetting_clicked(self, view):
# Get setting
setting=self.__serviceSettings[self.__currentSelectedSetting]
# Create and display confirm dialog
dlg=gtk.MessageDialog(self.__windowMain, gtk.DIALOG_MODAL, gtk.MESSAGE_QUESTION, gtk.BUTTONS_OK_CANCEL, u'Do you want to remove the config setting \''+setting.getKey()+'\' ?')
response=dlg.run()
dlg.destroy()
# Ok, delete setting
if response==gtk.RESPONSE_OK:
# Delete
del self.__serviceSettings[self.__currentSelectedSetting]
# Refresh list
self.__updateGUI()
# End if
# End def
def on_bRunSetting_clicked(self, view):
self.__runSetting(self.__currentSelectedSetting)
# End def
def __addSCItem(self, value=""):
# Create new Hbox
hbox=gtk.HBox(False, 5)
# Add label to hbox
lLabel=gtk.Label('-sc')
lLabel.set_property('width-request', 50)
lLabel.set_property('xalign', 0)
lLabel.show()
hbox.pack_start(lLabel, False)
# Add entry field to hbox
fEntry=gtk.Entry()
fEntry.set_text(value)
fEntry.connect('focus-out-event', self.on_formitem_changed)
fEntry.show()
hbox.pack_start(fEntry)
# Add remove button to hbox
bRemove=gtk.Button('X')
bRemove.connect('clicked', self.on_remove_item, self.__vboxSC, hbox)
bRemove.show()
hbox.pack_start(bRemove, False)
# Show hbox
hbox.show()
self.__vboxSC.add(hbox)
# Focus entry field
if not value: fEntry.grab_focus()
# End def
def on_bAddSC_clicked(self, view):
self.__addSCItem()
# End def
def on_remove_item(self, view, *data):
# Remove hbox from vbox
if len(data)>0:
vbox=data[0]
hbox=data[1]
vbox.remove(hbox)
self.__saveFormData()
# End if
# End def
def on_formitem_changed(self, *params):
if not self.__settingSelected:
self.__saveFormData()
# End if
# End def
def __addFilenameItem(self, value=''):
# Create new hbox
hbox=gtk.HBox(False, 5)
# Add label to hbox
lLabel=gtk.Label('-f')
lLabel.set_property('width-request', 50)
lLabel.set_property('xalign', 0)
lLabel.show()
hbox.pack_start(lLabel, False)
# Add entry field to hbox
fFilename=gtk.FileChooserButton('File for -sc items')
fFilename.set_filename(value)
fFilename.connect('file-set', self.on_formitem_changed)
fFilename.show()
hbox.pack_start(fFilename)
# Add remove button to hbox
bRemove=gtk.Button('X')
bRemove.connect('clicked', self.on_remove_item, self.__vboxFilename, hbox)
bRemove.show()
hbox.pack_start(bRemove, False)
# Show hbox
hbox.show()
self.__vboxFilename.add(hbox)
# Focus file field
if not value: fFilename.grab_focus()
# End def
def on_bAddFilename_clicked(self, view):
self.__addFilenameItem()
# End def
def __addConfigSettingItem(self, value=''):
# Create new hbox
hbox=gtk.HBox(False, 5)
# Add label to hbox
lLabel=gtk.Label('-cs')
lLabel.set_property('width-request', 50)
lLabel.set_property('xalign', 0)
lLabel.show()
hbox.pack_start(lLabel, False)
# Add entry field to hbox
cbConfigSettings=gtk.combo_box_entry_new_text()
cbConfigSettings.append_text('')
cbConfigSettings.set_active(0)
# Fill with available settings
counter=1
for setting in self.__serviceSettings.yieldSettings():
cbConfigSettings.append_text(setting.getKey())
# Activate entry
if setting.getKey()==value:
cbConfigSettings.set_active(counter)
# End if
counter=counter+1
# End for
cbConfigSettings.connect('changed', self.on_formitem_changed)
cbConfigSettings.connect('focus-out-event', self.on_formitem_changed)
cbConfigSettings.show()
hbox.pack_start(cbConfigSettings)
# Add remove button to hbox
bRemove=gtk.Button('X')
bRemove.connect('clicked', self.on_remove_item, self.__vboxConfigSettings, hbox)
bRemove.show()
hbox.pack_start(bRemove, False)
# Show hbox
hbox.show()
self.__vboxConfigSettings.add(hbox)
# Focus entry field
if not value: cbConfigSettings.grab_focus()
# End def
def on_bAddConfigSetting_clicked(self, view):
self.__addConfigSettingItem()
# End def
def on_windowMain_window_state_event(self, view, event):
self.__windowStateMaximized=(view.get_window().get_state() & gtk.gdk.WINDOW_STATE_MAXIMIZED)==gtk.gdk.WINDOW_STATE_MAXIMIZED
# End def
def on_windowMain_check_resize_event(self, *params):
# Save size
size=self.__windowMain.get_size()
self.__config.set(Config.SECTION_WINDOW, Config.OPTION_WINDOW_SIZE_X, size[0])
self.__config.set(Config.SECTION_WINDOW, Config.OPTION_WINDOW_SIZE_Y, size[1])
# Save position
position=self.__windowMain.get_position()
self.__config.set(Config.SECTION_WINDOW, Config.OPTION_WINDOW_POSITION_X, position[0])
self.__config.set(Config.SECTION_WINDOW, Config.OPTION_WINDOW_POSITION_Y, position[1])
# End def
def on_menuItemAdd_activate(self, view):
self.on_bAddSetting_clicked(None)
# End def
def on_menuItemEdit_activate(self, view):
self.on_bEditSetting_clicked(None)
# End def
def on_menuItemRemove_activate(self, view):
self.on_bRemoveSetting_clicked(None)
# End def
def on_menuItemQuit_activate(self, view):
self.__quit()
# End def
def on_menuItemAbout_activate(self, view):
dlg=gtk.AboutDialog()
dlg.set_name(environment.APP_NAME)
dlg.set_version(environment.APP_VERSION)
dlg.set_website(environment.APP_WEBSITE)
dlg.set_authors(environment.AUTHOR_NAME)
dlg.set_license(environment.APP_LICENSE_TEXT)
dlg.run()
dlg.destroy()
del dlg
# End def
def on_systrayIcon_activate(self, icon):
menu=gtk.Menu()
for i in range(0, len(self.__serviceSettings)):
# Get setting object
settingEntry=self.__serviceSettings[i]
# Create and add menu item
item=gtk.MenuItem(settingEntry.getKey())
item.set_use_underline(False)
item.connect('activate', self.on_systrayIcon_setting_activate, i)
item.show()
menu.append(item)
# End for
menu.popup(None, None, None, 0, 0, self.__systrayIcon)
# End def
def on_systrayIcon_popup_menu(self, icon, eventButton, eventTime):
menu=self.__gladeXML.get_widget('popmenuSystray')
menu.popup(None, None, None, eventButton, eventTime, self.__systrayIcon)
# End def
def on_systrayIcon_setting_activate(self, view, *params):
self.__runSetting(params[0])
# End def
def on_menuitemShowHideMainwindow_activate(self, view):
if self.__windowMain.props.visible:
self.__windowMain.hide()
else:
self.__windowMain.show()
# End if
self.__config.set(Config.SECTION_WINDOW, Config.OPTION_WINDOW_START_MINIMIZED, not self.__windowMain.props.visible)
# End def
def on_bClearCertificateIdentity_clicked(self, view):
fIdentity=self.__gladeXML.get_widget('fCertificateIdentity')
if fIdentity.get_filename() is not None:
fIdentity.unselect_filename(fIdentity.get_filename())
self.__saveFormData()
# End if
# End def
# End class | gui/main.py | 0.408631 | 0.043063 |
import os
import h5py
import shutil
import numpy as np
from copy import deepcopy
from contextlib import contextmanager
from gonzales.simulator.space import Space
import gonzales.simulator.simulation as sim
# ---------------------------------------------------------
# Utility functions
# ---------------------------------------------------------
@contextmanager
def make_temp_result_dir(dir_name):
dir_path = os.path.dirname(os.path.abspath(__file__))
temp_dir = os.path.join(dir_path, dir_name)
os.mkdir(temp_dir)
try:
yield temp_dir
finally:
shutil.rmtree(temp_dir)
def _are_results_equal(sim1, sim2, n_steps):
with h5py.File(sim1.output_filepath, 'r') as pp, h5py.File(sim2.output_filepath, 'r') as bh:
pos_data_pp = pp['results/position']
pos_data_bh = bh['results/position']
vel_data_pp = pp['results/velocity']
vel_data_bh = bh['results/velocity']
for k in range(n_steps + 1):
np.testing.assert_equal(pos_data_bh[k], pos_data_pp[k])
np.testing.assert_equal(vel_data_bh[k], vel_data_pp[k])
def _are_results_close(sim1, sim2, n_steps):
with h5py.File(sim1.output_filepath, 'r') as pp, h5py.File(sim2.output_filepath, 'r') as bh:
pos_data_pp = pp['results/position']
pos_data_bh = bh['results/position']
vel_data_pp = pp['results/velocity']
vel_data_bh = bh['results/velocity']
for k in range(n_steps + 1):
assert _are_vecs_close(pos_data_bh[k], pos_data_pp[k])
assert _are_vecs_close(vel_data_bh[k], vel_data_pp[k])
def _are_vecs_close(vec, vec_ref, rtol=1e-2, atol=1e-15):
"""
:param vec: estimated vec
:param vec_ref: vec to which we compare (correct one)
:param rtol: relative tolerance
:param atol: absolute tolerance
:return: bool
"""
x = np.mean(np.linalg.norm(vec - vec_ref, axis=1))
y = np.mean(np.linalg.norm(vec_ref, axis=1) * rtol + atol)
return x <= y
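# Criterion restated: mean_k ||vec_k - vec_ref_k|| <= mean_k(||vec_ref_k|| * rtol + atol),
# i.e. the average deviation must stay within the rtol-scaled average reference norm plus atol.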
# ---------------------------------------------------------
# Tests
# ---------------------------------------------------------
def vel_func(pos_vec):
return np.array((0, 0, 0))
def mass_func(pos_vec):
return 1.0
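# Initial conditions shared by the tests below: all particles start at rest with unit mass.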
def test_sim_barnes_hut_theta_zero():
# test Barnes-Hut simulation relative to Cython brute force simulation, for theta = 0
with make_temp_result_dir('temp_results') as res_dir:
pp_file = os.path.join(res_dir, 'test_pp.hdf5')
bh_file = os.path.join(res_dir, 'test_bh.hdf5')
G = 1.0
eps = 1.0e-3
theta = 0.
n_steps = 100
step_size = 0.01
n = 1000
cube_length = int(np.sqrt(n))
space = Space()
space.add_cuboid(n, np.array((0., 0., 0.)), cube_length, cube_length, cube_length, vel_func, mass_func)
space1 = deepcopy(space)
sim_pp = sim.PPSimulation(space, pp_file, G, eps)
sim_bh = sim.BHSimulation(space1, bh_file, G, eps, cube_length, np.array((0., 0., 0.)), theta)
sim_pp.run(n_steps, step_size)
sim_bh.run(n_steps, step_size)
# result comparison
_are_results_equal(sim_pp, sim_bh, n_steps)
def test_sim_barnes_hut_theta_non_zero():
# test Barnes-Hut simulation relative to Cython brute force simulation, for theta = 0.5
with make_temp_result_dir('temp_results') as res_dir:
pp_file = os.path.join(res_dir, 'test_pp.hdf5')
bh_file = os.path.join(res_dir, 'test_bh.hdf5')
G = 1.0
eps = 1.0e-3
theta = 0.5
n_steps = 100
step_size = 0.01
n = 1000
cube_length = int(np.sqrt(n))
space = Space()
space.add_cuboid(n, np.array((0., 0., 0.)), cube_length, cube_length, cube_length, vel_func, mass_func)
space1 = deepcopy(space)
sim_pp = sim.PPSimulation(space, pp_file, G, eps)
sim_bh = sim.BHSimulation(space1, bh_file, G, eps, 10 * cube_length, np.array((0., 0., 0.)), theta)
sim_pp.run(n_steps, step_size)
sim_bh.run(n_steps, step_size)
# result comparison
_are_results_close(sim_pp, sim_bh, n_steps)
def test_energy_conservation_barnes_hut():
# test Barnes-Hut energy conservation
with make_temp_result_dir('temp_results') as res_dir:
bh_file = os.path.join(res_dir, 'test_bh.hdf5')
n = 1000
G = 1.0
eps = 1.0e-3
theta = 0.75
n_steps = 1000
step_size = 0.001
cube_length = int(np.sqrt(n))
space = Space()
space.add_cuboid(n, np.array((0., 0., 0.)), cube_length, cube_length, cube_length, vel_func, mass_func)
sim_bh = sim.BHSimulation(space, bh_file, G, eps, 10 * cube_length, np.array((0., 0., 0.)), theta)
sim_bh.add_result('energy', n_steps)
sim_bh.run(n_steps, step_size)
with h5py.File(sim_bh.output_filepath, 'r') as bh:
e_in = bh['results']['energy'][0]
e_fin = bh['results']['energy'][1]
assert np.abs(e_in - e_fin) / np.abs(e_in) <= 0.01 | test/test_simulation.py | 0.482917 | 0.522385 |
import logging
import signal
from contextlib import contextmanager
from threading import Thread
from time import sleep
from typing import Union, Callable, Generator, List, Optional
from wsgiref.simple_server import make_server, WSGIServer
LOGGER = logging.getLogger(__name__)
class DevelopmentServer:
def __init__(self, wsgi_app: Callable, host: Optional[str] = None, port: int = 0, stop_signals: Optional[List[int]] = None):
self.wsgi_app = wsgi_app
self.host = host or "127.0.0.1"
self.port = port
self.stop_signals = stop_signals or [signal.SIGTERM, signal.SIGINT]
self.server: Union[WSGIServer, None] = None
self._thread: Union[Thread, None] = None
self._is_running = False
def start(self, blocking: bool = True) -> int:
LOGGER.debug("Creating WSGI server for host %s and port %d", self.host, self.port)
self._register_stop_signals()
self.server = make_server(self.host, self.port, self.wsgi_app)
self._thread = Thread(target=self.server.serve_forever)
self._thread.start()
self._is_running = True
if blocking:
LOGGER.info(
"Starting development server in blocking mode at http://%s:%d/", self.host, self.server.server_port
)
self._wait_until_stopped()
else:
LOGGER.info("Development server is now running at http://%s:%d/", self.host, self.port)
return self.server.server_port
def _wait_until_stopped(self):
while self._is_running:
sleep(0.5)
def _register_stop_signals(self):
for stop_signal in self.stop_signals:
LOGGER.debug("Registering signal %d as stop signal", stop_signal)
signal.signal(stop_signal, self._stop_from_signal)
def _stop_from_signal(self, signum: int, __):
LOGGER.info("Received signal %d", signum)
self.stop()
def stop(self):
LOGGER.info("Stopping development server")
self._is_running = False
self.server.shutdown()
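# Usage sketch (assumed caller code, not part of this module):
#   with DevelopmentServer(my_wsgi_app).start_in_context() as port:
#       ...  # issue requests against http://127.0.0.1:<port>/ during a test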
@contextmanager
def start_in_context(self) -> Generator[int, None, None]:
port = self.start(blocking=False)
try:
yield port
finally:
self.stop() | restit/development_server.py | 0.58948 | 0.119691 |
__author__ = 'sandra'
# Python packages
import sys, os
import numpy as np
sys.path.append('/afs/cern.ch/user/s/saumon/FCCee/packages/python/madx_parameter/')
sys.path.append('/afs/cern.ch/user/s/saumon/FCCee/packages/python/metaclass/')
sys.path.append('/afs/cern.ch/user/s/saumon/FCCee/packages/python/classtwisstable/')
from metaclass import twiss
import madx_param
from class_twiss_table import TwissTable
filename=sys.argv[1]
print filename
stat=sys.argv[2]
print stat
# Read the twiss file
class madxParams:
def __init__(self,filename):
self.settings={}
self.read_header(filename)
def read_header(self, filename):
myFile=open(filename,"r")
for line in myFile.readlines():
lineList=line.split()
if lineList[0]=="@":
if lineList[2]=='%le':
self.settings[lineList[1]]=float(lineList[3])
else:
self.settings[lineList[1]]=lineList[3]
myFile.close()
def get_value(self, paramName):
if paramName in self.settings:
return self.settings[paramName]
else:
raise KeyError('undefined config parameter %s' % paramName)
def return_header_value(filename, label):
thing=madxParams(filename)
return thing.get_value(label)
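# Illustrative MAD-X/TFS header line this parser handles (value assumed):
#   @ DYRMS %le 1.2345678900e-04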
dy_rms=return_header_value(filename, "DYRMS")
print dy_rms
f=file("dyrms_"+stat+".txt",'a')
#np.savetxt(f,dy_rms)
f.write("%.10f\n"%dy_rms)
f.close()
y_rms=return_header_value(filename, "YCORMS")
print y_rms
g=file("yrms_"+stat+".txt",'a')
#np.savetxt(f,dy_rms)
g.write("%.10f\n"%y_rms)
g.close()
x_rms=return_header_value(filename, "XCORMS")
print x_rms
xf=file("xrms_"+stat+".txt",'a')
#np.savetxt(f,dy_rms)
xf.write("%.10f\n"%x_rms)
xf.close()
tune1=return_header_value(filename, "Q1")
print tune1
q1=file("q1_"+stat+".txt",'a')
#np.savetxt(f,dy_rms)
q1.write("%.10f\n"%tune1)
q1.close()
tune2=return_header_value(filename, "Q2")
print tune2
q2=file("q2_"+stat+".txt",'a')
#np.savetxt(f,dy_rms)
q2.write("%.10f\n"%tune2)
q2.close()
sys.exit() | correction_methods/getstuff/get_stuff.py | 0.209308 | 0.054374 |
import copy
from operator import itemgetter
from unittest import mock
from neutron.db import segments_db
from neutron.plugins.ml2 import driver_context
from neutron.plugins.ml2 import models as ml2_models
from neutron.tests.unit.plugins.ml2 import test_plugin
from neutron_lib import context
from neutron_lib import exceptions as nl_exc
from neutron_lib.plugins import directory
from oslo_config import cfg
from networking_ccloud.common.config import _override_driver_config
from networking_ccloud.common.config import config_oslo # noqa, make sure config opts are there
from networking_ccloud.common import constants as cc_const
from networking_ccloud.ml2.agent.common.api import CCFabricSwitchAgentRPCClient
from networking_ccloud.ml2.agent.common import messages as agent_msg
from networking_ccloud.tests import base
from networking_ccloud.tests.common import config_fixtures as cfix
class TestCCFabricMechanismDriver(test_plugin.Ml2PluginV2TestCase, base.PortBindingHelper, base.TestCase):
_mechanism_drivers = [cc_const.CC_DRIVER_NAME]
def setUp(self):
cfg.CONF.set_override('driver_config_path', 'invalid/path/to/conf.yaml', group='ml2_cc_fabric')
cfg.CONF.set_override('network_vlan_ranges', ['seagull:23:42', 'cat:53:1337', 'crow:200:300', 'squirrel:17:17'],
group='ml2_type_vlan')
cfg.CONF.set_override('mechanism_drivers', self._mechanism_drivers, group='ml2')
cc_const.SWITCH_AGENT_TOPIC_MAP['test'] = 'cc-fabric-switch-agent-test'
switchgroups = [
cfix.make_switchgroup("seagull"),
cfix.make_switchgroup("crow"),
cfix.make_switchgroup("cat"),
cfix.make_switchgroup("squirrel")
]
# hostgroups:
# nova-compute-seagull - vpod
# metagroup, 10 hosts, 4 interfaces, 2 per switchpair, all lacp
# nova-compute-crow - vpod, same as seagull
# cat - bpod, 10 random hosts
# squirrel - used for vlan exhaustion test and non-lacp hosts
hg_seagull = cfix.make_metagroup("seagull")
hg_crow = cfix.make_metagroup("crow")
hg_cat = cfix.make_hostgroups("cat")
hg_squirrel = cfix.make_metagroup("squirrel")
hostgroups = hg_seagull + hg_crow + hg_cat + hg_squirrel
self.conf_drv = cfix.make_config(switchgroups=switchgroups, hostgroups=hostgroups)
_override_driver_config(self.conf_drv)
self.setup_parent()
self.plugin = directory.get_plugin()
self.context = context.get_admin_context()
mm = directory.get_plugin().mechanism_manager
self.mech_driver = mm.mech_drivers[cc_const.CC_DRIVER_NAME].obj
self._vxlan_segment = {'network_type': 'vxlan', 'physical_network': None,
'segmentation_id': 23, 'id': 'test-id'}
def _test_bind_port(self, fake_host, fake_segments=None, network=None, subnet=None, binding_levels=None):
if network is None:
with self.network() as network:
return self._test_bind_port(fake_host, fake_segments, network, binding_levels=binding_levels)
if subnet is None:
with self.subnet(network=network) as subnet:
return self._test_bind_port(fake_host, fake_segments, network, subnet, binding_levels)
with self.port(subnet=subnet) as port:
port['port']['binding:host_id'] = fake_host
if fake_segments is None:
fake_segments = [self._vxlan_segment]
with mock.patch('neutron.plugins.ml2.driver_context.PortContext.binding_levels',
new_callable=mock.PropertyMock) as bl_mock:
bindings = ml2_models.PortBinding()
pc = driver_context.PortContext(self.plugin, self.context, port['port'], network['network'],
bindings, binding_levels=None)
bl_mock.return_value = binding_levels
pc._segments_to_bind = fake_segments
pc.continue_binding = mock.Mock()
pc.set_binding = mock.Mock()
pc._plugin_context = self.context
self.mech_driver.bind_port(pc)
return pc
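# The binding tests below drive this helper with a fake host and assert on the mocked
# continue_binding/set_binding calls and on the switch config passed to the agent RPC.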
def test_bind_port_direct_level_0(self):
with mock.patch.object(self.mech_driver, 'handle_binding_host_changed') as mock_bhc:
context = self._test_bind_port(fake_host='node001-seagull')
context.continue_binding.assert_called()
mock_bhc.assert_not_called()
context.set_binding.assert_not_called()
def test_bind_port_direct_level_1(self):
fake_segments = [{'id': 'fake-segment-id', 'physical_network': 'seagull', 'segmentation_id': 42,
'network_type': 'vlan'}]
binding_levels = [{'driver': 'cc-fabric', 'bound_segment': self._vxlan_segment}]
with mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
context = self._test_bind_port(fake_host='node001-seagull',
fake_segments=fake_segments, binding_levels=binding_levels)
context.continue_binding.assert_not_called()
mock_acu.assert_called()
context.set_binding.assert_called()
# check config
# FIXME: maybe just construct the switchconfig object? or dump its structure to a dict?
swcfg = mock_acu.call_args[0][1]
self.assertEqual(agent_msg.OperationEnum.add, swcfg[0].operation)
self.assertEqual(2, len(swcfg))
self.assertEqual("seagull-sw1", swcfg[0].switch_name)
self.assertEqual("seagull-sw2", swcfg[1].switch_name)
self.assertEqual('add', swcfg[0].operation.name)
self.assertEqual((23, 42), (swcfg[0].vxlan_maps[0].vni, swcfg[0].vxlan_maps[0].vlan))
self.assertEqual(42, swcfg[0].ifaces[0].native_vlan)
self.assertEqual([42], swcfg[0].ifaces[0].trunk_vlans)
def test_bind_port_direct_level_1_broken_segment(self):
fake_segments = [{'id': 'fake-segment-id', 'physical_network': 'invalid-physnet', 'segmentation_id': 42,
'network_type': 'vlan'}]
binding_levels = [{'driver': 'cc-fabric', 'bound_segment': self._vxlan_segment}]
with mock.patch.object(self.mech_driver, 'handle_binding_host_changed') as mock_bhc:
context = self._test_bind_port(fake_host='node001-seagull',
fake_segments=fake_segments, binding_levels=binding_levels)
context.continue_binding.assert_not_called()
mock_bhc.assert_not_called()
context.set_binding.assert_not_called()
def test_bind_port_hpb(self):
# only one stage bound
with mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
context = self._test_bind_port(fake_host='nova-compute-seagull')
context.continue_binding.assert_called()
mock_acu.assert_called()
context.set_binding.assert_not_called()
# FIXME: check config
swcfg = mock_acu.call_args[0][1]
self.assertEqual(2, len(swcfg))
self.assertEqual(10, len(swcfg[0].ifaces))
def test_bind_port_hpb_level_1_ignored(self):
# driver should ignore second level for non-direct groups
fake_segments = [{'id': 'fake-segment-id', 'physical_network': 'seagull', 'segmentation_id': 42,
'network_type': 'vlan'}]
binding_levels = [{'driver': 'cc-fabric', 'bound_segment': self._vxlan_segment}]
with mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
context = self._test_bind_port(fake_host='nova-compute-seagull',
fake_segments=fake_segments, binding_levels=binding_levels)
context.continue_binding.assert_not_called()
mock_acu.assert_not_called()
context.set_binding.assert_not_called()
def test_bind_port_new_segment(self):
with mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
context = self._test_bind_port(fake_host='nova-compute-seagull')
context.continue_binding.assert_called()
mock_acu.assert_called()
context.set_binding.assert_not_called()
def test_bind_port_new_and_existing_segment(self):
with self.network() as network:
with self.subnet(network=network) as subnet:
with mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update'):
context1 = self._test_bind_port(fake_host='nova-compute-seagull', network=network, subnet=subnet)
context1.continue_binding.assert_called()
context2 = self._test_bind_port(fake_host='node001-seagull', network=network, subnet=subnet)
context2.continue_binding.assert_called()
# segment looks different depending on whether it was created or already existed
# --> compare first arg and second arg separately
self.assertEqual(context1.continue_binding.call_args[0][0],
context2.continue_binding.call_args[0][0])
keyget = itemgetter('id', 'network_type', 'physical_network', 'segmentation_id')
self.assertEqual(keyget(context1.continue_binding.call_args[0][1][0]),
keyget(context2.continue_binding.call_args[0][1][0]))
segment_id = context1.continue_binding.call_args[0][1][0]['id']
segment = segments_db.get_segment_by_id(self.context, segment_id)
self.assertEqual('seagull', segment['physical_network'])
def test_bind_port_vlan_exhaustion(self):
with mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update'):
context1 = self._test_bind_port(fake_host='nova-compute-squirrel')
context1.continue_binding.assert_called()
self.assertRaises(nl_exc.NoNetworkAvailable, self._test_bind_port, fake_host='nova-compute-squirrel')
def test_bind_port_metagroup(self):
pass
def test_bind_port_metagroup_with_exception(self):
pass
def test_bind_port_host_not_found(self):
context = self._test_bind_port('unknown_host')
context.continue_binding.assert_not_called()
context.set_binding.assert_not_called()
def test_delete_port_segment_in_use_host_in_use(self):
net = self._make_network(name="a", admin_state_up=True, fmt='json')['network']
seg_0 = {'network_id': net['id'], 'network_type': 'vxlan', 'segmentation_id': 232323}
seg_1 = {'network_id': net['id'], 'network_type': 'vlan', 'physical_network': 'seagull',
'segmentation_id': 1000}
segments_db.add_network_segment(self.context, net['id'], seg_0)
segments_db.add_network_segment(self.context, net['id'], seg_1, 1, True)
self._port_a_1 = self._make_port_with_binding(segments=[(seg_0, 'cc-fabric'),
(seg_1, 'cat-ml2')],
host='nova-compute-seagull')
with self.subnet(network=dict(network=net)) as subnet:
with self.port(subnet=subnet) as port:
port['port']['binding:host_id'] = "nova-compute-seagull"
with mock.patch('neutron.plugins.ml2.driver_context.PortContext.binding_levels',
new_callable=mock.PropertyMock) as bl_mock, \
mock.patch.object(self.mech_driver, 'handle_binding_host_changed') as mock_bhc:
bindings = ml2_models.PortBinding()
pc = driver_context.PortContext(self.plugin, self.context, port['port'], net,
bindings, binding_levels=None)
bl_mock.return_value = [dict(bound_segment=seg_0), dict(bound_segment=seg_1)]
pc.release_dynamic_segment = mock.Mock()
pc._plugin_context = self.context
self.mech_driver.delete_port_postcommit(pc)
pc.release_dynamic_segment.assert_not_called()
mock_bhc.assert_not_called()
def test_delete_port_segment_in_use_host_not_in_use(self):
# segment + one active port binding with same binding host as is deleted
net = self._make_network(name="a", admin_state_up=True, fmt='json')['network']
seg_0 = {'network_id': net['id'], 'network_type': 'vxlan', 'segmentation_id': 232323}
seg_1 = {'network_id': net['id'], 'network_type': 'vlan', 'physical_network': 'seagull',
'segmentation_id': 1000}
segments_db.add_network_segment(self.context, net['id'], seg_0)
segments_db.add_network_segment(self.context, net['id'], seg_1, 1, True)
self._port_a_1 = self._make_port_with_binding(segments=[(seg_0, 'cc-fabric'),
(seg_1, 'cat-ml2')],
host='node001-seagull')
with self.subnet(network=dict(network=net)) as subnet:
with self.port(subnet=subnet) as port:
port['port']['binding:host_id'] = "node002-seagull"
with mock.patch('neutron.plugins.ml2.driver_context.PortContext.binding_levels',
new_callable=mock.PropertyMock) as bl_mock, \
mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
bindings = ml2_models.PortBinding()
pc = driver_context.PortContext(self.plugin, self.context, port['port'], net,
bindings, binding_levels=None)
bl_mock.return_value = [dict(bound_segment=seg_0), dict(bound_segment=seg_1)]
pc.release_dynamic_segment = mock.Mock()
pc._plugin_context = self.context
self.mech_driver.delete_port_postcommit(pc)
pc.release_dynamic_segment.assert_not_called()
mock_acu.assert_called()
swcfgs = mock_acu.call_args[0][1]
for swcfg in swcfgs:
self.assertEqual(agent_msg.OperationEnum.remove, swcfg.operation)
# no vlan updates!
self.assertIsNone(swcfg.vlans)
self.assertIsNone(swcfg.vxlan_maps)
self.assertIsNone(swcfg.bgp)
for iface in swcfg.ifaces:
self.assertEqual([seg_1['segmentation_id']], iface.trunk_vlans)
def test_delete_port_segment_not_in_use(self):
net = self._make_network(name="a", admin_state_up=True, fmt='json')['network']
seg_0 = {'network_id': net['id'], 'network_type': 'vxlan', 'segmentation_id': 232323}
seg_1 = {'network_id': net['id'], 'network_type': 'vlan', 'physical_network': 'seagull',
'segmentation_id': 1000}
segments_db.add_network_segment(self.context, net['id'], seg_0)
segments_db.add_network_segment(self.context, net['id'], seg_1, 1, True)
with self.subnet(network=dict(network=net)) as subnet:
with self.port(subnet=subnet) as port:
port['port']['binding:host_id'] = "nova-compute-seagull"
with mock.patch('neutron.plugins.ml2.driver_context.PortContext.binding_levels',
new_callable=mock.PropertyMock) as bl_mock, \
mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
bindings = ml2_models.PortBinding()
pc = driver_context.PortContext(self.plugin, self.context, port['port'], net,
bindings, binding_levels=None)
bl_mock.return_value = [dict(bound_segment=seg_0), dict(bound_segment=seg_1)]
pc.release_dynamic_segment = mock.Mock()
pc._plugin_context = self.context
self.mech_driver.delete_port_postcommit(pc)
pc.release_dynamic_segment.assert_called()
self.assertEqual(seg_1['id'], pc.release_dynamic_segment.call_args[0][0])
mock_acu.assert_called()
swcfgs = mock_acu.call_args[0][1]
for swcfg in swcfgs:
self.assertEqual(agent_msg.OperationEnum.remove, swcfg.operation)
self.assertEqual([seg_1['segmentation_id']], [v.vlan for v in swcfg.vlans])
self.assertEqual([(seg_0['segmentation_id'], seg_1['segmentation_id'])],
[(m.vni, m.vlan) for m in swcfg.vxlan_maps])
self.assertEqual([seg_1['segmentation_id']], [v.vlan for v in swcfg.bgp.vlans])
for iface in swcfg.ifaces:
self.assertEqual([seg_1['segmentation_id']], iface.trunk_vlans)
def test_delete_port_with_host_that_is_in_metagroup(self):
net = self._make_network(name="a", admin_state_up=True, fmt='json')['network']
seg_0 = {'network_id': net['id'], 'network_type': 'vxlan', 'segmentation_id': 232323}
seg_1 = {'network_id': net['id'], 'network_type': 'vlan', 'physical_network': 'seagull',
'segmentation_id': 1000}
segments_db.add_network_segment(self.context, net['id'], seg_0)
segments_db.add_network_segment(self.context, net['id'], seg_1, 1, True)
self._port_a_1 = self._make_port_with_binding(segments=[(seg_0, 'cc-fabric'),
(seg_1, 'cat-ml2')],
host='node001-seagull')
with self.subnet(network=dict(network=net)) as subnet:
with self.port(subnet=subnet) as port:
port['port']['binding:host_id'] = "nova-compute-seagull"
with mock.patch('neutron.plugins.ml2.driver_context.PortContext.binding_levels',
new_callable=mock.PropertyMock) as bl_mock, \
mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
bindings = ml2_models.PortBinding()
pc = driver_context.PortContext(self.plugin, self.context, port['port'], net,
bindings, binding_levels=None)
bl_mock.return_value = [dict(bound_segment=seg_0), dict(bound_segment=seg_1)]
pc.release_dynamic_segment = mock.Mock()
pc._plugin_context = self.context
self.mech_driver.delete_port_postcommit(pc)
pc.release_dynamic_segment.assert_not_called()
mock_acu.assert_called()
swcfgs = mock_acu.call_args[0][1]
for swcfg in swcfgs:
self.assertEqual(agent_msg.OperationEnum.remove, swcfg.operation)
# make sure Port-Channel101 of node001-seagull is not part of this
self.assertEqual([f"Port-Channel{n}" for n in range(102, 111)],
[iface.name for iface in swcfg.ifaces])
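# Re-binding a port to the host it is already bound to must not trigger
# driver_handle_binding_host_removed.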
def test_update_port_to_same_host(self):
net = self._make_network(name="a", admin_state_up=True, fmt='json')['network']
seg_0 = {'network_id': net['id'], 'network_type': 'vxlan', 'segmentation_id': 232323}
seg_1 = {'network_id': net['id'], 'network_type': 'vlan', 'physical_network': 'seagull',
'segmentation_id': 1000}
segments_db.add_network_segment(self.context, net['id'], seg_0)
segments_db.add_network_segment(self.context, net['id'], seg_1, 1, True)
self._port_a_1 = self._make_port_with_binding(segments=[(seg_0, 'cc-fabric'),
(seg_1, 'cat-ml2')],
host='node001-seagull')
with self.subnet(network=dict(network=net)) as subnet:
with self.port(subnet=subnet) as port:
port['port']['binding:host_id'] = "nova-compute-seagull"
with mock.patch('networking_ccloud.ml2.mech_driver.CCFabricMechanismDriver.'
'driver_handle_binding_host_removed') as hbh_mock:
bindings = ml2_models.PortBinding()
pc = driver_context.PortContext(self.plugin, self.context, port['port'], net,
bindings, binding_levels=None,
original_port=port['port'])
pc._plugin_context = self.context
self.mech_driver.update_port_postcommit(pc)
hbh_mock.assert_not_called()
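# Moving a port to a host unknown to the driver ("dummy") tears down the old
# binding: the switch config is removed and the dynamic segment released.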
def test_update_port_to_dummy_host(self):
net = self._make_network(name="a", admin_state_up=True, fmt='json')['network']
seg_0 = {'network_id': net['id'], 'network_type': 'vxlan', 'segmentation_id': 232323}
seg_1 = {'network_id': net['id'], 'network_type': 'vlan', 'physical_network': 'seagull',
'segmentation_id': 2000}
segments_db.add_network_segment(self.context, net['id'], seg_0)
segments_db.add_network_segment(self.context, net['id'], seg_1, 1, True)
with self.subnet(network=dict(network=net)) as subnet:
with self.port(subnet=subnet) as port:
old_port = copy.deepcopy(port)
old_port['port']['binding:host_id'] = "nova-compute-seagull"
port['port']['binding:host_id'] = "dummy"
with mock.patch('neutron.plugins.ml2.driver_context.PortContext.original_binding_levels',
new_callable=mock.PropertyMock) as bl_mock, \
mock.patch.object(CCFabricSwitchAgentRPCClient, 'apply_config_update') as mock_acu:
bl_mock.return_value = [dict(bound_segment=seg_0), dict(bound_segment=seg_1)]
nc = driver_context.NetworkContext(self.plugin, self.context, net, net)
bindings = ml2_models.PortBinding()
pc = driver_context.PortContext(self.plugin, self.context, port['port'], nc,
bindings, binding_levels=None,
original_port=old_port['port'])
pc.release_dynamic_segment = mock.Mock()
pc._plugin_context = self.context
self.mech_driver.update_port_postcommit(pc)
pc.release_dynamic_segment.assert_called()
self.assertEqual(seg_1['id'], pc.release_dynamic_segment.call_args[0][0])
mock_acu.assert_called() | networking_ccloud/tests/unit/ml2/test_mech_driver.py | 0.504883 | 0.098947 |
from flask import jsonify, current_app, request
from flask.views import MethodView
from flask_template.app import db
from models import User
from util.decorators import admin_required
class UserAPI(MethodView):
decorators = [admin_required]
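# The decorators hook applies admin_required to every HTTP method of this view.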
def get(self):
arg_id = request.args.get('id')
arg_username = request.args.get('username')
arg_email = request.args.get('email')
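# Lookup precedence: id, then username, then email; only the first supplied key is used.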
if arg_id:
user = db.session.query(User).filter_by(id=arg_id).first()
elif arg_username:
user = db.session.query(User).filter_by(username=arg_username).first()
elif arg_email:
user = db.session.query(User).filter_by(email=arg_email).first()
else:
current_app.logger.warning("ArgumnetError: %s", request.args)
return jsonify({
'code': 'ArgumnetError',
'msg': 'At least one of (id, username, email) should be supplied.'
}), 400
if user:
current_app.logger.info("User founded: %s", user)
return jsonify({
'code': 'Success',
'msg': 'User founded.',
'data': user.serialize
})
current_app.logger.warning("User not founded: %s", request.args)
return jsonify({
'code': 'NotFound',
'msg': 'User not founded.',
}), 404
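# POST creates an account from a JSON body; database errors surface as a 500
# carrying the exception class name.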
def post(self):
username = request.json.get('username')
password = request.json.get('password')
email = request.json.get('email')
# reject incomplete payloads up front instead of failing inside the model
if not (username and password and email):
return jsonify({
'code': 'ArgumentError',
'msg': 'username, password and email must all be supplied.'
}), 400
new_user = User(username, password, email)
try:
db.session.add(new_user)
db.session.commit()
current_app.logger.info("new user added: %s", new_user)
return jsonify({
'code': 'Success',
'msg': 'Account created.'
}), 201
except Exception as err:
db.session.rollback()  # keep the session usable after a failed insert
current_app.logger.exception("Unexpected error.")
return jsonify({
"code": err.__class__.__name__,
"msg": "Failed to create account.",
"data": str(err)
}), 500
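# DELETE mirrors GET's lookup chain but reads its parameters from the JSON body
# instead of the query string.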
def delete(self):
arg_id = request.json.get('id')
arg_username = request.json.get('username')
arg_email = request.json.get('email')
if arg_id:
user = db.session.query(User).filter_by(id=arg_id).first()
elif arg_username:
user = db.session.query(User).filter_by(username=arg_username).first()
elif arg_email:
user = db.session.query(User).filter_by(email=arg_email).first()
else:
current_app.logger.warning("ArgumnetError: %s", request.args)
return jsonify({
'code': 'ArgumnetError',
'msg': 'At least one of (id, username, email) should be supplied.'
}), 400
if user:
data = user.serialize  # capture before delete: the instance expires after commit
db.session.delete(user)
db.session.commit()
current_app.logger.info("User deleted: %s", data)
return jsonify({
'code': 'Success',
'msg': 'User deleted.',
'data': data
}), 200  # a 204 response must not carry a body, so return 200 with the deleted record
current_app.logger.warning("User not founded: %s", request.args)
return jsonify({
'code': 'NotFound',
'msg': 'User not found.',
}), 404 | admin/views.py | 0.27048 | 0.055285 |
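As a usage note for the record above: a Flask MethodView such as UserAPI is registered with add_url_rule rather than a route decorator. A minimal sketch, assuming an admin blueprint; the blueprint name, URL prefix and endpoint name below are illustrative assumptions, not part of the original repository:

from flask import Blueprint
from admin.views import UserAPI

# Hypothetical wiring; the real project may register the view elsewhere.
admin_bp = Blueprint('admin', __name__, url_prefix='/admin')
admin_bp.add_url_rule('/users', view_func=UserAPI.as_view('user_api'))
# GET/POST/DELETE on /admin/users now dispatch to UserAPI.get/post/delete.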