# File: workspace/training_demo/TF-image-od.py
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # Suppress TensorFlow logging (1)
import pathlib
import tensorflow as tf
import cv2
import argparse
tf.get_logger().setLevel('ERROR') # Suppress TensorFlow logging (2)
parser = argparse.ArgumentParser()
parser.add_argument('--model', help='Folder that the saved model is located in',
                    default='exported-models/my_mobilenet_model')
parser.add_argument('--labels', help='Where the label map is located',
                    default='exported-models/my_mobilenet_model/saved_model/label_map.pbtxt')
parser.add_argument('--image', help='Name of the single image to perform detection on',
                    default='images/test/i-1e092ec6eabf47f9b85795a9e069181b.jpg')
parser.add_argument('--threshold', help='Minimum confidence threshold for displaying detected objects',
                    default=0.60)
args = parser.parse_args()
# Enable GPU dynamic memory allocation
gpus = tf.config.experimental.list_physical_devices('GPU')
for gpu in gpus:
tf.config.experimental.set_memory_growth(gpu, True)
# PROVIDE PATH TO THE IMAGE
IMAGE_PATHS = args.image
# PROVIDE PATH TO MODEL DIRECTORY
PATH_TO_MODEL_DIR = args.model
# PROVIDE PATH TO LABEL MAP
PATH_TO_LABELS = args.labels
# PROVIDE THE MINIMUM CONFIDENCE THRESHOLD
MIN_CONF_THRESH = float(args.threshold)
# LOAD THE MODEL
import time
from object_detection.utils import label_map_util
from object_detection.utils import visualization_utils as viz_utils
PATH_TO_SAVED_MODEL = PATH_TO_MODEL_DIR + "/saved_model"
print('Loading model...', end='')
start_time = time.time()
# LOAD SAVED MODEL AND BUILD DETECTION FUNCTION
detect_fn = tf.saved_model.load(PATH_TO_SAVED_MODEL)
end_time = time.time()
elapsed_time = end_time - start_time
print('Done! Took {} seconds'.format(elapsed_time))
# LOAD LABEL MAP DATA FOR PLOTTING
category_index = label_map_util.create_category_index_from_labelmap(PATH_TO_LABELS,
use_display_name=True)
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import warnings
warnings.filterwarnings('ignore') # Suppress Matplotlib warnings
def load_image_into_numpy_array(path):
"""Load an image from file into a numpy array.
Puts image into numpy array to feed into tensorflow graph.
Note that by convention we put it into a numpy array with shape
(height, width, channels), where channels=3 for RGB.
Args:
path: the file path to the image
Returns:
uint8 numpy array with shape (img_height, img_width, 3)
"""
return np.array(Image.open(path))
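# Note: load_image_into_numpy_array is kept from the upstream tutorial but is
# not used below; the script reads the image with cv2 instead.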
print('Running inference for {}... '.format(IMAGE_PATHS), end='')
image = cv2.imread(IMAGE_PATHS)
# OpenCV loads images as BGR; the detection model expects RGB.
image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# The input needs to be a tensor, convert it using `tf.convert_to_tensor`.
input_tensor = tf.convert_to_tensor(image_rgb)
# The model expects a batch of images, so add an axis with `tf.newaxis`.
input_tensor = input_tensor[tf.newaxis, ...]
detections = detect_fn(input_tensor)
# All outputs are batch tensors.
# Convert to numpy arrays, and take index [0] to remove the batch dimension.
# We're only interested in the first num_detections.
num_detections = int(detections.pop('num_detections'))
detections = {key: value[0, :num_detections].numpy()
for key, value in detections.items()}
detections['num_detections'] = num_detections
# detection_classes should be ints.
detections['detection_classes'] = detections['detection_classes'].astype(np.int64)
image_with_detections = image.copy()
# SET MIN_SCORE_THRESH BASED ON YOUR MINIMUM THRESHOLD FOR DETECTIONS
viz_utils.visualize_boxes_and_labels_on_image_array(
image_with_detections,
detections['detection_boxes'],
detections['detection_classes'],
detections['detection_scores'],
category_index,
use_normalized_coordinates=True,
max_boxes_to_draw=200,
min_score_thresh=MIN_CONF_THRESH,
agnostic_mode=False)
print('Done')
# DISPLAYS OUTPUT IMAGE
cv2.imshow('Object Detector', image_with_detections)
# CLOSES WINDOW ONCE KEY IS PRESSED
cv2.waitKey(0)
# CLEANUP
cv2.destroyAllWindows()
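# --- Usage sketch (not part of the file above) ---
# The script is typically invoked as:
#   python TF-image-od.py --model exported-models/my_mobilenet_model --image images/test/example.jpg
# Below is a minimal, self-contained sketch of the same output post-processing
# (strip the batch dimension, convert to numpy, threshold on score) using a
# fake detections dict instead of a real SavedModel call; every name here is
# an illustrative assumption, not part of the script above.
import numpy as np
import tensorflow as tf

fake_detections = {
    'num_detections': tf.constant([2.0]),
    'detection_boxes': tf.constant([[[0.1, 0.1, 0.5, 0.5], [0.2, 0.2, 0.6, 0.6]]]),
    'detection_classes': tf.constant([[1.0, 2.0]]),
    'detection_scores': tf.constant([[0.9, 0.4]]),
}
num = int(fake_detections.pop('num_detections')[0])
outputs = {key: value[0, :num].numpy() for key, value in fake_detections.items()}
keep = outputs['detection_scores'] >= 0.6  # plays the role of MIN_CONF_THRESH
print(outputs['detection_boxes'][keep])
print(outputs['detection_classes'].astype(np.int64)[keep])

# File: examples/chinese.py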
import pandas
from typing import Union
from libs.stock import Stock
from libs.indicators import Indicators
from . import ExamplesInterface
from .common.trigger import get_entry_momment
class ChineseExample(Stock, ExamplesInterface):
dataframe: pandas.DataFrame
    market_orders_stock_details: list

    def __init__(self, stock_data: Union[list, None] = None):
        super().__init__()
        # A per-instance list; a mutable class-level default would be shared
        # across all instances.
        self.market_orders_stock_details = []
        # Set custom stock data.
        if stock_data is not None:
            self.set_stock_data(stock_data)
        self.create_dataframe()
        self.indicators = Indicators(self.dataframe)
def get_stock_data(self) -> list:
return self.stock
def create_dataframe(self):
self.dataframe = self.stock_dataframe()
def strategy(self, condition_type: Union[str, None] = None) -> tuple:
ssma_3 = self.indicators.ssma(3)
ssma_50 = self.indicators.ssma(50)
dev, dev_color = self.indicators.deviation(20)
""" Strategy conditions """
strategy_buy_conditions = (ssma_3.iloc[-1] > ssma_50.iloc[-1] and ssma_3.iloc[-2] < ssma_50.iloc[
-2] and dev[0] > 0 and dev[0] > dev[1])
strategy_sell_conditions = (ssma_3.iloc[-1] < ssma_50.iloc[-1] and ssma_3.iloc[-2] > ssma_50.iloc[
-2] and dev[0] < 0 and dev[0] < dev[1])
entry_signal, entry_direction = get_entry_momment(strategy_buy_conditions, strategy_sell_conditions,
condition_type)
if entry_signal:
self.market_orders_stock_details.append({
"direction": entry_direction,
"indicators": [
{"name": "SSMA 3 Periods", "value": ssma_3.iloc[-1]},
{"name": "SSMA 3 Periods [Closed]", "value": ssma_3.iloc[-2]},
{"name": "SSMA 50 Periods", "value": ssma_50.iloc[-1]},
{"name": "SSMA 50 Periods [Closed]", "value": ssma_50.iloc[-2]},
{"name": "MA Deviation", "value": dev},
{"name": "MA Deviation Color", "value": dev_color},
],
"candle": self.dataframe.iloc[-1]
})
return entry_signal, entry_direction
def market_order_stock_details(self) -> dict:
        return self.market_orders_stock_details[-1]
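# --- Strategy-condition sketch (not part of the file above) ---
# The buy/sell conditions above test a fast/slow smoothed-moving-average (SSMA)
# crossover plus a deviation filter. `Indicators` is project-internal, so this
# illustration recomputes only the crossover test with plain pandas; the
# EWM-with-alpha-1/period form is a common SSMA implementation and is an
# assumption here, as are the toy prices and periods.
import pandas as pd

prices = pd.Series([10.0, 10.2, 10.1, 10.4, 10.8, 11.2, 11.1, 11.5, 12.0, 12.3])
fast = prices.ewm(alpha=1 / 3, adjust=False).mean()  # stands in for ssma(3)
slow = prices.ewm(alpha=1 / 5, adjust=False).mean()  # short period so the toy series can cross
bullish_cross = fast.iloc[-1] > slow.iloc[-1] and fast.iloc[-2] < slow.iloc[-2]
print('bullish crossover:', bullish_cross)

# File: python/swagger_client/api/categories_api.py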
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class CategoriesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def categories_category_id_delete(self, category_id, **kwargs): # noqa: E501
"""Delete a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_category_id_delete(category_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int category_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.categories_category_id_delete_with_http_info(category_id, **kwargs) # noqa: E501
else:
(data) = self.categories_category_id_delete_with_http_info(category_id, **kwargs) # noqa: E501
return data
def categories_category_id_delete_with_http_info(self, category_id, **kwargs): # noqa: E501
"""Delete a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_category_id_delete_with_http_info(category_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int category_id: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method categories_category_id_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params or
params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `categories_category_id_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerToken'] # noqa: E501
return self.api_client.call_api(
'/categories/{categoryId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def categories_category_id_get(self, category_id, **kwargs): # noqa: E501
"""Get a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_category_id_get(category_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int category_id: (required)
:return: CategoryRecord
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.categories_category_id_get_with_http_info(category_id, **kwargs) # noqa: E501
else:
(data) = self.categories_category_id_get_with_http_info(category_id, **kwargs) # noqa: E501
return data
def categories_category_id_get_with_http_info(self, category_id, **kwargs): # noqa: E501
"""Get a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_category_id_get_with_http_info(category_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int category_id: (required)
:return: CategoryRecord
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['category_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method categories_category_id_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'category_id' is set
if ('category_id' not in params or
params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `categories_category_id_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerToken'] # noqa: E501
return self.api_client.call_api(
'/categories/{categoryId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CategoryRecord', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def categories_category_id_put(self, body, category_id, **kwargs): # noqa: E501
"""Update a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_category_id_put(body, category_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CategoryPayload body: (required)
:param int category_id: (required)
:return: CategoryRecord
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.categories_category_id_put_with_http_info(body, category_id, **kwargs) # noqa: E501
else:
(data) = self.categories_category_id_put_with_http_info(body, category_id, **kwargs) # noqa: E501
return data
def categories_category_id_put_with_http_info(self, body, category_id, **kwargs): # noqa: E501
"""Update a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_category_id_put_with_http_info(body, category_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CategoryPayload body: (required)
:param int category_id: (required)
:return: CategoryRecord
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'category_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method categories_category_id_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `categories_category_id_put`") # noqa: E501
# verify the required parameter 'category_id' is set
if ('category_id' not in params or
params['category_id'] is None):
raise ValueError("Missing the required parameter `category_id` when calling `categories_category_id_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'category_id' in params:
path_params['categoryId'] = params['category_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerToken'] # noqa: E501
return self.api_client.call_api(
'/categories/{categoryId}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CategoryRecord', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def categories_get(self, **kwargs): # noqa: E501
"""Get all categories # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page:
:param int length:
:param str sort:
:param str q:
:param str fields:
:param str _with:
:return: CategoryCollection
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.categories_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.categories_get_with_http_info(**kwargs) # noqa: E501
return data
def categories_get_with_http_info(self, **kwargs): # noqa: E501
"""Get all categories # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int page:
:param int length:
:param str sort:
:param str q:
:param str fields:
:param str _with:
:return: CategoryCollection
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'length', 'sort', 'q', 'fields', '_with'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method categories_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'length' in params:
query_params.append(('length', params['length'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'q' in params:
query_params.append(('q', params['q'])) # noqa: E501
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if '_with' in params:
query_params.append(('with', params['_with'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerToken'] # noqa: E501
return self.api_client.call_api(
'/categories', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CategoryCollection', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def categories_post(self, body, **kwargs): # noqa: E501
"""Create a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_post(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CategoryPayload body: (required)
:return: CategoryRecord
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.categories_post_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.categories_post_with_http_info(body, **kwargs) # noqa: E501
return data
def categories_post_with_http_info(self, body, **kwargs): # noqa: E501
"""Create a category # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.categories_post_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CategoryPayload body: (required)
:return: CategoryRecord
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method categories_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `categories_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['BasicAuth', 'BearerToken'] # noqa: E501
return self.api_client.call_api(
'/categories', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CategoryRecord', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
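# --- Client usage sketch (not part of the file above) ---
# As the docstrings above describe, every method is synchronous by default and
# returns a worker thread when called with async_req=True. A minimal sketch,
# assuming the standard swagger-codegen package layout; the category id 42 is
# a placeholder.
from swagger_client.api_client import ApiClient
from swagger_client.api.categories_api import CategoriesApi

api = CategoriesApi(ApiClient())                      # host/credentials come from the client's configuration
record = api.categories_category_id_get(42)           # synchronous call
thread = api.categories_category_id_get(42, async_req=True)
record_async = thread.get()                           # block until the request thread finishes

# File: airmozilla/closedcaptions/views.py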
import pycaption
from django import http
from django.shortcuts import get_object_or_404
from airmozilla.closedcaptions.models import ClosedCaptions
class TxtWriter(pycaption.base.BaseWriter):
def write(self, caption_set):
lang = caption_set.get_languages()[0]
captions = caption_set.get_captions(lang)
output = 'Language: {}\n\n'.format(lang)
for caption in captions:
line = caption.get_text().replace('\n', ' ')
if line.startswith('- '):
output += '\n\n'
output += line + ' '
return output
SUPPORTED_WRITERS = {
'dfxp': pycaption.DFXPWriter,
'ttml': pycaption.DFXPWriter,
'sami': pycaption.SAMIWriter,
'srt': pycaption.SRTWriter,
'scc': pycaption.SCCWriter,
'webvtt': pycaption.WebVTTWriter,
'txt': TxtWriter,
}
FILE_EXTENSIONS = {
    # 'dfxp' and 'ttml' deliberately share the same file extension.
    'dfxp': 'dfxp',
    'ttml': 'dfxp',
    'sami': 'sami',
    'srt': 'srt',
    'scc': 'scc',
    'webvtt': 'vtt',
    'txt': 'txt',
}
CONTENT_TYPES = {
'txt': 'text/plain',
    'sami': 'text/xml',
'dfxp': 'application/ttml+xml; charset=utf-8',
'vtt': 'text/vtt',
}
def download(request, filename_hash, id, slug, extension):
closedcaptions = get_object_or_404(
ClosedCaptions,
id=id,
event__slug__iexact=slug,
)
if extension not in FILE_EXTENSIONS.values():
raise http.Http404('Unrecognized extension')
if closedcaptions.filename_hash != filename_hash:
raise http.Http404('Unrecognized hash')
for key, ext in FILE_EXTENSIONS.items():
if ext == extension:
output_writer = SUPPORTED_WRITERS[key]
content = closedcaptions.file.read()
if not (
closedcaptions.file.name.lower().endswith('.ttml') or
closedcaptions.file.name.lower().endswith('.dfxp')
):
content = content.decode('utf-8')
reader = pycaption.detect_format(content)
assert reader
converter = pycaption.CaptionConverter()
converter.read(content, reader())
response = http.HttpResponse()
response['Content-Type'] = CONTENT_TYPES.get(extension, 'text/plain')
response.write(converter.write(output_writer()))
    return response
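# --- Conversion sketch (not part of the file above) ---
# The download view above uses pycaption's detect_format/CaptionConverter
# pipeline. A minimal sketch of the same pipeline, converting a tiny SRT
# snippet (an illustrative assumption) to WebVTT:
import pycaption

srt_text = """1
00:00:01,000 --> 00:00:02,000
Hello world.
"""
reader = pycaption.detect_format(srt_text)  # returns the matching reader class
converter = pycaption.CaptionConverter()
converter.read(srt_text, reader())
print(converter.write(pycaption.WebVTTWriter()))

# File: pykambpf/dummy_probes.py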
from bcc import BPF
from pykambpf import UpdatesBuffer, CallGraph, KambpfList
from random import shuffle
import subprocess
from os import getenv
from pathlib import Path
def reload_module():
subprocess.run([getenv("kambpf_reload"), "unload"])
subprocess.run([getenv("kamprobes_reload"), "unload"])
subprocess.run([getenv("kamprobes_reload"), "load"])
subprocess.run([getenv("kambpf_reload"), "load"])
PATH_TO_TEST_MODULE = str(Path(getenv("project_dir")) / "kernel_modules/test_module/build/test_module_main.ko")
dummy_name_pattern = "kambpf_test_dummy_{}"
prog_text = """
int test_fun(struct pt_regs *ctx) {
return 0;
}
"""
class DummyProbes():
def __init__(self, call_graph = None, updates_buffer = None, max_probes = 5000):
        if call_graph is None:
call_graph = CallGraph()
call_graph.parse_module(PATH_TO_TEST_MODULE)
self.graph = call_graph
self.dummy_calls = DummyProbes.find_addresses(self.graph, max_probes)
self.b = BPF(text=prog_text)
self.b.load_funcs()
self.fd = self.b.funcs['test_fun'].fd
    @staticmethod
    def find_addresses(call_graph, n):
results = []
for i in range(1, n):
results.extend(call_graph.calls_from_fun(dummy_name_pattern.format(i))[1:])
results = [tup[0] for tup in results]
return results
def set_kambpf_probes(self, n):
ub = UpdatesBuffer(n)
ub.add_probes([(addr, self.fd, -1) for addr in self.dummy_calls[:n]])
ub.close()
def clear_kambpf_probes(self, n):
listdev = KambpfList()
pos = listdev.get_non_empty_pos()
ub = UpdatesBuffer(len(pos))
ub.clear_probes(pos)
ub.close()
listdev.close()
def with_kambpf_probes(self, n, run_id, function):
reload_module()
self.set_kambpf_probes(n)
function("kambpfprobes", n, run_id)
self.clear_kambpf_probes(n)
def set_kprobes(self, n):
for addr in self.dummy_calls[:n]:
self.b.attach_kprobe(event=f"0x{addr:x}", fn_name="test_fun")
def clear_kprobes(self, n):
for addr in self.dummy_calls[:n]:
self.b.detach_kprobe(event=f"0x{addr:x}")
def with_kprobes(self, n, run_id, function):
self.set_kprobes(n)
function("kprobes", n, run_id)
self.clear_kprobes(n)
def reload_module(self):
reload_module()
def cleanupBPF(self):
self.b.cleanup()
def runner_pass_kambpfprobes(self, n, run_id, function):
reload_module()
function("kambpfprobes", n, run_id, lambda: self.set_kambpf_probes(n), lambda: self.clear_kambpf_probes(n))
def runner_pass_kprobes(self, n, run_id, function):
function("kprobes", n, run_id, lambda: self.set_kprobes(n), lambda: self.clear_kprobes(n))
def run_benchmarks_with_dummies(bench, step, max_probes, repetitions=1, pass_probes_to_bench=False):
"""
Run a benchmark with different number of dummy probes set on the test module.
The runs of the benchmark are shuffeled between the two mechanisms and repetitions
for the same number of dummy probes.
param bench: Benchmark funciton to run. Takes three arguments.
First argument is the probing mechanism which was used (one of kprobes, kambpfprobes).
Second argument is the number of probes set. Third argument repetition number for this config.
param step: Step increase in number of dummy probes.
param max_probes: Maximal number of dummy probes, inclusive.
param repetitions: How many times to run a benchmark with same number of dummy probes and same mech.
"""
dummies = DummyProbes()
if pass_probes_to_bench:
runners = [dummies.runner_pass_kambpfprobes, dummies.runner_pass_kprobes]
else:
runners = [dummies.with_kambpf_probes, dummies.with_kprobes]
for n_probes in range(0,max_probes+1, step):
runner_ids = [0,1]*repetitions
shuffle(runner_ids)
run_count = { 0 : 0, 1 : 0}
for r in runner_ids:
runner = runners[r]
runner(n_probes, run_count[r], bench)
run_count[r] += 1
    dummies.cleanupBPF()
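# --- Benchmark callback sketch (not part of the file above) ---
# run_benchmarks_with_dummies (with pass_probes_to_bench=False) calls the
# benchmark as bench(mechanism, n_probes, run_id). A minimal, hypothetical
# callback; the workload itself is left as a placeholder:
import time

def wall_clock_bench(mechanism, n_probes, run_id):
    start = time.perf_counter()
    # ... run the workload being measured here ...
    print(mechanism, n_probes, run_id, time.perf_counter() - start)

# Requires the kambpf/kamprobes modules and root privileges, hence commented out:
# run_benchmarks_with_dummies(wall_clock_bench, step=500, max_probes=2000, repetitions=3)

# File: databricks/private/rules/instance_pools/main.bzl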
load(
"//databricks/private/common:common.bzl",
"CHECK_CONFIG_FILE",
"CMD_INSTANCE_POOL_ID",
"CMD_INSTANCE_POOL_INFO",
"DATABRICKS_TOOLCHAIN",
)
load("//databricks/private/common:utils.bzl", "utils")
load("//databricks/private:providers/providers.bzl", "ConfigureInfo")
load("@bazel_skylib//lib:dicts.bzl", "dicts")
def _impl(ctx):
properties = utils.toolchain_properties(ctx, DATABRICKS_TOOLCHAIN)
api_cmd = ctx.attr._command
cmd = []
configure = ctx.attr.configure
configure_info = configure[ConfigureInfo]
reader_config_file = ctx.attr._config_file_reader.files_to_run.executable.short_path
runfiles = ctx.attr._config_file_reader.files.to_list()
transitive_files = (
properties.toolchain_info_file_list +
properties.jq_info_file_list +
configure[DefaultInfo].default_runfiles.files.to_list()
)
variables = [
'CLI="%s"' % properties.cli,
'CMD="%s %s $@"' % (ctx.attr._api, api_cmd),
'export DATABRICKS_CONFIG_FILE="%s"' % configure_info.config_file,
'DEFAULT_OPTIONS="--profile %s"' % configure_info.profile,
'JQ_TOOL="%s"' % properties.jq_tool,
'READER_CONFIG_FILE="%s"' % reader_config_file,
]
substitutions_file = ctx.actions.declare_file(ctx.label.name + ".substitutions.json")
ctx.actions.expand_template(
template = ctx.file.template,
output = substitutions_file,
substitutions = ctx.attr.substitutions,
)
runfiles += [substitutions_file]
variables += [
"INSTANCE_POOL_JSONFILE_TEMPLATE=$(cat %s)" % substitutions_file.short_path,
]
    substitutions_conditions = CHECK_CONFIG_FILE
    if api_cmd in ["create", "edit"]:
        cmd.append("exe ${CLI} ${CMD} ${DEFAULT_OPTIONS} --json-file %s" % (substitutions_file.short_path))
    elif api_cmd in ["get"]:
        substitutions_conditions += CMD_INSTANCE_POOL_INFO
        cmd.append("exe ${JQ_TOOL} -M . <<< ${INSTANCE_POOL_INFO}")
    elif api_cmd in ["delete"]:
        substitutions_conditions += (CMD_INSTANCE_POOL_INFO + CMD_INSTANCE_POOL_ID)
        cmd.append("exe ${CLI} ${CMD} ${DEFAULT_OPTIONS} --instance-pool-id ${INSTANCE_POOL_ID}")
    else:
        cmd.append('exe ${JQ_TOOL} -M . "%s"' % substitutions_file.short_path)
ctx.actions.expand_template(
is_executable = True,
output = ctx.outputs.executable,
template = ctx.file._resolve_tpl,
substitutions = {
"%{VARIABLES}": "\n".join(variables),
"%{CONDITIONS}": substitutions_contitions,
"%{CMD}": " && ".join(cmd),
},
)
return [
DefaultInfo(
runfiles = ctx.runfiles(
files = runfiles,
transitive_files = depset(transitive_files),
),
executable = ctx.outputs.executable,
),
]
_common_attr = {
"_api": attr.string(
default = "instance-pools",
),
"_config_file_reader": attr.label(
default = Label("//databricks/private/cmd/config_file_reader:main"),
executable = True,
cfg = "host",
),
"_resolve_tpl": attr.label(
default = utils.resolve_tpl,
allow_single_file = True,
),
"configure": attr.label(
mandatory = True,
providers = [ConfigureInfo],
),
"template": attr.label(
mandatory = True,
allow_single_file = [".json"],
),
"substitutions": attr.string_dict(),
}
_instance_pools_resolve = rule(
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
implementation = _impl,
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "resolve"),
},
),
)
_instance_pools_get = rule(
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
implementation = _impl,
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "get"),
},
),
)
_instance_pools_create = rule(
implementation = _impl,
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "create"),
},
),
)
_instance_pools_edit = rule(
implementation = _impl,
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "edit"),
},
),
)
_instance_pools_delete = rule(
implementation = _impl,
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "delete"),
},
),
)
def instance_pools(name, **kwargs):
_instance_pools_resolve(name = name, **kwargs)
_instance_pools_resolve(name = name + ".resolve", **kwargs)
_instance_pools_get(name = name + ".get", **kwargs)
_instance_pools_create(name = name + ".create", **kwargs)
_instance_pools_edit(name = name + ".edit", **kwargs)
_instance_pools_delete(name = name + ".delete", **kwargs) | databricks/private/rules/instance_pools/main.bzl | load(
"//databricks/private/common:common.bzl",
"CHECK_CONFIG_FILE",
"CMD_INSTANCE_POOL_ID",
"CMD_INSTANCE_POOL_INFO",
"DATABRICKS_TOOLCHAIN",
)
load("//databricks/private/common:utils.bzl", "utils")
load("//databricks/private:providers/providers.bzl", "ConfigureInfo")
load("@bazel_skylib//lib:dicts.bzl", "dicts")
def _impl(ctx):
properties = utils.toolchain_properties(ctx, DATABRICKS_TOOLCHAIN)
api_cmd = ctx.attr._command
cmd = []
configure = ctx.attr.configure
configure_info = configure[ConfigureInfo]
reader_config_file = ctx.attr._config_file_reader.files_to_run.executable.short_path
runfiles = ctx.attr._config_file_reader.files.to_list()
transitive_files = (
properties.toolchain_info_file_list +
properties.jq_info_file_list +
configure[DefaultInfo].default_runfiles.files.to_list()
)
variables = [
'CLI="%s"' % properties.cli,
'CMD="%s %s $@"' % (ctx.attr._api, api_cmd),
'export DATABRICKS_CONFIG_FILE="%s"' % configure_info.config_file,
'DEFAULT_OPTIONS="--profile %s"' % configure_info.profile,
'JQ_TOOL="%s"' % properties.jq_tool,
'READER_CONFIG_FILE="%s"' % reader_config_file,
]
substitutions_file = ctx.actions.declare_file(ctx.label.name + ".substitutions.json")
ctx.actions.expand_template(
template = ctx.file.template,
output = substitutions_file,
substitutions = ctx.attr.substitutions,
)
runfiles += [substitutions_file]
variables += [
"INSTANCE_POOL_JSONFILE_TEMPLATE=$(cat %s)" % substitutions_file.short_path,
]
substitutions_contitions = CHECK_CONFIG_FILE
if api_cmd in ["create", "edit"]:
cmd.append("exe ${CLI} ${CMD} ${DEFAULT_OPTIONS} --json-file %s" % (substitutions_file.short_path))
elif api_cmd in ["get"]:
substitutions_contitions += CMD_INSTANCE_POOL_INFO
cmd.append("exe ${JQ_TOOL} -M . <<< ${INSTANCE_POOL_INFO}")
elif api_cmd in ["delete"]:
substitutions_contitions += (CMD_INSTANCE_POOL_INFO + CMD_INSTANCE_POOL_ID)
cmd.append("exe ${CLI} ${CMD} ${DEFAULT_OPTIONS} --instance-pool-id ${INSTANCE_POOL_ID}")
else:
cmd.append('exe ${JQ_TOOL} -M . "%s"' % substitutions_file.short_path)
ctx.actions.expand_template(
is_executable = True,
output = ctx.outputs.executable,
template = ctx.file._resolve_tpl,
substitutions = {
"%{VARIABLES}": "\n".join(variables),
"%{CONDITIONS}": substitutions_contitions,
"%{CMD}": " && ".join(cmd),
},
)
return [
DefaultInfo(
runfiles = ctx.runfiles(
files = runfiles,
transitive_files = depset(transitive_files),
),
executable = ctx.outputs.executable,
),
]
_common_attr = {
"_api": attr.string(
default = "instance-pools",
),
"_config_file_reader": attr.label(
default = Label("//databricks/private/cmd/config_file_reader:main"),
executable = True,
cfg = "host",
),
"_resolve_tpl": attr.label(
default = utils.resolve_tpl,
allow_single_file = True,
),
"configure": attr.label(
mandatory = True,
providers = [ConfigureInfo],
),
"template": attr.label(
mandatory = True,
allow_single_file = [".json"],
),
"substitutions": attr.string_dict(),
}
_instance_pools_resolve = rule(
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
implementation = _impl,
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "resolve"),
},
),
)
_instance_pools_get = rule(
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
implementation = _impl,
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "get"),
},
),
)
_instance_pools_create = rule(
implementation = _impl,
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "create"),
},
),
)
_instance_pools_edit = rule(
implementation = _impl,
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "edit"),
},
),
)
_instance_pools_delete = rule(
implementation = _impl,
executable = True,
toolchains = [DATABRICKS_TOOLCHAIN],
attrs = dicts.add(
_common_attr,
{
"_command": attr.string(default = "delete"),
},
),
)
def instance_pools(name, **kwargs):
_instance_pools_resolve(name = name, **kwargs)
_instance_pools_resolve(name = name + ".resolve", **kwargs)
_instance_pools_get(name = name + ".get", **kwargs)
_instance_pools_create(name = name + ".create", **kwargs)
_instance_pools_edit(name = name + ".edit", **kwargs)
_instance_pools_delete(name = name + ".delete", **kwargs) | 0.340266 | 0.107017 |
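For reference, the instance_pools macro at the end of this file fans one name out into six targets (name plus .resolve, .get, .create, .edit, .delete), all sharing the same attributes. A hypothetical BUILD-file usage, with made-up labels and substitution keys:

load("//databricks/private/rules/instance_pools:main.bzl", "instance_pools")

instance_pools(
    name = "demo_pool",  # also generates demo_pool.resolve/.get/.create/.edit/.delete
    configure = ":configure",  # hypothetical target providing ConfigureInfo
    template = "instance_pool.json",  # JSON template consumed by expand_template
    substitutions = {"%{POOL_NAME}": "demo"},  # illustrative substitution only
)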
__all__ = [
'make_read_write_lock',
]
import threading
from g1.bases.assertions import ASSERT
def make_read_write_lock():
rwlock = ReadWriteLock()
return (
LockLike(rwlock.reader_acquire, rwlock.reader_release),
LockLike(rwlock.writer_acquire, rwlock.writer_release),
)
class LockLike:
def __init__(self, acquire, release):
self.acquire = acquire
self.release = release
def __enter__(self):
self.acquire()
def __exit__(self, *_):
self.release()
class ReadWriteLock:
"""Readers-writer lock.
The writer part of the lock behaves much like an ordinary lock,
but the reader part, in the current implementation, is somewhat
like a reentrant lock (the same thread may acquire a reader lock
multiple times).
NOTE: stdlib's Lock.acquire takes both blocking and timeout
arguments even though timeout alone is sufficient in all use
cases. The blocking argument seems to exist only for backward
compatibility, and it complicates Lock.acquire's interface; so I
prefer to omit the blocking argument here, breaking compatibility
with stdlib.
"""
def __init__(self):
self._lock = threading.Lock()
self._reader_cond = threading.Condition(self._lock)
self._num_readers = 0
self._writer_cond = threading.Condition(self._lock)
self._num_writers = 0
def reader_acquire(self, *, timeout=None):
with self._lock:
if not self._reader_cond.wait_for(
lambda: self._num_writers == 0,
timeout=timeout,
):
return False
self._num_readers += 1
return True
def reader_release(self):
with self._lock:
ASSERT.greater(self._num_readers, 0)
ASSERT.equal(self._num_writers, 0)
self._num_readers -= 1
if self._num_readers == 0:
self._writer_cond.notify()
def writer_acquire(self, *, timeout=None):
with self._lock:
if not self._writer_cond.wait_for(
lambda: self._num_readers == 0 and self._num_writers == 0,
timeout=timeout,
):
return False
self._num_writers += 1
return True
def writer_release(self):
with self._lock:
ASSERT.equal(self._num_readers, 0)
ASSERT.equal(self._num_writers, 1)
self._num_writers = 0
self._reader_cond.notify_all()
self._writer_cond.notify() | py/g1/threads/g1/threads/locks.py | 0.520496 | 0.174762
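A short usage sketch for the factory above, assuming make_read_write_lock from this module is in scope: it returns a (reader, writer) pair of context managers, so concurrent readers may hold the lock together while a writer waits for exclusive access.

import threading
import time

reader, writer = make_read_write_lock()  # assumes the factory above is in scope
shared = {"value": 0}

def read_loop():
    for _ in range(3):
        with reader:  # many readers may hold this simultaneously
            _ = shared["value"]
        time.sleep(0.01)

def write_once():
    with writer:  # exclusive: blocks until no readers remain
        shared["value"] += 1

threads = [threading.Thread(target=read_loop) for _ in range(4)]
threads.append(threading.Thread(target=write_once))
for t in threads:
    t.start()
for t in threads:
    t.join()
print(shared["value"])  # 1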
from random import randint
from getpass import getpass
import string
# this generates the dictionary that the cypher uses
allprint = dict((k,v) for v,k in enumerate(string.printable))
# remove the useless characters
del allprint['\t']
del allprint['\n']
del allprint['\r']
del allprint['\x0c']
del allprint['\x0b']
# construct the reverse dictionary from the allprint one
translate_dict = dict([(j,i) for i,j in allprint.items()])
# make random keys for the encryption that can be used later (sent to the recipient etc.)
shiftvalue_1 = randint(-93,93)
shiftvalue_2 = randint(-93,93)
while shiftvalue_1 == shiftvalue_2:
shiftvalue_2 = randint(-93,93)
shiftvalue_3 = randint(-93,93)
while shiftvalue_3 in (shiftvalue_1, shiftvalue_2): # keep all three keys distinct
shiftvalue_3 = randint(-93,93)
# The function for encrypting:
def encrypt(message, shift1, shift2, shift3):
message_cyphertext_values = [] # initialise the list for the message keys
for letter in list(message): # extract the letters from the message
message_cyphertext_values.append(allprint[letter]) # populate the cyphertext values
# pair each cyphertext value with its index
message_windex = enumerate(message_cyphertext_values)
cyphertext = [] # initialise the cyphertext list for the text
# actually encrypt the message, cycling through the three shifts
for index, value in message_windex:
if index % 3 == 0:
j = (value + shift1) % 95
elif index % 3 == 1:
j = (value + shift2) % 95
else:
j = (value + shift3) % 95
cyphertext.append(translate_dict[j])
return cyphertext
def decrypt(message, shift1, shift2, shift3):
message_decyphertext_values = [] # initialise the list for the message keys
for letter in list(message): # extract the letters from the message
message_decyphertext_values.append(allprint[letter]) # populate the cyphertext values
# pair each cyphertext value with its index
message_windex = enumerate(message_decyphertext_values)
decyphertext = [] # initialise the decyphertext list for the text
# decrypt the message, reversing the three shifts
for index, value in message_windex:
if index % 3 == 0:
j = (value - shift1) % 95
elif index % 3 == 1:
j = (value - shift2) % 95
else:
j = (value - shift3) % 95
decyphertext.append(translate_dict[j])
return decyphertext
# the message variable is assigned by user input which is hidden by getpass
message = str(getpass("Enter the message to be encrypted: "))
encoded_message = encrypt(message,shiftvalue_1,shiftvalue_2,shiftvalue_3)
print("encypted message: %s" % ''.join(encoded_message))
decrypt_q = input("decrypt? (y/n) ")
if decrypt_q == 'y':
decoded_message = decrypt(encoded_message,shiftvalue_1,shiftvalue_2,shiftvalue_3)
print("message was: ")
print(''.join(decoded_message))
# demo run with fixed shift values (1, 2, 3)
message = str(getpass("Enter the message to be encrypted: "))
encoded_message = encrypt(message,1,2,3)
print("encypted message: %s" % ''.join(encoded_message))
decoded_message = decrypt(encoded_message,1,2,3)
print("decrypted message: %s" % ''.join(decoded_message)) | encryption_1.py | from random import randint
from getpass import getpass
import string
# this generates the dictionary that the cypher uses
allprint = dict((k,v) for v,k in enumerate(string.printable))
# remove the useless characters
del allprint['\t']
del allprint['\n']
del allprint['\r']
del allprint['\x0c']
del allprint['\x0b']
# construct the reverse dictionary from the allprint one
translate_dict = dict([(j,i) for i,j in allprint.items()])
# make random keys for the encrpytion that can be used later (sent to the recipient etc.)
shiftvalue_1 = randint(-93,93)
shiftvalue_2 = randint(-93,93)
while shiftvalue_1 == shiftvalue_2:
shiftvalue_2 = randint(-93,93)
shiftvalue_3 = randint(-93,93)
while shiftvalue_2 == shiftvalue_3:
shiftvalue_3 = randint(-93,93)
# The function for encrypting:
def encrypt(message, shift1, shift2, shift3):
message_cyphertext_values = [] # initalise the list for the message keys
for letter in list(message): # extract the letters from the message
message_cyphertext_values.append(allprint[letter]) # populate the cyphertest values
# index the cyphertext for ease
message_windex = zip(range(len(message_cyphertext_values)),list(message_cyphertext_values))
cyphertext = [] # initilise the cypertext list for the text
# actually encrypt the message
for index,value in message_windex:
if index in range(len(message))[0::3]:
j = (value + shift1) % 95
cyphertext.append(translate_dict[j])
elif index in range(len(message))[1::3]:
j = (value + shift2) % 95
cyphertext.append(translate_dict[j])
elif index in range(len(message))[2::3]:
j = (value + shift3) % 95
cyphertext.append(translate_dict[j])
return cyphertext
def decrypt(message, shift1, shift2, shift3):
message_decyphertext_values = [] # initalise the list for the message keys
for letter in list(message): # extract the letters from the message
message_decyphertext_values.append(allprint[letter]) # populate the cyphertest values
# index the decyphertext for ease
message_windex = zip(range(len(message_decyphertext_values)),list(message_decyphertext_values))
decyphertext = [] # initilise the decypertext list for the text
# decrypt the message
for index,value in message_windex:
if index in range(len(message))[0::3]:
j = (value - shift1) % 95
decyphertext.append(translate_dict[j])
elif index in range(len(message))[1::3]:
j = (value - shift2) % 95
decyphertext.append(translate_dict[j])
elif index in range(len(message))[2::3]:
j = (value - shift3) % 95
decyphertext.append(translate_dict[j])
return decyphertext
# the message variable is assigned by user input which is hidden by getpass
message = str(getpass("Enter the message to be encrypted: "))
encoded_message = encrypt(message,shiftvalue_1,shiftvalue_2,shiftvalue_3)
print("encypted message: %s" % ''.join(encoded_message))
decrypt_q = input("decrypt? (y/n) ")
if decrypt_q == 'y':
decoded_message = decrypt(encoded_message,shiftvalue_1,shiftvalue_2,shiftvalue_3)
print("message was: ")
print(''.join(decoded_message))
message = str(getpass("Enter the message to be encrypted: "))
encoded_message = encrypt(message,1,2,3)
print("encypted message: %s" % ''.join(encoded_message))
decoded_message = decrypt(encoded_message,1,2,3)
print("decrypted message: %s" % ''.join(decoded_message)) | 0.174903 | 0.169406 |
import sys
import jax
import jax.numpy as jnp
import chex
import numpy as np
import haiku as hk
from absl.testing import absltest
import rljaxff.alpha_zero.network as net_lib
# pylint: disable=missing-class-docstring
# pylint: disable=missing-function-docstring
# pylint: disable=protected-access
def _sample_input(input_shape):
return jnp.zeros((1,) + input_shape, dtype=jnp.float32)
class AZNetworkTest(absltest.TestCase):
def setUp(self):
self.input_size = (6, )
self.num_actions = 3
self.hidden_sizes = [5, 4]
self.seed = 0
random_state = np.random.RandomState(self.seed)
self.rng_key = jax.random.PRNGKey(
random_state.randint(
-sys.maxsize - 1, sys.maxsize + 1, dtype=np.int64
)
)
network_fn = net_lib.alphazero_network(
self.hidden_sizes, self.num_actions
)
self.network = hk.transform(network_fn)
def test_init(self):
params = self.network.init(
self.rng_key, _sample_input(self.input_size)
)
self.assertLen(params, 2*len(self.hidden_sizes))
def test_apply(self):
inputs = _sample_input(self.input_size)
params = self.network.init(
self.rng_key, _sample_input(self.input_size)
)
output = self.network.apply(params, self.rng_key, inputs)
chex.assert_shape(output.value[0], (1, ))
chex.assert_shape(output.action_probs[0], (self.num_actions, ))
def test_apply_batch(self):
batch_size = 8
inputs = jnp.zeros((batch_size,) + self.input_size, dtype=jnp.float32)
params = self.network.init(
self.rng_key, _sample_input(self.input_size)
)
outputs = self.network.apply(params, self.rng_key, inputs)
chex.assert_shape(outputs.value, (batch_size, 1))
chex.assert_shape(outputs.action_probs, (batch_size, self.num_actions))
if __name__ == "__main__":
absltest.main() | rljaxff/alpha_zero/network_test.py | 0.359252 | 0.31785
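The test above follows the standard Haiku pattern: wrap a network-building function with hk.transform, call .init with an RNG key and a sample input to create parameters, then run the forward pass with .apply. A minimal sketch of the same flow, using a stock MLP in place of the repository's alphazero_network:

import jax
import jax.numpy as jnp
import haiku as hk

def forward(x):
    # two hidden layers plus an output head, mirroring hidden_sizes=[5, 4]
    return hk.nets.MLP([5, 4, 3])(x)

network = hk.transform(forward)
rng = jax.random.PRNGKey(0)
sample = jnp.zeros((1, 6), dtype=jnp.float32)

params = network.init(rng, sample)        # build parameters from a sample input
out = network.apply(params, rng, sample)  # run the forward pass
print(out.shape)  # (1, 3)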
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from typing import Any
from rasa_nlu.extractors import EntityExtractor
from rasa_nlu.training_data import Message
class OrderCodeExtractor(EntityExtractor):
name = "ner_order_code"
provides = ["entities"]
requires = ["spacy_nlp"]
def process(self, message, **kwargs):
# type: (Message, **Any) -> None
text = message.text
if "order_code" in message.get("intent").get("name"):
for entity_mapper in message.get("entities"):
if entity_mapper["entity"] == "order_code":
return
order_code = ""
if len(text.split()) == 1:
order_code = text
else:
spacy_nlp = kwargs.get("spacy_nlp", None)
doc = spacy_nlp(text)
for token in doc:
deps = list(token.lefts)
additional_txt = ""
# Get external punctuation
if len(deps) > 0 and not deps[0].text.isalnum():
additional_txt = deps[0].text
if token.pos_ in ["NUM", "PROPN"]:
order_code = additional_txt + token.text
break
if token.head.pos_ == "VERB" and token.dep_ == "attr":
order_code = additional_txt + token.text
break
if order_code != "":
start_idx = text.find(order_code)
end_idx = start_idx + len(order_code)
entity = {
"entity": "order_code",
"value": order_code,
"start": start_idx,
"end": end_idx,
"confidence": 1.0,
"extractor": self.name,
}
message.set("entities",
message.get("entities", []) + [entity],
add_to_output=True)
# Rule-based on single word and is alpha-number
if len(text.split()) == 1 and not text.isalpha() and not text.isdigit() and text.isalnum() and '@' not in text:
entity = {
"entity": "order_code",
"value": text,
"start": 1,
"end": len(text) - 1,
"confidence": 0.7,
"extractor": self.name,
}
message.set("entities",
message.get("entities", []) + [entity],
add_to_output=True) | rasa_nlu/extractors/order_code_extractor.py | 0.591133 | 0.162912
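The rule-based fallback above reduces to a single predicate: a lone alphanumeric token that mixes letters and digits, and is not email-like, is treated as an order-code candidate. A standalone sketch of that heuristic; the function name is illustrative:

def looks_like_order_code(text):
    # single token, alphanumeric, mixing letters and digits, no '@'
    return (len(text.split()) == 1
            and text.isalnum()
            and not text.isalpha()
            and not text.isdigit()
            and '@' not in text)

assert looks_like_order_code("AB1234")
assert not looks_like_order_code("hello")    # letters only
assert not looks_like_order_code("123456")   # digits only
assert not looks_like_order_code("a b1")     # two tokens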
from beem.utils import formatTimeString, resolve_authorperm, construct_authorperm, addTzInfo
from beem.nodelist import NodeList
from beem.comment import Comment
from beem import Steem
from beem.account import Account
from beem.instance import set_shared_steem_instance
from beem.blockchain import Blockchain
import time
import json
import os
import math
import dataset
import random
from datetime import date, datetime, timedelta
from dateutil.parser import parse
from beem.constants import STEEM_100_PERCENT
from steemrewarding.post_storage import PostsTrx
from steemrewarding.command_storage import CommandsTrx
from steemrewarding.vote_rule_storage import VoteRulesTrx
from steemrewarding.pending_vote_storage import PendingVotesTrx
from steemrewarding.config_storage import ConfigurationDB
from steemrewarding.vote_storage import VotesTrx
from steemrewarding.vote_log_storage import VoteLogTrx
from steemrewarding.failed_vote_log_storage import FailedVoteLogTrx
from steemrewarding.broadcast_vote_storage import BroadcastVoteTrx
from steemrewarding.utils import isfloat, upvote_comment, valid_age, upvote_comment_without_check
from steemrewarding.account_storage import AccountsDB
from steemrewarding.version import version as rewarding_version
if __name__ == "__main__":
config_file = 'config.json'
if not os.path.isfile(config_file):
raise Exception("config.json is missing!")
else:
with open(config_file) as json_data_file:
config_data = json.load(json_data_file)
# print(config_data)
databaseConnector = config_data["databaseConnector"]
wallet_password = config_data["wallet_password"]
posting_auth_acc = config_data["posting_auth_acc"]
voting_round_sec = config_data["voting_round_sec"]
start_prep_time = time.time()
db = dataset.connect(databaseConnector)
# Create keyStorage
nobroadcast = False
# nobroadcast = True
postTrx = PostsTrx(db)
voteRulesTrx = VoteRulesTrx(db)
confStorage = ConfigurationDB(db)
pendingVotesTrx = PendingVotesTrx(db)
voteLogTrx = VoteLogTrx(db)
failedVoteLogTrx = FailedVoteLogTrx(db)
accountsTrx = AccountsDB(db)
broadcastVoteTrx = BroadcastVoteTrx(db)
conf_setup = confStorage.get()
# last_post_block = conf_setup["last_post_block"]
nodes = NodeList()
# nodes.update_nodes(weights={"block": 1})
try:
nodes.update_nodes()
except:
print("could not update nodes")
node_list = nodes.get_nodes(exclude_limited=False)
stm = Steem(node=node_list, num_retries=5, call_num_retries=3, timeout=15, nobroadcast=nobroadcast)
stm.wallet.unlock(wallet_password)
print("Use node %s" % str(stm))
last_voter = None
for vote in broadcastVoteTrx.get_all_expired():
if last_voter is not None and last_voter == vote["voter"]:
print("Skip %s for this round" % vote["voter"])
continue
voter_acc = Account(vote["voter"], steem_instance=stm)
if voter_acc.get_rc_manabar()["current_mana"] / 1e9 < 0.1:
print("%s has not sufficient RC" % vote["voter"])
last_voter = vote["voter"]
continue
if vote["retry_count"] >= 5:
broadcastVoteTrx.update_processed(vote["voter"], vote["authorperm"], None, False, True)
continue
if vote["expiration"] is not None and vote["expiration"] < datetime.utcnow():
continue
if vote["weight"] < 0.01:
continue
try:
print("voter %s votes %s" % (vote["voter"], vote["authorperm"]))
stm.vote(vote["weight"], vote["authorperm"], vote["voter"])
except Exception as e:
print("Vote failed: %s" % str(e))
last_voter = vote["voter"]
broadcastVoteTrx.update({"voter": vote["voter"], "authorperm": vote["authorperm"], "retry_count": vote["retry_count"] + 1})
print("Start apply new votes")
vote_count = 0
delete_pending_votes = []
for pending_vote in pendingVotesTrx.get_command_list_timed():
settings = None
# print("time vote %.2f s - %d votes" % (time.time() - start_prep_time, vote_count))
if (pending_vote["vote_weight"] is None or pending_vote["vote_weight"] <= 0) and (pending_vote["vote_sbd"] is None or float(pending_vote["vote_sbd"]) <= 0):
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero. (%s %% and %s $)" % (pending_vote["vote_weight"], pending_vote["vote_sbd"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_min = (datetime.utcnow() - pending_vote["comment_timestamp"]).total_seconds() / 60
maximum_vote_delay_min = pending_vote["maximum_vote_delay_min"]
if maximum_vote_delay_min < 0:
maximum_vote_delay_min = 9360
if age_min > maximum_vote_delay_min + voting_round_sec / 60:
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than %.2f min." % (maximum_vote_delay_min),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if age_min < pending_vote["vote_delay_min"] - voting_round_sec / 2.0 / 60:
continue
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
if voter_acc.sp < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as Steem Power is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if voter_acc.get_rc_manabar()["current_mana"] / 1e9 < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as RC is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
vote_weight = pending_vote["vote_weight"]
if vote_weight is None or vote_weight <= 0:
vote_weight = voter_acc.get_vote_pct_for_SBD(float(pending_vote["vote_sbd"])) / 100.
if vote_weight > 100:
vote_weight = 100
elif vote_weight < 0.01:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_hour = ((datetime.utcnow()) - pending_vote["created"]).total_seconds() / 60 / 60
if age_hour > 156:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than 6.5 days.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_min = ((datetime.utcnow()) - pending_vote["created"]).total_seconds() / 60
if age_min < pending_vote["vote_delay_min"] - voting_round_sec / 2.0 / 60:
continue
try:
c = Comment(pending_vote["authorperm"], use_tags_api=True, steem_instance=stm)
except:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not process %s" % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if pending_vote["max_net_votes"] >= 0 and pending_vote["max_net_votes"] < c["net_votes"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The number of post/comment votes (%d) is higher than max_net_votes (%d)." % (c["net_votes"], pending_vote["max_net_votes"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["max_pending_payout"] >= 0 and pending_vote["max_pending_payout"] < float(c["pending_payout_value"]):
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The pending payout of post/comment votes (%.2f) is higher than max_pending_payout (%.2f)." % (float(c["pending_payout_value"]), pending_vote["max_pending_payout"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
# check for max votes per day/week
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_day"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_24h_before = voteLogTrx.get_votes_per_day(pending_vote["voter"], author, sliding_time_window)
if votes_24h_before >= pending_vote["max_votes_per_day"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 24h (max_votes_per_day is %d)." % (votes_24h_before, pending_vote["max_votes_per_day"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_week"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_168h_before = voteLogTrx.get_votes_per_week(pending_vote["voter"], author, sliding_time_window)
if votes_168h_before >= pending_vote["max_votes_per_week"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 7 days (max_votes_per_week is %d)." % (votes_168h_before, pending_vote["max_votes_per_week"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if voter_acc.vp < pending_vote["min_vp"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Voting power is %.2f %%, which is to low. (min_vp is %.2f %%)" % (voter_acc.vp, pending_vote["min_vp"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
pause_votes_below_vp = settings["pause_votes_below_vp"]
if settings["vp"] is None:
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
else:
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
pause_votes_below_vp = 0
if voter_acc.vp < pause_votes_below_vp:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Voting is paused (VP = %.2f %%, which below pause_votes_below_vp of %.2f %%)" % (voter_acc.vp, pause_votes_below_vp),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
posting_auth = False
for a in voter_acc["posting"]["account_auths"]:
if a[0] == posting_auth_acc:
posting_auth = True
if voter_acc["name"] == posting_auth_acc:
posting_auth = True
already_voted = False
for v in c["active_votes"]:
if voter_acc["name"] == v["voter"]:
already_voted = True
if not posting_auth or already_voted:
if already_voted:
error_msg = "already voted."
else:
error_msg = "posting authority is missing"
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["vp_scaler"] > 0:
vote_weight *= 1 - ((100 - voter_acc.vp) / 100 * pending_vote["vp_scaler"])
if vote_weight <= 0:
error_msg = "Vote weight is zero or below zero (%.2f %%)" % vote_weight
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
# success = upvote_comment(c, voter_acc["name"], vote_weight)
reply_message = upvote_comment_without_check(c, voter_acc["name"], vote_weight)
if reply_message is not None:
vote_count += 1
if pending_vote["leave_comment"]:
try:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None and "upvote_comment" in settings and settings["upvote_comment"] is not None:
json_metadata = {'app': 'rewarding/%s' % (rewarding_version)}
reply_body = settings["upvote_comment"]
reply_body = reply_body.replace("{{name}}", "@%s" % c["author"] ).replace("{{voter}}", "@%s" % voter_acc["name"])
c.reply(reply_body, author=voter_acc["name"], meta=json_metadata)
except:
print("Could not leave comment!")
voteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "author": c["author"],
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"voted_after_min": age_min, "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"trail_vote": pending_vote["trail_vote"], "main_post": pending_vote["main_post"],
"voter_to_follow": pending_vote["voter_to_follow"]})
broadcastVoteTrx.add({"expiration": formatTimeString(reply_message["expiration"]).replace(tzinfo=None), "authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
else:
broadcastVoteTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight, "valid": True})
for pending_vote in delete_pending_votes:
pendingVotesTrx.delete(pending_vote["authorperm"], pending_vote["voter"], pending_vote["vote_when_vp_reached"])
delete_pending_votes = []
print("time vote %.2f s - %d votes" % (time.time() - start_prep_time, vote_count))
votes_above_vp = 0
votes_below_vp = 0
for pending_vote in pendingVotesTrx.get_command_list_vp_reached():
settings = None
if (pending_vote["vote_weight"] is None or pending_vote["vote_weight"] <= 0) and (pending_vote["vote_sbd"] is None or float(pending_vote["vote_sbd"]) <= 0):
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero.",
"timestamp": datetime.utcnow(), "vote_weight": 0, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_min = (datetime.utcnow() - pending_vote["comment_timestamp"]).total_seconds() / 60
maximum_vote_delay_min = pending_vote["maximum_vote_delay_min"]
if maximum_vote_delay_min > 0 and age_min > maximum_vote_delay_min + voting_round_sec / 60:
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than %.2f min." % (maximum_vote_delay_min),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if age_min < pending_vote["vote_delay_min"] - voting_round_sec / 2.0 / 60:
continue
settings = accountsTrx.get(pending_vote["voter"])
if settings is None:
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
pause_votes_below_vp = 0
vp = voter_acc.vp
else:
pause_votes_below_vp = settings["pause_votes_below_vp"]
vp = settings["vp"]
vp_update = settings["vp_update"]
if vp_update is not None:
diff_in_seconds = ((datetime.utcnow()) - (vp_update)).total_seconds()
if diff_in_seconds < 3600:
regenerated_vp = diff_in_seconds * 10000 / 432000 / 100
if vp + regenerated_vp < pending_vote["min_vp"]:
votes_below_vp += 1
continue
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
if voter_acc.sp < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as Steem Power is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if voter_acc.get_rc_manabar()["current_mana"] / 1e9 < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as RC is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if voter_acc.vp < pending_vote["min_vp"]:
votes_below_vp += 1
continue
votes_above_vp += 1
print("Votes above min_vp %d / below %d" % (votes_above_vp, votes_below_vp))
if voter_acc.vp < pause_votes_below_vp:
continue
vote_weight = pending_vote["vote_weight"]
if vote_weight <= 0:
vote_weight = voter_acc.get_vote_pct_for_SBD(float(pending_vote["vote_sbd"])) / 100.
if vote_weight > 100:
vote_weight = 100
elif vote_weight < 0.01:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
try:
c = Comment(pending_vote["authorperm"], steem_instance=stm)
except:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not process %s" % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if not valid_age(c):
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than 6.5 days.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["max_net_votes"] >= 0 and pending_vote["max_net_votes"] < c["net_votes"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The number of post/comment votes (%d) is higher than max_net_votes (%d)." % (c["net_votes"], pending_vote["max_net_votes"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["max_pending_payout"] >= 0 and pending_vote["max_pending_payout"] < float(c["pending_payout_value"]):
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The pending payout of post/comment votes (%.2f) is higher than max_pending_payout (%.2f)." % (float(c["pending_payout_value"]), pending_vote["max_pending_payout"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_day"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_24h_before = voteLogTrx.get_votes_per_day(pending_vote["voter"], author, sliding_time_window)
if votes_24h_before >= pending_vote["max_votes_per_day"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 24h (max_votes_per_day is %d)." % (votes_24h_before, pending_vote["max_votes_per_day"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_week"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_168h_before = voteLogTrx.get_votes_per_week(pending_vote["voter"], author, sliding_time_window)
if votes_168h_before >= pending_vote["max_votes_per_week"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 7 days (max_votes_per_week is %d)." % (votes_168h_before, pending_vote["max_votes_per_week"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
posting_auth = False
for a in voter_acc["posting"]["account_auths"]:
if a[0] == posting_auth_acc:
posting_auth = True
if voter_acc["name"] == posting_auth_acc:
posting_auth = True
already_voted = False
for v in c["active_votes"]:
if voter_acc["name"] == v["voter"]:
already_voted = True
if not posting_auth or already_voted:
if already_voted:
error_msg = "already voted."
else:
error_msg = "posting authority is missing"
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"] })
continue
if pending_vote["vp_scaler"] > 0:
vote_weight *= 1 - ((100 - voter_acc.vp) / 100 * pending_vote["vp_scaler"])
if vote_weight <= 0:
error_msg = "Vote weight is zero or below zero (%.2f %%)" % vote_weight
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
# success = upvote_comment(c, voter_acc["name"], vote_weight)
reply_message = upvote_comment_without_check(c, voter_acc["name"], vote_weight)
if reply_message is not None:
vote_count += 1
if pending_vote["leave_comment"]:
try:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None and "upvote_comment" in settings and settings["upvote_comment"] is not None:
json_metadata = {'app': 'rewarding/%s' % (rewarding_version)}
reply_body = settings["upvote_comment"]
reply_body = reply_body.replace("{{name}}", "@%s" % c["author"] ).replace("{{voter}}", "@%s" % voter_acc["name"])
c.reply(reply_body, author=voter_acc["name"], meta=json_metadata)
except:
print("Could not leave comment!")
# add vote to log
voteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "author": c["author"],
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"voted_after_min": age_min, "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"trail_vote": pending_vote["trail_vote"], "main_post": pending_vote["main_post"],
"voter_to_follow": pending_vote["voter_to_follow"], "is_pending": True})
broadcastVoteTrx.add({"expiration": formatTimeString(reply_message["expiration"]).replace(tzinfo=None), "authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
else:
broadcastVoteTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight, "valid": True})
continue
for pending_vote in delete_pending_votes:
pendingVotesTrx.delete(pending_vote["authorperm"], pending_vote["voter"], pending_vote["vote_when_vp_reached"])
delete_pending_votes = []
print("upvote posts script run %.2f s - %d votes were broadcasted" % (time.time() - start_prep_time, vote_count)) | upvote_post_comments.py | from beem.utils import formatTimeString, resolve_authorperm, construct_authorperm, addTzInfo
from beem.nodelist import NodeList
from beem.comment import Comment
from beem import Steem
from beem.account import Account
from beem.instance import set_shared_steem_instance
from beem.blockchain import Blockchain
import time
import json
import os
import math
import dataset
import random
from datetime import date, datetime, timedelta
from dateutil.parser import parse
from beem.constants import STEEM_100_PERCENT
from steemrewarding.post_storage import PostsTrx
from steemrewarding.command_storage import CommandsTrx
from steemrewarding.vote_rule_storage import VoteRulesTrx
from steemrewarding.pending_vote_storage import PendingVotesTrx
from steemrewarding.config_storage import ConfigurationDB
from steemrewarding.vote_storage import VotesTrx
from steemrewarding.vote_log_storage import VoteLogTrx
from steemrewarding.failed_vote_log_storage import FailedVoteLogTrx
from steemrewarding.broadcast_vote_storage import BroadcastVoteTrx
from steemrewarding.utils import isfloat, upvote_comment, valid_age, upvote_comment_without_check
from steemrewarding.version import version as rewardingversion
from steemrewarding.account_storage import AccountsDB
from steemrewarding.version import version as rewarding_version
import dataset
if __name__ == "__main__":
config_file = 'config.json'
if not os.path.isfile(config_file):
raise Exception("config.json is missing!")
else:
with open(config_file) as json_data_file:
config_data = json.load(json_data_file)
# print(config_data)
databaseConnector = config_data["databaseConnector"]
wallet_password = config_data["wallet_password"]
posting_auth_acc = config_data["posting_auth_acc"]
voting_round_sec = config_data["voting_round_sec"]
start_prep_time = time.time()
db = dataset.connect(databaseConnector)
# Create keyStorage
nobroadcast = False
# nobroadcast = True
postTrx = PostsTrx(db)
voteRulesTrx = VoteRulesTrx(db)
confStorage = ConfigurationDB(db)
pendingVotesTrx = PendingVotesTrx(db)
voteLogTrx = VoteLogTrx(db)
failedVoteLogTrx = FailedVoteLogTrx(db)
accountsTrx = AccountsDB(db)
broadcastVoteTrx = BroadcastVoteTrx(db)
conf_setup = confStorage.get()
# last_post_block = conf_setup["last_post_block"]
nodes = NodeList()
# nodes.update_nodes(weights={"block": 1})
try:
nodes.update_nodes()
except:
print("could not update nodes")
node_list = nodes.get_nodes(exclude_limited=False)
stm = Steem(node=node_list, num_retries=5, call_num_retries=3, timeout=15, nobroadcast=nobroadcast)
stm.wallet.unlock(wallet_password)
print("Use node %s" % str(stm))
last_voter = None
for vote in broadcastVoteTrx.get_all_expired():
if last_voter is not None and last_voter == vote["voter"]:
print("Skip %s for this round" % vote["voter"])
continue
voter_acc = Account(vote["voter"], steem_instance=stm)
if voter_acc.get_rc_manabar()["current_mana"] / 1e9 < 0.1:
print("%s has not sufficient RC" % vote["voter"])
last_voter = vote["voter"]
continue
if vote["retry_count"] >= 5:
broadcastVoteTrx.update_processed(vote["voter"], vote["authorperm"], None, False, True)
continue
if vote["expiration"] is not None and vote["expiration"] < datetime.utcnow():
continue
if vote["weight"] < 0.01:
continue
try:
print("voter %s votes %s" % (vote["voter"], vote["authorperm"]))
stm.vote(vote["weight"], vote["authorperm"], vote["voter"])
except Exception as e:
print("Vote failed: %s" % str(e))
last_voter = vote["voter"]
broadcastVoteTrx.update({"voter": vote["voter"], "authorperm": vote["authorperm"], "retry_count": vote["retry_count"] + 1})
print("Start apply new votes")
vote_count = 0
delete_pending_votes = []
for pending_vote in pendingVotesTrx.get_command_list_timed():
settings = None
# print("time vote %.2f s - %d votes" % (time.time() - start_prep_time, vote_count))
if (pending_vote["vote_weight"] is None or pending_vote["vote_weight"] <= 0) and (pending_vote["vote_sbd"] is None or float(pending_vote["vote_sbd"]) <= 0):
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero. (%s %% and %s $)" % (pending_vote["vote_weight"], pending_vote["vote_sbd"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_min = (datetime.utcnow() - pending_vote["comment_timestamp"]).total_seconds() / 60
maximum_vote_delay_min = pending_vote["maximum_vote_delay_min"]
if maximum_vote_delay_min < 0:
maximum_vote_delay_min = 9360
if age_min > maximum_vote_delay_min + voting_round_sec / 60:
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than %.2f min." % (maximum_vote_delay_min),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if age_min < pending_vote["vote_delay_min"] - voting_round_sec / 2.0 / 60:
continue
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
if voter_acc.sp < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as Steem Power is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if voter_acc.get_rc_manabar()["current_mana"] / 1e9 < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as RC is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
vote_weight = pending_vote["vote_weight"]
if vote_weight is None or vote_weight <= 0:
vote_weight = voter_acc.get_vote_pct_for_SBD(float(pending_vote["vote_sbd"])) / 100.
if vote_weight > 100:
vote_weight = 100
elif vote_weight < 0.01:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_hour = ((datetime.utcnow()) - pending_vote["created"]).total_seconds() / 60 / 60
if age_hour > 156:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than 6.5 days.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_min = ((datetime.utcnow()) - pending_vote["created"]).total_seconds() / 60
if age_min < pending_vote["vote_delay_min"] - voting_round_sec / 2.0 / 60:
continue
try:
c = Comment(pending_vote["authorperm"], use_tags_api=True, steem_instance=stm)
except:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not process %s" % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if pending_vote["max_net_votes"] >= 0 and pending_vote["max_net_votes"] < c["net_votes"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The number of post/comment votes (%d) is higher than max_net_votes (%d)." % (c["net_votes"], pending_vote["max_net_votes"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["max_pending_payout"] >= 0 and pending_vote["max_pending_payout"] < float(c["pending_payout_value"]):
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The pending payout of post/comment votes (%.2f) is higher than max_pending_payout (%.2f)." % (float(c["pending_payout_value"]), pending_vote["max_pending_payout"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
# check for max votes per day/week
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_day"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_24h_before = voteLogTrx.get_votes_per_day(pending_vote["voter"], author, sliding_time_window)
if votes_24h_before >= pending_vote["max_votes_per_day"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 24h (max_votes_per_day is %d)." % (votes_24h_before, pending_vote["max_votes_per_day"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_week"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_168h_before = voteLogTrx.get_votes_per_week(pending_vote["voter"], author, sliding_time_window)
if votes_168h_before >= pending_vote["max_votes_per_week"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 7 days (max_votes_per_week is %d)." % (votes_168h_before, pending_vote["max_votes_per_week"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if voter_acc.vp < pending_vote["min_vp"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Voting power is %.2f %%, which is to low. (min_vp is %.2f %%)" % (voter_acc.vp, pending_vote["min_vp"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
pause_votes_below_vp = settings["pause_votes_below_vp"]
if settings["vp"] is None:
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
else:
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
pause_votes_below_vp = 0
if voter_acc.vp < pause_votes_below_vp:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Voting is paused (VP = %.2f %%, which below pause_votes_below_vp of %.2f %%)" % (voter_acc.vp, pause_votes_below_vp),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
posting_auth = False
for a in voter_acc["posting"]["account_auths"]:
if a[0] == posting_auth_acc:
posting_auth = True
if voter_acc["name"] == posting_auth_acc:
posting_auth = True
already_voted = False
for v in c["active_votes"]:
if voter_acc["name"] == v["voter"]:
already_voted = True
if not posting_auth or already_voted:
if already_voted:
error_msg = "already voted."
else:
error_msg = "posting authority is missing"
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["vp_scaler"] > 0:
vote_weight *= 1 - ((100 - voter_acc.vp) / 100 * pending_vote["vp_scaler"])
if vote_weight <= 0:
error_msg = "Vote weight is zero or below zero (%.2f %%)" % vote_weight
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
# sucess = upvote_comment(c, voter_acc["name"], vote_weight)
reply_message = upvote_comment_without_check(c, voter_acc["name"], vote_weight)
if reply_message is not None:
vote_count += 1
if pending_vote["leave_comment"]:
try:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None and "upvote_comment" in settings and settings["upvote_comment"] is not None:
json_metadata = {'app': 'rewarding/%s' % (rewarding_version)}
reply_body = settings["upvote_comment"]
reply_body = reply_body.replace("{{name}}", "@%s" % c["author"] ).replace("{{voter}}", "@%s" % voter_acc["name"])
c.reply(reply_body, author=voter_acc["name"], meta=json_metadata)
except:
print("Could not leave comment!")
voteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "author": c["author"],
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"voted_after_min": age_min, "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"trail_vote": pending_vote["trail_vote"], "main_post": pending_vote["main_post"],
"voter_to_follow": pending_vote["voter_to_follow"]})
broadcastVoteTrx.add({"expiration": formatTimeString(reply_message["expiration"]).replace(tzinfo=None), "authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
else:
broadcastVoteTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight, "valid": True})
for pending_vote in delete_pending_votes:
pendingVotesTrx.delete(pending_vote["authorperm"], pending_vote["voter"], pending_vote["vote_when_vp_reached"])
delete_pending_votes = []
print("time vote %.2f s - %d votes" % (time.time() - start_prep_time, vote_count))
votes_above_vp = 0
votes_below_vp = 0
for pending_vote in pendingVotesTrx.get_command_list_vp_reached():
settings = None
if (pending_vote["vote_weight"] is None or pending_vote["vote_weight"] <= 0) and (pending_vote["vote_sbd"] is None or float(pending_vote["vote_sbd"]) <= 0):
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero.",
"timestamp": datetime.utcnow(), "vote_weight": 0, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
age_min = (datetime.utcnow() - pending_vote["comment_timestamp"]).total_seconds() / 60
maximum_vote_delay_min = pending_vote["maximum_vote_delay_min"]
if maximum_vote_delay_min > 0 and age_min > maximum_vote_delay_min + voting_round_sec / 60:
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than %.2f min." % (maximum_vote_delay_min),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if age_min < pending_vote["vote_delay_min"] - voting_round_sec / 2.0 / 60:
continue
settings = accountsTrx.get(pending_vote["voter"])
if settings is None:
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
pause_votes_below_vp = 0
vp = voter_acc.vp
else:
pause_votes_below_vp = settings["pause_votes_below_vp"]
vp = settings["vp"]
vp_update = settings["vp_update"]
if vp_update is not None:
diff_in_seconds = ((datetime.utcnow()) - (vp_update)).total_seconds()
if diff_in_seconds < 3600:
regenerated_vp = diff_in_seconds * 10000 / 432000 / 100
if vp + regenerated_vp < pending_vote["min_vp"]:
votes_below_vp += 1
continue
voter_acc = Account(pending_vote["voter"], steem_instance=stm)
accountsTrx.upsert({"name": pending_vote["voter"], "vp_update":datetime.utcnow(), "vp": voter_acc.vp})
if voter_acc.sp < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as Steem Power is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if voter_acc.get_rc_manabar()["current_mana"] / 1e9 < 0.1:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not vot %s, as RC is almost zero." % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": pending_vote["vote_weight"], "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if voter_acc.vp < pending_vote["min_vp"]:
votes_below_vp += 1
continue
votes_above_vp += 1
print("Votes above min_vp %d / below %d" % (votes_above_vp, votes_below_vp))
if voter_acc.vp < pause_votes_below_vp:
continue
vote_weight = pending_vote["vote_weight"]
if vote_weight <= 0:
vote_weight = voter_acc.get_vote_pct_for_SBD(float(pending_vote["vote_sbd"])) / 100.
if vote_weight > 100:
vote_weight = 100
elif vote_weight < 0.01:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "vote_weight was set to zero.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
try:
c = Comment(pending_vote["authorperm"], steem_instance=stm)
except:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "Could not process %s" % (pending_vote["authorperm"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
print("Could not process %s" % pending_vote["authorperm"])
continue
if not valid_age(c):
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "post is older than 6.5 days.",
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["max_net_votes"] >= 0 and pending_vote["max_net_votes"] < c["net_votes"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The number of post/comment votes (%d) is higher than max_net_votes (%d)." % (c["net_votes"], pending_vote["max_net_votes"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
if pending_vote["max_pending_payout"] >= 0 and pending_vote["max_pending_payout"] < float(c["pending_payout_value"]):
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The pending payout of post/comment votes (%.2f) is higher than max_pending_payout (%.2f)." % (float(c["pending_payout_value"]), pending_vote["max_pending_payout"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_day"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_24h_before = voteLogTrx.get_votes_per_day(pending_vote["voter"], author, sliding_time_window)
if votes_24h_before >= pending_vote["max_votes_per_day"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 24h (max_votes_per_day is %d)." % (votes_24h_before, pending_vote["max_votes_per_day"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
author, permlink = resolve_authorperm(pending_vote["authorperm"])
if pending_vote["max_votes_per_week"] > -1:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None:
sliding_time_window = settings["sliding_time_window"]
else:
sliding_time_window = True
votes_168h_before = voteLogTrx.get_votes_per_week(pending_vote["voter"], author, sliding_time_window)
if votes_168h_before >= pending_vote["max_votes_per_week"]:
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": "The author was already upvoted %d in the last 7 days (max_votes_per_week is %d)." % (votes_168h_before, pending_vote["max_votes_per_week"]),
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
posting_auth = False
for a in voter_acc["posting"]["account_auths"]:
if a[0] == posting_auth_acc:
posting_auth = True
if voter_acc["name"] == posting_auth_acc:
posting_auth = True
already_voted = False
for v in c["active_votes"]:
if voter_acc["name"] == v["voter"]:
already_voted = True
if not posting_auth or already_voted:
if already_voted:
error_msg = "already voted."
else:
error_msg = "posting authority is missing"
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"] })
continue
if pending_vote["vp_scaler"] > 0:
vote_weight *= 1 - ((100 - voter_acc.vp) / 100 * pending_vote["vp_scaler"])
if vote_weight <= 0:
error_msg = "Vote weight is zero or below zero (%.2f %%)" % vote_weight
failedVoteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "error": error_msg,
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"min_vp": pending_vote["min_vp"], "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"main_post": pending_vote["main_post"]})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
continue
# sucess = upvote_comment(c, voter_acc["name"], vote_weight)
reply_message = upvote_comment_without_check(c, voter_acc["name"], vote_weight)
if reply_message is not None:
vote_count += 1
if pending_vote["leave_comment"]:
try:
if settings is None:
settings = accountsTrx.get(voter_acc["name"])
if settings is not None and "upvote_comment" in settings and settings["upvote_comment"] is not None:
json_metadata = {'app': 'rewarding/%s' % (rewarding_version)}
reply_body = settings["upvote_comment"]
reply_body = reply_body.replace("{{name}}", "@%s" % c["author"] ).replace("{{voter}}", "@%s" % voter_acc["name"])
c.reply(reply_body, author=voter_acc["name"], meta=json_metadata)
except:
print("Could not leave comment!")
# add vote to log
voteLogTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "author": c["author"],
"timestamp": datetime.utcnow(), "vote_weight": vote_weight, "vote_delay_min": pending_vote["vote_delay_min"],
"voted_after_min": age_min, "vp": voter_acc.vp, "vote_when_vp_reached": pending_vote["vote_when_vp_reached"],
"trail_vote": pending_vote["trail_vote"], "main_post": pending_vote["main_post"],
"voter_to_follow": pending_vote["voter_to_follow"], "is_pending": True})
broadcastVoteTrx.add({"expiration": formatTimeString(reply_message["expiration"]).replace(tzinfo=None), "authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight})
delete_pending_votes.append({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"], "vote_when_vp_reached": pending_vote["vote_when_vp_reached"]})
else:
broadcastVoteTrx.add({"authorperm": pending_vote["authorperm"], "voter": pending_vote["voter"],
"weight": vote_weight, "valid": True})
continue
for pending_vote in delete_pending_votes:
pendingVotesTrx.delete(pending_vote["authorperm"], pending_vote["voter"], pending_vote["vote_when_vp_reached"])
delete_pending_votes = []
print("upvote posts script run %.2f s - %d votes were broadcasted" % (time.time() - start_prep_time, vote_count)) | 0.215846 | 0.113383 |
# Author: <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>
#
# License: MIT License
import numpy as np
import ot
def test_gromov():
n_samples = 50 # nb samples
mu_s = np.array([0, 0])
cov_s = np.array([[1, 0], [0, 1]])
xs = ot.datasets.make_2D_samples_gauss(n_samples, mu_s, cov_s, random_state=4)
xt = xs[::-1].copy()
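# the target is the source point cloud reversed, so GW should recover the order reversal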
p = ot.unif(n_samples)
q = ot.unif(n_samples)
C1 = ot.dist(xs, xs)
C2 = ot.dist(xt, xt)
C1 /= C1.max()
C2 /= C2.max()
G = ot.gromov.gromov_wasserstein(C1, C2, p, q, 'square_loss', verbose=True)
# check constraints
np.testing.assert_allclose(
p, G.sum(1), atol=1e-04) # cf convergence gromov
np.testing.assert_allclose(
q, G.sum(0), atol=1e-04) # cf convergence gromov
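# with xt the reversed xs, the optimal plan puts uniform mass on the anti-diagonal, i.e. the flipped identity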
Id = (1 / (1.0 * n_samples)) * np.eye(n_samples, n_samples)
np.testing.assert_allclose(
G, np.flipud(Id), atol=1e-04)
gw, log = ot.gromov.gromov_wasserstein2(C1, C2, p, q, 'kl_loss', log=True)
gw_val = ot.gromov.gromov_wasserstein2(C1, C2, p, q, 'kl_loss', log=False)
G = log['T']
np.testing.assert_allclose(gw, 0, atol=1e-1, rtol=1e-1)
np.testing.assert_allclose(gw, gw_val, atol=1e-1, rtol=1e-1) # cf log=False
# check constraints
np.testing.assert_allclose(
p, G.sum(1), atol=1e-04) # cf convergence gromov
np.testing.assert_allclose(
q, G.sum(0), atol=1e-04) # cf convergence gromov
def test_entropic_gromov():
n_samples = 50 # nb samples
mu_s = np.array([0, 0])
cov_s = np.array([[1, 0], [0, 1]])
xs = ot.datasets.make_2D_samples_gauss(n_samples, mu_s, cov_s, random_state=42)
xt = xs[::-1].copy()
p = ot.unif(n_samples)
q = ot.unif(n_samples)
C1 = ot.dist(xs, xs)
C2 = ot.dist(xt, xt)
C1 /= C1.max()
C2 /= C2.max()
G = ot.gromov.entropic_gromov_wasserstein(
C1, C2, p, q, 'square_loss', epsilon=5e-4, verbose=True)
# check constraints
np.testing.assert_allclose(
p, G.sum(1), atol=1e-04) # cf convergence gromov
np.testing.assert_allclose(
q, G.sum(0), atol=1e-04) # cf convergence gromov
gw, log = ot.gromov.entropic_gromov_wasserstein2(
C1, C2, p, q, 'kl_loss', epsilon=1e-2, log=True)
G = log['T']
np.testing.assert_allclose(gw, 0, atol=1e-1, rtol=1e-1)
# check constraints
np.testing.assert_allclose(
p, G.sum(1), atol=1e-04) # cf convergence gromov
np.testing.assert_allclose(
q, G.sum(0), atol=1e-04) # cf convergence gromov
def test_gromov_barycenter():
ns = 50
nt = 60
Xs, ys = ot.datasets.make_data_classif('3gauss', ns, random_state=42)
Xt, yt = ot.datasets.make_data_classif('3gauss2', nt, random_state=42)
C1 = ot.dist(Xs)
C2 = ot.dist(Xt)
n_samples = 3
Cb = ot.gromov.gromov_barycenters(n_samples, [C1, C2],
[ot.unif(ns), ot.unif(nt)
], ot.unif(n_samples), [.5, .5],
'square_loss', # 5e-4,
max_iter=100, tol=1e-3,
verbose=True)
np.testing.assert_allclose(Cb.shape, (n_samples, n_samples))
Cb2 = ot.gromov.gromov_barycenters(n_samples, [C1, C2],
[ot.unif(ns), ot.unif(nt)
], ot.unif(n_samples), [.5, .5],
'kl_loss', # 5e-4,
max_iter=100, tol=1e-3)
np.testing.assert_allclose(Cb2.shape, (n_samples, n_samples))
def test_gromov_entropic_barycenter():
ns = 50
nt = 60
Xs, ys = ot.datasets.make_data_classif('3gauss', ns, random_state=42)
Xt, yt = ot.datasets.make_data_classif('3gauss2', nt, random_state=42)
C1 = ot.dist(Xs)
C2 = ot.dist(Xt)
n_samples = 3
Cb = ot.gromov.entropic_gromov_barycenters(n_samples, [C1, C2],
[ot.unif(ns), ot.unif(nt)
], ot.unif(n_samples), [.5, .5],
'square_loss', 2e-3,
max_iter=100, tol=1e-3,
verbose=True)
np.testing.assert_allclose(Cb.shape, (n_samples, n_samples))
Cb2 = ot.gromov.entropic_gromov_barycenters(n_samples, [C1, C2],
[ot.unif(ns), ot.unif(nt)
], ot.unif(n_samples), [.5, .5],
'kl_loss', 2e-3,
max_iter=100, tol=1e-3)
np.testing.assert_allclose(Cb2.shape, (n_samples, n_samples))
def test_fgw():
n_samples = 50 # nb samples
mu_s = np.array([0, 0])
cov_s = np.array([[1, 0], [0, 1]])
xs = ot.datasets.make_2D_samples_gauss(n_samples, mu_s, cov_s, random_state=42)
xt = xs[::-1].copy()
ys = np.random.randn(xs.shape[0], 2)
yt = ys[::-1].copy()
p = ot.unif(n_samples)
q = ot.unif(n_samples)
C1 = ot.dist(xs, xs)
C2 = ot.dist(xt, xt)
C1 /= C1.max()
C2 /= C2.max()
M = ot.dist(ys, yt)
M /= M.max()
G = ot.gromov.fused_gromov_wasserstein(M, C1, C2, p, q, 'square_loss', alpha=0.5)
# check constraints
np.testing.assert_allclose(
p, G.sum(1), atol=1e-04) # cf convergence fgw
np.testing.assert_allclose(
q, G.sum(0), atol=1e-04) # cf convergence fgw
Id = (1 / (1.0 * n_samples)) * np.eye(n_samples, n_samples)
np.testing.assert_allclose(
G, np.flipud(Id), atol=1e-04) # cf convergence gromov
fgw, log = ot.gromov.fused_gromov_wasserstein2(M, C1, C2, p, q, 'square_loss', alpha=0.5, log=True)
G = log['T']
np.testing.assert_allclose(fgw, 0, atol=1e-1, rtol=1e-1)
# check constraints
np.testing.assert_allclose(
p, G.sum(1), atol=1e-04) # cf convergence gromov
np.testing.assert_allclose(
q, G.sum(0), atol=1e-04) # cf convergence gromov
def test_fgw_barycenter():
np.random.seed(42)
ns = 50
nt = 60
Xs, ys = ot.datasets.make_data_classif('3gauss', ns, random_state=42)
Xt, yt = ot.datasets.make_data_classif('3gauss2', nt, random_state=42)
ys = np.random.randn(Xs.shape[0], 2)
yt = np.random.randn(Xt.shape[0], 2)
C1 = ot.dist(Xs)
C2 = ot.dist(Xt)
n_samples = 3
X, C = ot.gromov.fgw_barycenters(n_samples, [ys, yt], [C1, C2], [ot.unif(ns), ot.unif(nt)], [.5, .5], 0.5,
fixed_structure=False, fixed_features=False,
p=ot.unif(n_samples), loss_fun='square_loss',
max_iter=100, tol=1e-3)
np.testing.assert_allclose(C.shape, (n_samples, n_samples))
np.testing.assert_allclose(X.shape, (n_samples, ys.shape[1]))
xalea = np.random.randn(n_samples, 2)
init_C = ot.dist(xalea, xalea)
X, C = ot.gromov.fgw_barycenters(n_samples, [ys, yt], [C1, C2], ps=[ot.unif(ns), ot.unif(nt)], lambdas=[.5, .5], alpha=0.5,
fixed_structure=True, init_C=init_C, fixed_features=False,
p=ot.unif(n_samples), loss_fun='square_loss',
max_iter=100, tol=1e-3)
np.testing.assert_allclose(C.shape, (n_samples, n_samples))
np.testing.assert_allclose(X.shape, (n_samples, ys.shape[1]))
init_X = np.random.randn(n_samples, ys.shape[1])
X, C = ot.gromov.fgw_barycenters(n_samples, [ys, yt], [C1, C2], [ot.unif(ns), ot.unif(nt)], [.5, .5], 0.5,
fixed_structure=False, fixed_features=True, init_X=init_X,
p=ot.unif(n_samples), loss_fun='square_loss',
max_iter=100, tol=1e-3)
np.testing.assert_allclose(C.shape, (n_samples, n_samples))
np.testing.assert_allclose(X.shape, (n_samples, ys.shape[1]))
def test_gromov_1d():
np.random.seed(42)
# Test cost for diag
u = np.array([1, 0, 4])
v = np.array([1, 4, 0])
cost_gw1D = ot.gromov.gromov_1d2(u, v)
T = ot.gromov.gromov_1d(u, v, dense=False)
assert cost_gw1D == 0
assert ot.gromov.gromov_loss_sorted_1d(np.dot(u, 3 * T), v) == 0
# Test for anti diag
u = np.array([1, 0, 4])
v = np.array([-1, 2, 3])
cost_gw1D = ot.gromov.gromov_1d2(u, v)
T = ot.gromov.gromov_1d(u, v, dense=False)
assert cost_gw1D == 0
assert ot.gromov.gromov_loss_sorted_1d(np.dot(u, 3 * T), v) == 0
# Test GW 1d better than GW POT
all_good = []
its_all_good_man = False
for n in range(3, 100):
ns = n
nt = n
xs_alea = np.random.randn(ns, 1)
xt_alea = np.random.randn(nt, 1)
T_1d, log_1d = ot.gromov.gromov_1d(xs_alea.ravel(), xt_alea.ravel(), log=True, dense=False)
C1 = ot.dist(xs_alea, metric='sqeuclidean')
C2 = ot.dist(xt_alea, metric='sqeuclidean')
p = np.ones(C1.shape[0]) / C1.shape[0]
q = np.ones(C2.shape[0]) / C2.shape[0]
T_GW, log_GW = ot.gromov.gromov_wasserstein(C1, C2, p, q, 'square_loss', log=True)
all_good.append(log_1d['gw_dist'] - log_GW['gw_dist'])
all_good = np.array(all_good)
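# the 1d solver should never be worse than the generic POT solver; any nonnegative gap must be numerical noise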
if len(all_good[all_good >= 0]) == 0:
its_all_good_man = True
elif np.max(all_good[all_good >= 0]) <= 1e-14:
its_all_good_man = True
assert its_all_good_man
all_good = []
its_all_good_man = False
for repeat in range(100):
ns = 5
nt = 5
xs_alea = np.random.randn(ns, 1)
xt_alea = np.random.randn(nt, 1)
T_1d, log_1d = ot.gromov.gromov_1d(xs_alea.ravel(), xt_alea.ravel(), log=True, dense=False)
C1 = ot.dist(xs_alea, metric='sqeuclidean')
C2 = ot.dist(xt_alea, metric='sqeuclidean')
p = np.ones(C1.shape[0]) / C1.shape[0]
q = np.ones(C2.shape[0]) / C2.shape[0]
constC, hC1, hC2 = ot.gromov.init_matrix(C1, C2, p, q, 'square_loss')
d_1D = ot.gromov.gwloss(constC, hC1, hC2, T_1d)
d_true_1D = log_1d['gw_dist']
all_good.append(np.abs(d_1D - d_true_1D))
all_good = np.array(all_good)
assert np.all(all_good <= 1e-13)
| test/test_gromov.py |
import os
import datetime
import requests
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from pangea.core.encrypted_fields import EncryptedString
from pangea.core.models import (
PangeaUser,
Organization,
Sample,
Pipeline,
PipelineModule,
WorkOrder,
WorkOrderProto,
JobOrder,
JobOrderProto,
GroupWorkOrder,
GroupWorkOrderProto,
PrivilegedUser,
SampleAnalysisResult,
)
from .constants import (
UPLOAD_TEST_FILENAME,
UPLOAD_TEST_FILEPATH,
)
class WorkOrderTests(APITestCase):
@classmethod
def setUpTestData(cls):
cls.organization = Organization.objects.create(name='Test Organization')
cls.user = PangeaUser.objects.create(email='<EMAIL>', password='<PASSWORD>')
cls.group = cls.organization.create_sample_group(name='GRP_01', is_library=True, is_public=False)
cls.sample = cls.group.create_sample(name='SMPL_01')
def test_create_work_order(self):
"""Test API call to create a work order from a sample and a prototype."""
wop = WorkOrderProto.objects.create(name='test work order')
url = reverse(
'sample-create-workorder',
kwargs={'sample_pk': self.sample.pk, 'wop_pk': wop.pk}
)
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.post(url, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(response.data['uuid'])
self.assertTrue(WorkOrder.objects.exists())
def test_create_group_work_order(self):
"""Test API call to create a group work order from a group and a prototype."""
gwop = GroupWorkOrderProto.objects.create(name='test groupwork order')
url = reverse(
'sample-group-create-workorder',
kwargs={'sample_group_pk': self.group.pk, 'wop_pk': gwop.pk}
)
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.post(url, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertTrue(response.data['uuid'])
self.assertTrue(GroupWorkOrder.objects.exists())
def test_modify_job_order(self):
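"""Test that a user privileged on the prototype can modify a derived job order."""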
wop = WorkOrderProto.objects.create(name='test work order')
jop = JobOrderProto.objects.create(name='test job order', work_order_proto=wop)
wo = wop.work_order(self.sample)
jo = wo.jobs.get()
self.assertEqual(jo.name, jop.name)
PrivilegedUser.objects.create(user=self.user, work_order_proto=wop)
self.client.force_authenticate(user=self.user)
url = reverse('job-order-detail', kwargs={'pk': jo.pk})
response = self.client.patch(url, {'name': 'new name'}, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
jo = wo.jobs.get()
self.assertEqual(jo.name, 'new name')
def test_non_privileged_cannot_modify_job_order(self):
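"""Test that a non-privileged user cannot modify a job order."""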
wop = WorkOrderProto.objects.create(name='test work order')
jop = JobOrderProto.objects.create(name='test job order', work_order_proto=wop)
wo = wop.work_order(self.sample)
jo = wo.jobs.get()
self.assertEqual(jo.name, jop.name)
self.client.force_authenticate(user=self.user)
url = reverse('job-order-detail', kwargs={'pk': jo.pk})
response = self.client.patch(url, {'name': 'new name'}, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_list_work_order_protos(self):
"""Test that we can list all existing work order prototypes."""
wop = WorkOrderProto.objects.create(name='test work order')
url = reverse('work-order-proto-list')
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
def test_list_group_work_order_protos(self):
"""Test that we can list all existing group work order prototypes."""
gwop = GroupWorkOrderProto.objects.create(name='test group work order')
url = reverse('group-work-order-proto-list')
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
def test_list_work_orders_in_work_order_proto(self):
"""Test that we can list the work orders made from a work order prototype."""
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('work-order-proto-list-work-orders', kwargs={'pk': wop.pk})
PrivilegedUser.objects.create(user=self.user, work_order_proto=wop)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
self.assertEqual(response.data['results'][0]['uuid'], str(wo.uuid))
def test_list_group_work_orders_in_group_work_order_proto(self):
"""Test that we can list the group work orders made from a group work order prototype."""
gwop = GroupWorkOrderProto.objects.create(name='test group work order')
wop = WorkOrderProto.objects.create(name='test work order')
gwop.work_order_protos.add(wop)
gwop.save()
gwo = gwop.work_order(self.group)
url = reverse('group-work-order-proto-list-group-work-orders', kwargs={'pk': gwop.pk})
PrivilegedUser.objects.create(user=self.user, work_order_proto=wop)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
self.assertEqual(response.data['results'][0]['uuid'], str(gwo.uuid))
def test_list_work_orders_in_work_order_proto_unauth(self):
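"""Test that work orders in a prototype are hidden from non-privileged users."""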
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('work-order-proto-list-work-orders', kwargs={'pk': wop.pk})
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 0)
def test_list_job_order_protos(self):
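"""Test that we can list all existing job order prototypes."""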
wop = WorkOrderProto.objects.create(name='test work order')
JobOrderProto.objects.create(
name='test job order',
work_order_proto=wop,
)
url = reverse('job-order-proto-list')
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
def test_get_work_order_proto_detail(self):
"""Test that we can get info for a work order prototype."""
wop = WorkOrderProto.objects.create(name='test work order')
url = reverse('work-order-proto-detail', kwargs={'pk': wop.pk})
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['uuid'], str(wop.uuid))
def test_get_group_work_order_proto_detail(self):
"""Test that we can get info for a group work order prototype."""
gwop = GroupWorkOrderProto.objects.create(name='test group work order')
url = reverse('group-work-order-proto-detail', kwargs={'pk': gwop.pk})
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['uuid'], str(gwop.uuid))
def test_get_work_order_detail(self):
"""Test that we can get info for a work order."""
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('work-order-detail', kwargs={'pk': wo.pk})
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['uuid'], str(wo.uuid))
def test_get_group_work_order_detail(self):
"""Test that we can get info for a group work order."""
gwop = GroupWorkOrderProto.objects.create(name='test group work order')
gwo = gwop.work_order(self.group)
url = reverse('group-work-order-detail', kwargs={'pk': gwo.pk})
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['uuid'], str(gwo.uuid))
def test_get_work_order_detail_extended(self):
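"""Test that work order details include status and job order objects."""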
wop = WorkOrderProto.objects.create(name='test work order')
jop = JobOrderProto.objects.create(
name='test job order',
work_order_proto=wop,
)
wo = wop.work_order(self.sample)
url = reverse('work-order-detail', kwargs={'pk': wo.pk})
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['uuid'], str(wo.uuid))
self.assertEqual(response.data['status'], 'pending')
self.assertEqual(len(response.data['job_order_objs']), 1)
def test_get_work_order_detail_unauth(self):
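"""Test that an unauthenticated user cannot get work order details."""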
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('work-order-detail', kwargs={'pk': wo.pk})
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_get_work_order_in_sample(self):
"""Test that we can access all work orders for a sample."""
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('sample-list-workorder', kwargs={'sample_pk': self.sample.pk})
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
self.assertEqual(response.data['results'][0]['uuid'], str(wo.uuid))
def test_get_group_work_order_in_sample_group(self):
"""Test that we can access all group work orders for a smaple group."""
gwop = GroupWorkOrderProto.objects.create(name='test group work order')
gwo = gwop.work_order(self.group)
url = reverse('sample-group-list-workorder', kwargs={'sample_group_pk': self.group.pk})
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data['results']), 1)
self.assertEqual(response.data['results'][0]['uuid'], str(gwo.uuid))
def test_get_job_order_proto_detail(self):
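"""Test that we can get info for a job order prototype."""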
wop = WorkOrderProto.objects.create(name='test work order')
jop = JobOrderProto.objects.create(
name='test job order',
work_order_proto=wop,
)
url = reverse('job-order-proto-detail', kwargs={'pk': jop.pk})
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['uuid'], str(jop.uuid))
def test_get_job_order_detail_unauth(self):
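"""Test that an unauthenticated user cannot get info for a job order."""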
wop = WorkOrderProto.objects.create(name='test work order')
jop = JobOrderProto.objects.create(name='test job order', work_order_proto=wop)
wo = wop.work_order(self.sample)
jo = wo.jobs.get()
url = reverse('job-order-detail', kwargs={'pk': jo.pk})
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_get_job_order_detail(self):
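"""Test that an organization member can get info for a job order."""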
wop = WorkOrderProto.objects.create(name='test work order')
jop = JobOrderProto.objects.create(name='test job order', work_order_proto=wop)
wo = wop.work_order(self.sample)
jo = wo.jobs.get()
url = reverse('job-order-detail', kwargs={'pk': jo.pk})
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data['uuid'], str(jo.uuid))
def test_privileged_user_retrieve_sample(self):
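"""Test that a privileged user can retrieve a sample via a work order."""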
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('sample-details', kwargs={'pk': self.sample.pk})
url += f'?work_order_uuid={wo.uuid}'
PrivilegedUser.objects.create(user=self.user, work_order_proto=wop)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_privileged_user_can_create_ar(self):
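"""Test that a privileged user can create an analysis result via a work order."""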
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('sample-ars-create')
url += f'?work_order_uuid={wo.uuid}'
data = {
'module_name': 'taxa',
'sample': self.sample.pk,
'description': 'short description',
'metadata': {'a': 1, 'b': 'foo'},
}
PrivilegedUser.objects.create(user=self.user, work_order_proto=wop)
self.client.force_authenticate(user=self.user)
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(SampleAnalysisResult.objects.count(), 1)
self.assertEqual(SampleAnalysisResult.objects.get().sample, self.sample)
self.assertEqual(SampleAnalysisResult.objects.get().module_name, 'taxa')
def test_non_privileged_user_cannot_create_ar(self):
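"""Test that a non-privileged user cannot create an analysis result."""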
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('sample-ars-create')
url += f'?work_order_uuid={wo.uuid}'
data = {
'module_name': 'taxa',
'sample': self.sample.pk,
'description': 'short description',
'metadata': {'a': 1, 'b': 'foo'},
}
self.client.force_authenticate(user=self.user)
response = self.client.post(url, data, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_user_in_org_cannot_retrieve_sample_if_they_use_work_order(self):
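"""Test that org membership alone does not grant access via a work order."""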
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('sample-details', kwargs={'pk': self.sample.pk})
url += f'?work_order_uuid={wo.uuid}'
self.organization.users.add(self.user)
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_non_privileged_user_cannot_retrieve_sample(self):
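"""Test that a non-privileged user cannot retrieve a sample via a work order."""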
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('sample-details', kwargs={'pk': self.sample.pk})
url += f'?work_order_uuid={wo.uuid}'
self.client.force_authenticate(user=self.user)
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_no_user_cannot_retrieve_sample(self):
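"""Test that an unauthenticated user cannot retrieve a sample via a work order."""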
wop = WorkOrderProto.objects.create(name='test work order')
wo = wop.work_order(self.sample)
url = reverse('sample-details', kwargs={'pk': self.sample.pk})
url += f'?work_order_uuid={wo.uuid}'
response = self.client.get(url, format='json')
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
| pangea/core/tests/test_api_work_order.py |
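# A minimal client-side sketch (not from the source above) of the work-order
# access pattern exercised by the tests in
# pangea/core/tests/test_api_work_order.py. The host, URL path, and auth
# scheme here are hypothetical -- the tests resolve routes via
# reverse('sample-details') -- but the ?work_order_uuid query parameter and
# the 200-vs-403 behaviour come straight from the tests.
import requests

BASE = 'http://localhost:8000'         # hypothetical server
sample_pk = '<sample-pk>'              # placeholder supplied by the caller
work_order_uuid = '<work-order-uuid>'  # placeholder supplied by the caller

resp = requests.get(
    f'{BASE}/samples/{sample_pk}',
    params={'work_order_uuid': work_order_uuid},
    headers={'Authorization': 'Token <token>'},  # placeholder credentials
)
# Expect 200 for a PrivilegedUser of the work order's prototype, 403 otherwise
print(resp.status_code)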
import math as m
from typing import Iterable, NamedTuple, Set
from .types import FloatColor, Int, IntColor
__all__ = ["Palette"]
class RGBCIELABColor(NamedTuple):
rgb: IntColor
cielab: FloatColor
class CIE:
"""
A helper class of static methods for CIE-related functions
"""
SRGB_TO_CIEXYZ_MATRIX = [
[0.41239080, 0.35758434, 0.18048079],
[0.21263901, 0.71516868, 0.07219232],
[0.01933082, 0.11919478, 0.95053215],
]
# Values for standard illuminant D65
X_n = 0.950489
Y_n = 1
Z_n = 1.08884
# Constants for CIEXYZ to CIELAB
SIGMA = 6 / 29
SIGMA_SQUARED = SIGMA ** 2
SIGMA_CUBED = SIGMA ** 3
ONE_THIRD = 1 / 3
FOUR_TWENTYNINTHS = 4 / 29
TWENTY_FIVE_POWER_SEVEN = 25 ** 7
# Constants for CIEDE2000
TAU = 2 * m.pi
K_L = K_C = K_H = 1
@classmethod
def _srgb_to_ciexyz(cls, r: Int, g: Int, b: Int) -> FloatColor:
# Converts sRGB to CIEXYZ
def inverse_gamma(u):
# Implements inverse gamma for gamma expansion
return u / 12.92 if u <= 0.04045 else ((u + 0.055) / 1.055) ** 2.4
# RGB components are scaled to [0, 1]
# and have the inverse gamma function applied
r, g, b = (inverse_gamma(v / 255) for v in (r, g, b))
# Linearised RGB values are converted to CIEXYZ by linear transformation
x, y, z = (
sum(v * w for w in row)
for v, row in zip((r, g, b), cls.SRGB_TO_CIEXYZ_MATRIX)
)
return (x, y, z)
@classmethod
def _ciexyz_to_cielab(cls, x: float, y: float, z: float) -> FloatColor:
# Converts CIEXYZ to CIELAB
def f(t):
return (
t ** cls.ONE_THIRD
if t > cls.SIGMA_CUBED
else (t * cls.ONE_THIRD / cls.SIGMA_SQUARED + cls.FOUR_TWENTYNINTHS)
)
f_x = f(x / cls.X_n)
f_y = f(y / cls.Y_n)
f_z = f(z / cls.Z_n)
L = 116 * f_y - 16
a = 500 * (f_x - f_y)
b = 200 * (f_y - f_z)
return (L, a, b)
@classmethod
def srgb_to_cielab(cls, r: Int, g: Int, b: Int) -> FloatColor:
"""
Takes an RGB color value, with each component in the range 0-255,
and converts it to a CIELAB color value.
RGB colors are assumed to be in the sRGB color space.
:param r: The red value of the color
:type r: Int
:param g: The green value of the color
:type g: Int
:param b: The blue value of the color
:type b: Int
:return: The equivalent CIELAB color value
:rtype: FloatColor
"""
L, a, b = cls._ciexyz_to_cielab(*cls._srgb_to_ciexyz(r, g, b))
return (L, a, b)
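# Sanity check (values not from this source; standard sRGB red under the
# D65 white point used above): CIE.srgb_to_cielab(255, 0, 0) should come
# out at approximately (53.2, 80.1, 67.2).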
@classmethod
def radians_to_degrees(cls, angle: float) -> float:
"""
Converts radians to degrees, accepting values between -pi and pi.
:raises ValueError: If the angle is not between -pi and pi.
:return: The angle in degrees, in the range 0 to 360
:rtype: float
"""
if angle > m.pi or angle < -m.pi:
raise ValueError("Angle must be between -pi and pi")
return m.degrees(angle if angle >= 0 else angle + cls.TAU)
@classmethod
def ciede2000(
cls,
L_1: float,
a_1: float,
b_1: float,
L_2: float,
a_2: float,
b_2: float,
) -> float:
"""Calculates the CIEDE2000 difference between two CIELAB colors
:param L_1: The L* value of the first color
:type L_1: float
:param a_1: The a* value of the first color
:type a_1: float
:param b_1: The b* value of the first color
:type b_1: float
:param L_2: The L* value of the second color
:type L_2: float
:param a_2: The a* value of the second color
:type a_2: float
:param b_2: The b* value of the second color
:type b_2: float
:return: The CIEDE2000 difference
:rtype: float
"""
# Calculates the CIEDE2000 color difference value between two CIELAB colors
# Source: http://www2.ece.rochester.edu/~gsharma/ciede2000/ciede2000noteCRNA.pdf
C_star_1_ab = (a_1 ** 2 + b_1 ** 2) ** 0.5
C_star_2_ab = (a_2 ** 2 + b_2 ** 2) ** 0.5
C_star_ab_bar = (C_star_1_ab + C_star_2_ab) / 2
# Equal to G + 1
G_plus = 1 + 0.5 * (
1 - (1 / (1 + cls.TWENTY_FIVE_POWER_SEVEN / C_star_ab_bar ** 7)) ** 0.5
)
a_prime_1 = G_plus * a_1
a_prime_2 = G_plus * a_2
C_prime_1 = (a_prime_1 ** 2 + b_1 ** 2) ** 0.5
C_prime_2 = (a_prime_2 ** 2 + b_2 ** 2) ** 0.5
h_prime_1 = (
0
if b_1 == 0 and a_prime_1 == 0
else cls.radians_to_degrees(m.atan2(b_1, a_prime_1))
)
h_prime_2 = (
0
if b_2 == 0 and a_prime_2 == 0
else cls.radians_to_degrees(m.atan2(b_2, a_prime_2))
)
Delta_L_prime = L_2 - L_1
Delta_C_prime = C_prime_2 - C_prime_1
# Optimisations
h_prime_diff = h_prime_2 - h_prime_1
abs_h_prime_diff = abs(h_prime_diff)
if C_prime_1 == 0 or C_prime_2 == 0:
Delta_h_prime = 0
elif abs_h_prime_diff <= 180:
Delta_h_prime = h_prime_diff
elif h_prime_diff > 180:
Delta_h_prime = h_prime_diff - 360
else:
Delta_h_prime = h_prime_diff + 360
Delta_H_prime = (
2 * (C_prime_1 * C_prime_2) ** 0.5 * m.sin(m.radians(Delta_h_prime / 2))
)
L_prime_bar = (L_1 + L_2) / 2
C_prime_bar = (C_prime_1 + C_prime_2) / 2
# An optimisation
h_prime_sum = h_prime_1 + h_prime_2
if C_prime_1 == 0 or C_prime_2 == 0:
h_prime_bar = h_prime_sum
elif abs_h_prime_diff <= 180:
h_prime_bar = h_prime_sum / 2
elif h_prime_sum < 360:
h_prime_bar = (h_prime_sum + 360) / 2
else:
h_prime_bar = (h_prime_sum - 360) / 2
T = (
1
- 0.17 * m.cos(m.radians(h_prime_bar - 30))
+ 0.24 * m.cos(m.radians(2 * h_prime_bar))
+ 0.32 * m.cos(m.radians(3 * h_prime_bar + 6))
- 0.2 * m.cos(m.radians(4 * h_prime_bar - 63))
)
Delta_theta = 30 * m.exp(-(((h_prime_bar - 275) / 25) ** 2))
R_C = 2 * (1 / (1 + cls.TWENTY_FIVE_POWER_SEVEN / C_prime_bar ** 7)) ** 0.5
S_L = (
1
+ (0.015 * (L_prime_bar - 50) ** 2) / (20 + (L_prime_bar - 50) ** 2) ** 0.5
)
S_C = 1 + 0.045 * C_prime_bar
S_H = 1 + 0.015 * C_prime_bar * T
R_T = -m.sin(m.radians(2 * Delta_theta)) * R_C
Delta_E = (
(Delta_L_prime / (cls.K_L * S_L)) ** 2
+ (Delta_C_prime / (cls.K_C * S_C)) ** 2
+ (Delta_H_prime / (cls.K_H * S_H)) ** 2
+ (
R_T
* (Delta_C_prime / (cls.K_C * S_C))
* (Delta_H_prime / (cls.K_H * S_H))
)
) ** 0.5
return Delta_E
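# Reference check (values from Sharma et al.'s published CIEDE2000 test
# data, not from this source): CIE.ciede2000(50.0, 2.6772, -79.7751,
# 50.0, 0.0, -82.7485) should return approximately 2.0425.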
class Palette:
"""
A class for holding a palette of colors.
"""
def __init__(self):
self.colors: Set[RGBCIELABColor] = set()
self.cache = {}
def add(self, colors: Iterable[IntColor]) -> None:
"""
Add RGB colors to the palette
:param colors: An iterable of color tuples to add to the palette
:type colors: Iterable[Color]
"""
for color in colors:
cielab_color = CIE.srgb_to_cielab(*color)
self.colors.add(RGBCIELABColor(color, cielab_color))
def find_nearest(self, color: IntColor) -> IntColor:
"""
Finds the nearest RGB color in the palette to the specified color.
Uses CIEDE2000 color difference
:param color: The source color in RGB
:type color: IntColor
:raises Exception: If the palette has no colours in it
:return: The nearest color in the palette in RGB
:rtype: IntColor
"""
if not self.colors:
raise Exception("Palette is empty")
if color in self.cache:
return self.cache[color]
color_as_cielab = CIE.srgb_to_cielab(*color)
nearest_color = min(
self.colors,
key=lambda c: CIE.ciede2000(*c.cielab, *color_as_cielab),
).rgb
self.cache[color] = nearest_color
return nearest_color
| coloromo/color.py |
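# A minimal usage sketch for coloromo/color.py above (assumption: IntColor
# is an (r, g, b) tuple of ints, as the conversions above imply):
from coloromo.color import Palette

palette = Palette()
palette.add([(0, 0, 0), (255, 255, 255), (255, 0, 0)])
# Under CIEDE2000 the nearest palette entry to a dark grey is black;
# repeated lookups for the same color are served from the cache.
print(palette.find_nearest((30, 30, 30)))  # -> (0, 0, 0)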
import logging
import unittest
from quantum.common import exceptions as exc
from quantum.openstack.common import importutils
from quantum.plugins.cisco.common import cisco_constants as const
from quantum.plugins.cisco.common import cisco_credentials as creds
from quantum.plugins.cisco.db import api as db
from quantum.plugins.cisco.db import l2network_db as cdb
from quantum.plugins.cisco import l2network_plugin_configuration as conf
from quantum.plugins.cisco.models import l2network_multi_blade
logging.basicConfig(level=logging.WARN)
LOG = logging.getLogger(__name__)
# Set some data to use in tests
tenant_id = "network_admin"
net_name = "TestNetwork1"
new_net_name = "NewTestNetwork1"
net_id = "44"
port_id = "p0005"
port_state = const.PORT_UP
interface_id = "vif-01"
vlan_id = "102"
def vlan_name(net_id):
return "q-%svlan" % net_id[0:10]
class TestMultiBlade(unittest.TestCase):
"""
Tests for the multi-blade model for the L2Network plugin
"""
_plugins = {}
_inventory = {}
def setUp(self):
"""Setup our tests"""
# Initialize cdb and credentials
db.configure_db({'sql_connection': 'sqlite:///:memory:'})
cdb.initialize()
creds.Store.initialize()
# Create a place to store net and port ids for the duration of the test
self.net_id = 0
self.port_id = 0
# Create the multiblade object
self._l2network_multiblade = (
l2network_multi_blade.L2NetworkMultiBlade())
self.plugin_key = (
"quantum.plugins.cisco.ucs.cisco_ucs_plugin.UCSVICPlugin")
# Get UCS inventory to make sure all UCSs are affected by tests
for key in conf.PLUGINS[const.PLUGINS].keys():
if key in conf.PLUGINS[const.INVENTORY].keys():
plugin_obj = conf.PLUGINS[const.INVENTORY][key]
self._inventory[key] = importutils.import_object(plugin_obj)
self.ucs_count = len(self._inventory['ucs_plugin']._inventory)
def tearDown(self):
"""Tear down our tests"""
try:
port = db.port_get(self.net_id, self.port_id)
self._l2network_multiblade.delete_port([tenant_id, self.net_id,
self.port_id])
except exc.NetworkNotFound:
# We won't always have a port to remove
pass
except exc.PortNotFound:
# We won't always have a port to remove
pass
try:
net = db.network_get(self.net_id)
self._l2network_multiblade.delete_network([tenant_id, self.net_id])
except exc.NetworkNotFound:
# We won't always have a network to remove
pass
db.clear_db()
def test_create_network(self):
"""Support for the Quantum core API call"""
LOG.debug("test_create_network - START")
# Create the network in the test DB, then with the model
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
networks = self._l2network_multiblade.create_network([
tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id,
])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
for network in networks:
self.assertEqual(network[const.NET_ID], self.net_id)
self.assertEqual(network[const.NET_NAME], net_name)
LOG.debug("test_create_network - END")
def test_delete_network(self):
"""Support for the Quantum core API call"""
LOG.debug("test_delete_network - START")
# Create the network in the test DB, then with the model
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
networks = self._l2network_multiblade.delete_network([tenant_id,
self.net_id])
cdb.remove_vlan_binding(self.net_id)
db.network_destroy(self.net_id)
for network in networks:
self.assertEqual(network[const.NET_ID], self.net_id)
self.assertEqual(network[const.NET_NAME], net_name)
LOG.debug("test_delete_network - END")
def test_delete_networkDNE(self):
"""Support for the Quantum core API call"""
LOG.debug("test_delete_networkDNE - START")
self.assertRaises(exc.NetworkNotFound,
self._l2network_multiblade.delete_network,
[tenant_id, net_id])
LOG.debug("test_delete_networkDNE - END")
def test_update_network(self):
"""Support for the Quantum core API call"""
LOG.debug("test_update_network - START")
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
net_details = db.network_update(self.net_id, tenant_id,
name=new_net_name)
networks = self._l2network_multiblade.update_network([
tenant_id,
self.net_id,
{'name': new_net_name},
])
for network in networks:
self.assertEqual(network[const.NET_ID], self.net_id)
self.assertEqual(network[const.NET_NAME], new_net_name)
LOG.debug("test_update_network - END")
def test_update_networkDNE(self):
"""Support for the Quantum core API call"""
LOG.debug("test_update_networkDNE - START")
self.assertRaises(exc.NetworkNotFound,
self._l2network_multiblade.update_network,
[tenant_id, net_id, {'name': new_net_name}])
LOG.debug("test_update_networkDNE - END")
def test_get_all_networks(self):
"""Not implemented for this model"""
pass
def test_get_network_details(self):
"""Not implemented for this model"""
pass
def test_create_port(self):
"""Support for the Quantum core API call"""
LOG.debug("test_create_port - START")
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
self.port_id = db.port_create(self.net_id, port_state)[const.UUID]
port = self._l2network_multiblade.create_port([tenant_id,
self.net_id,
port_state,
self.port_id])
self.assertEqual(self.port_id, port[0][const.PORTID])
LOG.debug("test_create_port - END")
def test_delete_port(self):
"""Support for the Quantum core API call"""
LOG.debug("test_delete_port - START")
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
self.port_id = db.port_create(self.net_id, port_state)[const.UUID]
self._l2network_multiblade.create_port([tenant_id,
self.net_id,
port_state, self.port_id])
port = self._l2network_multiblade.delete_port([tenant_id,
self.net_id,
self.port_id])
self.assertEqual(self.port_id, port[0][const.PORTID])
# Recreating port so tear down doesn't cause an error
self.port_id = db.port_create(self.net_id, port_state)[const.UUID]
self._l2network_multiblade.create_port([tenant_id,
self.net_id,
port_state, self.port_id])
LOG.debug("test_delete_port - END")
def test_get_all_ports(self):
"""Not implemented for this model"""
pass
def test_update_port(self):
"""Not implemented for this model"""
pass
def test_update_portDNE(self):
"""Not implemented for this model"""
pass
def test_update_port_networkDNE(self):
"""Not implemented for this model"""
pass
def test_port_details(self):
"""Not implemented for this model"""
pass
def test_plug_interface(self):
"""Support for the Quantum core API call"""
LOG.debug("test_plug_interface - START")
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
self.port_id = db.port_create(self.net_id, port_state)[const.UUID]
self._l2network_multiblade.create_port([tenant_id,
self.net_id,
port_state, self.port_id])
interface = self._l2network_multiblade.plug_interface(
[tenant_id, self.net_id, self.port_id, interface_id])
port = db.port_set_attachment(self.net_id, self.port_id, interface_id)
self.assertEqual(self.port_id, interface[0][const.PORTID])
self.assertEqual(port[const.INTERFACEID], interface_id)
LOG.debug("test_plug_interface - END")
def test_plug_interface_networkDNE(self):
"""Support for the Quantum core API call"""
LOG.debug("test_plug_interface_networkDNE - START")
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
self.port_id = db.port_create(self.net_id, port_state)[const.UUID]
self._l2network_multiblade.create_port([tenant_id,
self.net_id,
port_state, self.port_id])
self.assertRaises(exc.NetworkNotFound,
self._l2network_multiblade.plug_interface,
[tenant_id, net_id, self.port_id, interface_id])
LOG.debug("test_plug_interface_networkDNE - END")
def test_plug_interface_portDNE(self):
"""Support for the Quantum core API call"""
LOG.debug("test_plug_interface_portDNE - START")
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
self.assertRaises(exc.PortNotFound,
self._l2network_multiblade.plug_interface,
[tenant_id, self.net_id, port_id, interface_id])
LOG.debug("test_plug_interface_portDNE - START")
def test_unplug_interface(self):
"""Support for the Quantum core API call"""
LOG.debug("test_unplug_interface - START")
self.net_id = db.network_create(tenant_id, net_name)[const.UUID]
self._l2network_multiblade.create_network([tenant_id,
net_name,
self.net_id,
vlan_name(self.net_id),
vlan_id])
cdb.add_vlan_binding(vlan_id, vlan_name(self.net_id), self.net_id)
self.port_id = db.port_create(self.net_id, port_state)[const.UUID]
self._l2network_multiblade.create_port([tenant_id,
self.net_id,
port_state, self.port_id])
self._l2network_multiblade.plug_interface([tenant_id, self.net_id,
self.port_id, interface_id])
db.port_set_attachment(self.net_id, self.port_id, interface_id)
interface = self._l2network_multiblade.unplug_interface([tenant_id,
self.net_id,
self.port_id])
self.assertEqual(self.port_id, interface[0][const.PORTID])
LOG.debug("test_unplug_interface - END") | quantum/plugins/cisco/tests/unit/test_l2network_multi_blade.py |
import sys
import os
import tweepy
import json
class TokenManager:
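"""Loads, validates, and persists Twitter API credentials for tweepy."""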
def write_config_json(self, api_key, api_secret, access_token, access_token_secret):
cfg_dict = {}
cfg_dict['api_key'] = api_key
cfg_dict['api_secret'] = api_secret
cfg_dict['access_token'] = access_token
cfg_dict['access_token_secret'] = access_token_secret
json_str = json.dumps(cfg_dict, indent=4)
if not os.path.exists('config'):
os.makedirs('config')
with open('config/config.json', 'w') as f:
f.write(json_str)
def test_api(self, api_key, api_secret, access_token, access_token_secret):
auth = tweepy.OAuthHandler(api_key, api_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, retry_count=3, retry_delay=5, timeout=100, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
try:
api.home_timeline()
print('\nAPI credentials validated...!')
if not os.path.isfile('config/config.json'):
self.write_config_json(api_key, api_secret, access_token, access_token_secret)
return api
except tweepy.error.TweepError as tweeperror:
# Twitter API error codes are integers in the decoded JSON error payload
if tweeperror.message[0]['code'] == 89:
print('\nInvalid or expired token. Please verify your credentials and try again\n')
elif tweeperror.message[0]['code'] == 420:
print('\nYou are currently rate-limited. Please wait 15 minutes and try again.\n')
else:
raise
sys.exit(0)
def first_cfg_api(self):
print('Looks like you have not configured your API credentials yet. See https://developer.twitter.com/ for details\n')
api_key = input('Please enter your API key:\n')
api_secret = input('Please enter your API secret key:\n')
access_token = input('Please enter your access token:\n')
access_token_secret = input('Please enter your access token secret:\n')
return self.test_api(api_key, api_secret, access_token, access_token_secret)
def init_api(self):
try:
with open('config/config.json') as f:
token = json.load(f)
api = self.test_api(token['api_key'], token['api_secret'], token['access_token'], token['access_token_secret'])
return api
except IOError:
api = self.first_cfg_api()
return api | modules/manager.py |
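# Usage sketch (not part of the original module; assumes tweepy 3.x, since
# `wait_on_rate_limit_notify` and `tweepy.error.TweepError` were removed in v4):
#
#   manager = TokenManager()
#   api = manager.init_api()   # reads config/config.json, or prompts on first run
#   for status in api.home_timeline(count=5):
#       print(status.text)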
| 0.160332 | 0.038142 |
from transformers.modeling_roberta import RobertaForMultipleChoice
from transformers.modeling_outputs import MultipleChoiceModelOutput
from torch.nn import CrossEntropyLoss
class RobertaForVariableMultipleChoice(RobertaForMultipleChoice):
def forward(
self,
input_ids=None,
token_type_ids=None,
attention_mask=None,
labels=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
output_attentions=None,
output_hidden_states=None,
return_dict=None,
):
return_dict = (
return_dict if return_dict is not None else self.config.use_return_dict
)
num_choices = (
input_ids.shape[1] if input_ids is not None else inputs_embeds.shape[1]
)
flat_input_ids = (
input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None
)
flat_position_ids = (
position_ids.view(-1, position_ids.size(-1))
if position_ids is not None
else None
)
flat_token_type_ids = (
token_type_ids.view(-1, token_type_ids.size(-1))
if token_type_ids is not None
else None
)
flat_attention_mask = (
attention_mask.view(-1, attention_mask.size(-1))
if attention_mask is not None
else None
)
flat_inputs_embeds = (
inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1))
if inputs_embeds is not None
else None
)
outputs = self.roberta(
flat_input_ids,
position_ids=flat_position_ids,
token_type_ids=flat_token_type_ids,
attention_mask=flat_attention_mask,
head_mask=head_mask,
inputs_embeds=flat_inputs_embeds,
output_attentions=output_attentions,
output_hidden_states=output_hidden_states,
return_dict=return_dict,
)
pooled_output = outputs[1]
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
if getattr(self, "zero_missing_choices", True):
missing_choices = (
(input_ids.sum(dim=2) == 0).flatten()
if input_ids is not None
else (inputs_embeds.sum(dim=2) == 0).flatten()
)
logits[missing_choices] = 0.0
reshaped_logits = logits.view(-1, num_choices)
loss = None
if labels is not None:
loss_fct = CrossEntropyLoss()
loss = loss_fct(reshaped_logits, labels)
if not return_dict:
output = (reshaped_logits,) + outputs[2:]
return ((loss,) + output) if loss is not None else output
return MultipleChoiceModelOutput(
loss=loss,
logits=reshaped_logits,
hidden_states=outputs.hidden_states,
attentions=outputs.attentions,
    ) | RoBERTa/adapted_hf.py |
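# Usage sketch (not from the original repo; assumes transformers 3.x, where
# `transformers.modeling_roberta` is a top-level module, and hypothetical toy
# tensors; missing choice slots are encoded as all-zero input_id rows):
#
#   import torch
#   model = RobertaForVariableMultipleChoice.from_pretrained('roberta-base')
#   input_ids = torch.zeros(1, 3, 8, dtype=torch.long)    # 1 example, 3 choice slots
#   input_ids[0, :2, :] = torch.randint(4, 100, (2, 8))   # only 2 real choices
#   attention_mask = (input_ids != 0).long()
#   out = model(input_ids=input_ids, attention_mask=attention_mask,
#               labels=torch.tensor([0]), return_dict=True)
#   out.logits.shape  # (1, 3); the logit of the empty third slot is zeroed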
| 0.798069 | 0.229654 |
import time
import datetime
import re
import argparse
from mylogging import logger
from selenium import webdriver
import pandas as pd
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from urllib3.exceptions import MaxRetryError
from selenium.common.exceptions import WebDriverException
# UTILS
date_regex = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+$")
# PARSE DATA
def parse_data(file):
data = pd.read_excel(file, header=0, index_col=[0, 1, 2])
return data
# ENTER DATA
class DataInserter:
def __init__(self, data, selenium_url, disable_all=False, max_tries=10):
self.data = data
self.driver = None
self.disable_all = disable_all
self.logged_in = False
self.is_remote = True
try:
self.driver = webdriver.Firefox()
self.is_remote = False
except WebDriverException as e:
logger.info('Could not run firefox locally. Switching to remote option. Error message: ' + str(e))
n_tries = 0
logger.info("Connecting to selenium at " + selenium_url)
while self.driver is None:
try:
self.driver = webdriver.Remote(selenium_url, DesiredCapabilities.FIREFOX)
logger.info('Successfully connected to selenium')
except MaxRetryError:
n_tries += 1
logger.warning('Remote webdriver is not running yet ({}/{})...'.format(n_tries, max_tries))
if n_tries >= max_tries:
raise Exception('Remote webdriver does not seem to be running...')
else:
time.sleep(1)
def navigate_to_page(self):
sport_db_url = 'https://www.sportdb.ch'
logger.debug('Navigating to %s', sport_db_url)
self.driver.get(sport_db_url)
logger.debug('Navigated to %s', sport_db_url)
def login(self, username, password):
logger.debug('Filling out login form...')
username_field = self.driver.find_element_by_id('j_username')
username_field.clear()
username_field.send_keys(username)
username_field = self.driver.find_element_by_id('j_password')
username_field.clear()
username_field.send_keys(password)
logger.debug('Clicking login button')
login = self.driver.find_element_by_id('ButtonLogin')
login.click()
self.logged_in = True
logger.debug('Clicked login button')
src = self.driver.page_source
if 'Bitte überprüfen Sie Benutzername und Passwort' in src or 'Bitte Benutzername und Passwort angeben' in src:
raise Exception('Something went wrong. Most likely, you provided the wrong username or password')
def to_awk(self, course_id):
logger.debug('Browsing to AWK with course id %s...', course_id)
if course_id is None:
logger.debug('Mode: manual')
input('No course_id provided. Manually navigate to "Anwesenheitskontrolle" for your course.')
else:
logger.debug('Mode: automatic')
self.driver.get('https://www.sportdb.ch/extranet/kurs/kursEditAwk.do?kursId={}'.format(course_id))
logger.debug('Waiting a bit for page to fully load')
time.sleep(1)
if 'Error' in self.driver.title:
raise Exception('Something went wrong. Most likely, you entered the wrong course id.')
logger.debug('Browsed to AWK...')
def set_attendance(self, attended, box, name, date):
if attended:
logger.debug("Attended")
attended = attended and not self.disable_all
logger.debug('Setting attendance for %s on %s', name, date)
if attended and not box.is_selected():
logger.debug('{} attended on {}'.format(name, date))
box.click()
return True
elif not attended and box.is_selected():
logger.debug('{} did not attend on {}'.format(name, date))
box.click()
return True
elif attended and box.is_selected():
logger.debug('{} attended on {} (already entered)'.format(name, date))
elif not attended and not box.is_selected():
logger.debug('{} did not attend on {} (already entered)'.format(name, date))
else:
logger.error('Program error')
assert False
return False
def enter_data(self):
any_changed = False
# match ids and days
logger.debug('Determining days on the current page')
days = self.driver.find_elements_by_xpath(".//*[contains(@class, 'awkDay')]//span")
days = [d.text for d in days if date_regex.match(d.text)]
logger.debug('Determining day ids on the current page')
day_ids = self.driver.find_elements_by_xpath(".//*[contains(@class, 'select-all leiter')]")
day_ids = [d.get_attribute('name') for d in day_ids]
logger.debug("Asserting length of results matches: \t\n%s, \t\n%s", days, day_ids)
assert(len(days) == len(day_ids))
day_to_id = {day: day_id for day, day_id in zip(days, day_ids)}
logger.debug('Found days: %s', day_to_id)
# enter data
logger.debug('Entering data...')
for column in self.data:
date = column.to_pydatetime().strftime('%d.%m.%Y')
for key, val in self.data[column].iteritems():
js_id = key[0]
last_name = key[1]
first_name = key[2]
name = first_name + ' ' + last_name
attended = val == 'x'
if date in day_to_id:
day_id = day_to_id[date]
path = ".//input[contains(@name, 'kursAktivitaetTeilnehmerMap({})')][contains(@value, 'I-{}')]"\
.format(day_id, js_id)
logger.debug('Locating checkbox for %s (%s) on %s by path %s', name, js_id, date, path)
box = self.driver.find_element_by_xpath(path)
logger.debug('Filling out checkbox')
changed = self.set_attendance(attended, box, name, date)
any_changed = any_changed or changed
logger.debug('Filled out checkbox')
logger.debug('Wait a bit for page to process changes')
time.sleep(1)
# save
if any_changed:
logger.debug('Saving data...')
save = self.driver.find_element_by_id('formSave')
save.click()
logger.debug('Saved data')
else:
logger.debug('Not saving, since there were no changes.')
def to_previous(self):
previous = self.driver.find_element_by_id('previousLink')
c = previous.get_attribute("class")
if 'disabled' not in c:
previous.click()
logger.debug('Waiting a bit for page to fully load')
time.sleep(1)
return True
else:
return False
def __del__(self):
if self.logged_in:
logger.debug('Logging out...')
logout = self.driver.find_element_by_id('logout')
logout.click()
logger.debug('Closing driver...')
self.driver.close()
def run(data_file, username, password, course_id, disable_all, selenium_url, test):
logger.debug("Running...")
# parse data
data = parse_data(data_file)
# navigate
ins = DataInserter(data, selenium_url, disable_all)
ins.navigate_to_page()
ins.login(username, password)
ins.to_awk(course_id)
if not test:
# enter data
while True:
logger.info('Entering data...')
ins.enter_data()
logger.info('Entered data. Going to previous page...')
more = ins.to_previous()
if not more:
break
logger.info("Einträge vollständig. Keine Garantie für Korrektheit, bitte Daten überprüfen. Vergiss nicht, den Kurs noch abzuschliessen.")
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Helper for filling in the attendance check ("Anwesenheitskontrolle") on sportdb')
    parser.add_argument('data_file', action='store', type=str,
                        help='File containing the data. See data/reference.xls for a reference file (last argument)')
    parser.add_argument('--username', dest='username', action='store',
                        type=str, help='Username for sportdb (e.g. js-123456)', required=True)
    parser.add_argument('--password', dest='password', action='store',
                        type=str, default=None,
                        help='Password for <PASSWORD> (default: interactive prompt)')
    parser.add_argument('--course-id', dest='course_id', action='store', default=None, type=str,
                        help='Course ID (e.g. 1234567). Can be read off the URL of the attendance check. If omitted, you will be asked interactively to navigate to the correct attendance check.')
    parser.add_argument('--disable-all', dest='disable_all', action='store_true', default=False,
                        help='Disable attendance for all people and dates in the file.')
    parser.add_argument('--test', action='store_true', default=False,
                        help='Login & logout only.')
    parser.add_argument('--selenium-url', default="http://selenium:4444/wd/hub",
                        help='URL at which Selenium can be reached.')
args = parser.parse_args()
if args.password is None:
password = input("Password? ")
else:
password = args.password
    run(args.data_file, args.username, password, args.course_id, args.disable_all, args.selenium_url, args.test) | code/insert_data.py |
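# Data layout sketch (inferred from parse_data's index_col=[0, 1, 2]; values are
# hypothetical): three index columns (participant id, last name, first name)
# followed by one column per course date, with an 'x' marking attendance:
#
#   js_id   | last     | first | 01.03.2021 | 08.03.2021
#   1234567 | Muster   | Max   | x          |
#   7654321 | Beispiel | Erika | x          | x
#
# Invocation sketch (hypothetical values):
#   python insert_data.py data/attendance.xls --username js-123456 --course-id 1234567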
| 0.305801 | 0.087603 |
import numpy as np
from utils.proto_msp import ProtoMSP
import time
import pickle
from utils.misc import print_params
from utils.misc import load_features
from utils.misc import parse_args
from utils.misc import create_episode
from utils.misc import get_features
def run_episode(train_mean, cl_data_file, model, n_way=5, n_support=5, n_query=15):
z_all, y = create_episode(cl_data_file, n_way, n_support, n_query)
model.train_mean = train_mean
time_list = [time.time(), ]
model.opt.reduced_dim = 4
model.method_sub(z_all, model.sub_train_mean)
time_list.append(time.time())
model.method_sub(z_all, model.mean_and_norm)
time_list.append(time.time())
model.method_sub(z_all, model.mean_and_norm_ber)
time_list.append(time.time())
model.method_project(z_all, model.mean_and_norm_ber, model.calc_pca)
time_list.append(time.time())
model.method_project(z_all, model.mean_and_norm_ber, model.calc_ica)
time_list.append(time.time())
model.opt.reduced_dim = 10
model.method_cluster_baseline(z_all)
time_list.append(time.time())
model.method_proj_and_cluster(z_all, model.mean_and_norm_ber, model.calc_pca)
time_list.append(time.time())
model.method_proj_and_cluster(z_all, model.mean_and_norm_ber, model.calc_ica)
time_list.append(time.time())
model.method_mean_shift(z_all)
time_list.append(time.time())
model.method_project_and_mean_shift(z_all, model.mean_and_norm_ber, model.calc_pca)
time_list.append(time.time())
model.method_project_and_mean_shift(z_all, model.mean_and_norm_ber, model.calc_ica)
time_list.append(time.time())
time_list = [i - j for i, j in zip(time_list[1:], time_list[:-1])]
return np.asarray(time_list)
def run_exp(params, verbose):
print_params(params)
n_episodes = 10000
few_shot_params = dict(n_way=params.n_way, n_support=params.n_shot)
model = ProtoMSP(opt=params)
model = model.cuda()
train_mean, cl_data_file = load_features(params)
time_list = []
name = ['baseline', 'sub reg', 'sub', 'pca', 'ica',
'cluster', 'pca cluster', 'ica cluster',
'msp', 'pca msp', 'ica msp',
]
for i in range(1, n_episodes + 1):
times = run_episode(train_mean, cl_data_file, model, n_query=params.n_query, **few_shot_params)
time_list.append(times)
if i % verbose == 0:
tl = sum(time_list)/len(time_list)
msg = ''
for k, v in zip(name, tl):
msg += f'{k}: {v:.2e}, '
print(msg)
return
def table_exp():
params = parse_args('test')
for ds in ['mini', 'tiered']:
for shot in [1, 5]:
params.dataset = ds
params.n_shot = shot
run_exp(params, verbose=500)
if __name__ == '__main__':
get_features()
table_exp() | src/exp_run_time.py |
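# Output sketch (hypothetical timings): every `verbose` episodes the script
# prints one line of mean per-episode runtimes, e.g.
#   baseline: 1.23e-03, sub reg: 1.10e-03, sub: 1.15e-03, pca: 2.40e-03, ...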
| 0.307774 | 0.315327 |
#%%##############################
# How do we create and use lists? #
#################################
# Creating a list with n elements
# list_name = [element0, element1, ...element(n-1)]
odd = [1, 3, 5, 7, 9]
# Lists come in various shapes
# As in d and e, a list can hold numbers and strings together as elements.
a = []
b = [1,2,3]
c = ['Life','is','too','short']
d = [1,2,'Life','is']
e = [1,2,['Life','is']]
#%%#########################
# Indexing and slicing lists #
############################
##################
# Indexing a list #
##################
a = [1,2,3]
a[0]
# 1
a[0]+a[2] # 1+3
# 4
a[-1] # The last element
# 3
# A list of lists: an example of a list containing another list
a = [1,2,3,['a','b','c']]
a[0]
# 1
a[-1]
# ['a', 'b', 'c']
a[3]
# ['a', 'b', 'c']
a[-1][0]
# 'a'
a[-1][1]
# 'b'
a[-1][2]
# 'c'
#############################
# Indexing a triply nested list #
#############################
# A slightly more complex indexing example
# Triple nesting is complex and rarely used, but worth knowing.
a = [1,2,['a','b',['Life','is']]]
a[2][2][0]
# 'Life'
a[2][2][1]
# 'is'
####################
# Slicing a list #
####################
a = [1,2,3,4,5]
a[0:2]
# [1, 2]
# Compare with string slicing:
# slicing is used in exactly the same way!
a = "12345"
a[0:2]
# '12'
# A few more examples
a = [1,2,3,4,5]
b = a[:2] # from the start up to index 2 (exclusive)
c = a[2:] # from index 2 to the end
b
# [1, 2]
c
# [3, 4, 5]
#################################
# Slicing nested lists #
#################################
# Slicing works exactly the same way for nested lists.
a = [1,2,3,['a','b','c'], 4,5]
a[2:5]
#[3, ['a', 'b', 'c'], 4]
a[3][:2]
# ['a', 'b']
#%%#############
# List operators #
################
# 1. Adding lists (+)
a = [1,2,3]
b = [4,5,6]
a + b
# [1, 2, 3, 4, 5, 6]
# 2. Repeating a list (*)
a = [1,2,3]
a * 3
# [1, 2, 3, 1, 2, 3, 1, 2, 3]
# A list operation mistake beginners often make
a = [1,2,3]
# What is the result of typing the following?
# a[2] + "hi"
# TypeError: unsupported operand type(s) for +: 'int' and 'str'
# To fix the error, write it like this:
str( a[2] )+"hi"
# '3hi'
#%%#########################
# Modifying and deleting list elements #
############################
# 1. Changing a single value in a list
a = [1,2,3]
a[2] = 4
a
# [1, 2, 4]
# 2. Changing a contiguous range of values in a list
a = [1,2,3]
a[1:2]
# [2]
a[1:2] = ['a','b','c']
a
# [1, 'a', 'b', 'c', 3]
# Note that this is not [1, ['a', 'b', 'c'], 3]
# A caveat when modifying lists:
# assigning to a single element, as below,
# rather than to a slice as above, gives a different result.
a = [1,2,3]
a[1] = ['a','b','c']
a
# [1, ['a', 'b', 'c'], 3]
# 3. Deleting list elements with []
a = [1,2,3]
a[2] = 4
a[1:2] = ['a','b','c']
a
# [1, 'a', 'b', 'c', 4]
a[1:3] = []
a
# [1, 'c', 4]
# 4. Deleting list elements with the del statement
# del obj
# is Python's built-in deletion statement.
# del a[x]
# deletes the element at index x.
# del a[x:y]
# deletes the elements from index x up to index y.
a
# [1, 'c', 4]
del a[1]
a
# [1, 4]
#%%##################
# List-related functions #
#####################
# Appending an element (append)
a = [1,2,3]
a.append(4)
a
# [1, 2, 3, 4]
a.append( [5,6] ) # appends a list inside the list
a
# [1, 2, 3, 4, [5, 6]]
# Sorting a list (sort)
# Note that it sorts in place but does not return the sorted list.
# To get the sorted list back, use b = sorted(a)
a = [1,4,3,2]
a.sort()
a
# [1, 2, 3, 4]
a = ['a','c','b']
a.sort()
a
# ['a', 'b', 'c']
# Reversing a list (reverse)
a = ['a','c','b']
a.reverse()
a
# ['b', 'c', 'a']
# Finding a position (index)
# index(x) returns the position of x if the list
# contains the value x, and raises a ValueError otherwise.
a = [1,2,3]
a.index(3)
# 2
a.index(1)
# 0
a.index(0)
# ValueError: 0 is not in list
# Inserting an element (insert)
# insert(a,b) inserts b at position a in the list.
# Caution: Python lists are indexed from 0.
a = [1,2,3]
a.insert(0,4)
a
# [4, 1, 2, 3]
a.insert(3,5)
a
# [4, 1, 2, 5, 3]
# Removing an element (remove)
# remove(x) deletes the first occurrence of x.
a = [1,2,3,1,2,3]
a.remove(3)
a
# [1, 2, 1, 2, 3]
a.remove(3)
a
# [1, 2, 1, 2]
# Popping an element (pop)
# pop() returns the last element of the list and removes it.
# pop(x) returns the element at index x and removes it.
a = [1,2,3]
a.pop()
# 3
a
# [1, 2]
a = [1,2,3]
a.pop(1)
# 2
a
# [1, 3]
# Counting occurrences of an element x (count)
# count(x) returns how many times x appears in the list.
a = [1,2,3,1]
a.count(1)
# 2
# Extending a list (extend)
# In extend(x), x must be a list (in fact, any iterable works);
# its elements are appended to the original list a.
a = [1,2,3]
a.extend( [4,5] )
a
# [1, 2, 3, 4, 5]
b = [6,7]
a.extend(b)
a
# [1, 2, 3, 4, 5, 6, 7] | python/en/archive/books/jump2python/j2p-02_3-list.py |
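#%%#################################
# sort() vs sorted(): a supplementary sketch (not in the original text)
####################################
a = [3, 1, 2]
b = sorted(a)   # sorted() returns a new sorted list; a is unchanged
a.sort()        # sort() sorts a in place and returns None
print(a, b)
# [1, 2, 3] [1, 2, 3]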
| 0.118704 | 0.430536 |
import os
import gzip
import re
import shutil
ptrs = []
def add_patterns():
    ptrs.append(re.compile(r'^\[[0-2][0-9]:[0-9][0-9]:[0-9][0-9]\] \[Client thread/ERROR\]: '))
    ptrs.append(re.compile(r'^\[[0-2][0-9]:[0-9][0-9]:[0-9][0-9]\] \[Client thread/WARN\]: '))
def matching(txt):
for ptr in ptrs:
if re.search(ptr, txt):
return True
return False
def read_file(fpath):
    if re.search(r'\.log\.gz$', fpath, re.IGNORECASE):
        return gzip.open(fpath, 'rt', encoding='cp932', errors='ignore'), 'gzip'
    elif re.search(r'\.log$', fpath, re.IGNORECASE):
        return open(fpath, 'r', encoding='cp932', errors='ignore'), 'log'
else:
return False, ''
def conv(root, fname, backup=''):
if backup == '': backup = os.path.join(root, 'backup')
if os.path.isdir(backup) == False:
if os.path.isfile(backup):
print('Error!')
return
os.makedirs(backup)
fpath = os.path.join(root, fname)
bpath = os.path.join(backup, fname)
fsize = os.path.getsize(fpath) // 1024
f, ex = read_file(fpath)
if f == False:
print(fpath + ' is not log file.\n')
return
print('filename: ' + fpath)
print('filesize: ' + str(fsize) + ' KB')
shutil.copy2(fpath, bpath)
print('backup path: ' + bpath)
new_data = ''
try:
line = f.readline()
except:
print(fpath + ' is not log file.\n')
return
while line:
if matching(line) == False: new_data += line
line = f.readline()
f.close()
if ex == 'log':
with open(fpath, 'w', encoding='cp932') as f:
f.write(new_data)
elif ex == 'gzip':
with gzip.open(fpath, 'wt', encoding='cp932') as f:
f.write(new_data)
print('complete.\n')
def main():
    root = 'C:/Users/User/AppData/Roaming/.minecraft/logs'  # log folder
    backup = ''  # backup folder
add_patterns()
ls = os.listdir(root)
length = str(len(ls))
for i, name in enumerate(ls):
print(str(i+1) + '/' + length + ' files')
conv(root, name, backup)
if __name__ == '__main__':
    main() | MCLogConverter.py |
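# Pattern sketch (hypothetical addition, not in the original script): further
# filters can be appended in add_patterns() the same way, e.g. to also drop
# chat lines:
#   ptrs.append(re.compile(r'^\[[0-2][0-9]:[0-9][0-9]:[0-9][0-9]\] \[Client thread/INFO\]: \[CHAT\] '))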
| 0.063824 | 0.055669 |
from django.http import HttpResponse, HttpResponseBadRequest
from django.shortcuts import render, redirect
from .models import Customuser, Application
from django.contrib.auth import login, logout, authenticate
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from .forms import CustomuserCreationForm, ApplicationForm
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from .decorators import not_moderator_required, writer_required, moderator_required, not_frozen
from accounts.models import Wallet
# Create your views here.
def signup_view(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
customform = CustomuserCreationForm(request.POST)
if(form.is_valid()):
builtin_user = form.save()
if(customform.is_valid()):
customised_user = customform.save(commit=False)
customised_user.djangouser = builtin_user
customised_user.save()
wallet = Wallet(balance=0.00, owner=builtin_user)
wallet.save()
login(request, builtin_user)
return redirect('home-view')
else:
builtin_user.delete()
else:
form = UserCreationForm()
customform = CustomuserCreationForm()
return render(request, 'users/signup.html', { 'djangoform': form, 'customform': customform })
def login_view(request):
if request.method == 'POST':
form = AuthenticationForm(data=request.POST)
if(form.is_valid()):
user = form.get_user()
customuser = Customuser.objects.get(djangouser=user)
login(request, user)
return redirect(f'{reverse("home-view")}?name={customuser.name}')
else:
form = AuthenticationForm()
return render(request, 'users/login.html', { 'form': form })
@login_required(login_url='/users/login/')
def logout_view(request):
if request.method == 'POST':
logout(request)
return redirect('home-view')
else:
return HttpResponse("Illegal")
@login_required(login_url='/users/login/')
@not_frozen
def settings_view(request):
userinfo = Customuser.objects.get(djangouser=request.user)
is_mod = userinfo.usertype >= Customuser.Category.MODERATOR
return render(request, 'users/settings.html', { 'is_mod': is_mod, 'userinfo': userinfo })
@login_required(login_url='/users/login/')
@moderator_required
@not_frozen
def freeze_view(request):
available_users = Customuser.objects.all().exclude(usertype=Customuser.Category.MODERATOR).filter(frozen=False)
return render(request, 'users/freeze.html', { 'available_users': available_users })
@login_required(login_url='/users/login/')
@moderator_required
@not_frozen
def unfreeze_view(request):
banned_users = Customuser.objects.all().exclude(usertype=Customuser.Category.MODERATOR).filter(frozen=True)
return render(request, 'users/unfreeze.html', { 'banned_users': banned_users })
@login_required(login_url='/users/login/')
@moderator_required
@not_frozen
def freeze_id(request, id):
query = Customuser.objects.filter(pk=id, frozen=False).exclude(usertype=Customuser.Category.MODERATOR)
    if query.exists():
query.update(frozen=True)
return redirect('users:freeze-view')
else:
return HttpResponseBadRequest("This is not a valid request")
@login_required(login_url='/users/login/')
@moderator_required
@not_frozen
def unfreeze_id(request, id):
query = Customuser.objects.filter(pk=id, frozen=True).exclude(usertype=Customuser.Category.MODERATOR)
    if query.exists():
query.update(frozen=False)
return redirect('users:unfreeze-view')
else:
return HttpResponseBadRequest("This is not a valid request")
@login_required(login_url='/users/login/')
@not_moderator_required
@not_frozen
def privilege_application(request):
cust_err = None
customuser = Customuser.objects.get(djangouser=request.user)
if request.method == 'POST':
        if Application.objects.filter(djangouser=request.user).exists():
return redirect('users:already-applied')
form = ApplicationForm(request.POST)
if form.is_valid():
if form.cleaned_data['totype'] > customuser.usertype:
application = form.save(commit=False)
application.djangouser = request.user
application.save()
return render(request, 'users/apply-success.html', { 'application': application })
else:
cust_err = "Requested Privileges must be higher than current privilege"
else:
form = ApplicationForm()
return render(request, 'users/apply.html', { 'form': form, 'cust_err': cust_err })
@login_required(login_url='/users/login/')
@not_moderator_required
@not_frozen
def already_applied(request):
return render(request, 'users/already-applied.html')
@login_required(login_url='/users/login/')
@moderator_required
@not_frozen
def grant_privilege(request):
applications = Application.objects.all()
return render(request, 'users/grant-privilege.html', { 'applications': applications })
@login_required(login_url='/users/login/')
@moderator_required
@not_frozen
def grant_privilege_id(request, id):
query = Application.objects.filter(pk=id)
    if query.exists():
customuser = Customuser.objects.filter(djangouser=query[0].djangouser)
customuser.update(usertype=query[0].totype)
query[0].delete()
return redirect('users:grant-privilege')
else:
return HttpResponseBadRequest("This is not a valid request")
@login_required(login_url='/users/login/')
@moderator_required
@not_frozen
def reject_privilege_id(request, id):
query = Application.objects.filter(pk=id)
    if query.exists():
query[0].delete()
return redirect('users:grant-privilege')
else:
        return HttpResponseBadRequest("This is not a valid request") | users/views.py |
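# URLconf sketch (hypothetical; the project's users/urls.py is not shown here,
# but the redirects above assume names like these under app_name = 'users'):
#
#   from django.urls import path
#   from . import views
#   app_name = 'users'
#   urlpatterns = [
#       path('signup/', views.signup_view, name='signup-view'),
#       path('login/', views.login_view, name='login-view'),
#       path('logout/', views.logout_view, name='logout-view'),
#       path('freeze/', views.freeze_view, name='freeze-view'),
#       path('freeze/<int:id>/', views.freeze_id, name='freeze-id'),
#       path('unfreeze/', views.unfreeze_view, name='unfreeze-view'),
#       path('apply/', views.privilege_application, name='apply'),
#       path('grant/', views.grant_privilege, name='grant-privilege'),
#   ]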
| 0.348202 | 0.062732 |
from datetime import datetime, timezone
from enum import Enum
from typing import List, Any
from dateutil import parser
class KeyType(Enum):
DIMENSION = 1
TIMESTAMP = 2
UNKNOWN = 10
class Key:
"""
A record in the store is identified by a key
"""
PARTITION = '/'
DIMENSION_PARTITION = ':'
# TODO: Consider adding a * to force parameterization of attributes.
def __init__(self,
key_type: KeyType,
identity: str,
group: str,
dimensions: List[str] = list(),
timestamp: datetime = None) -> None:
"""
Initializes a new key for storing data
:param identity: Primary identity of the record being stored
:param group: Secondary identity of the record
:param timestamp: Optional timestamp that can be used for time range queries
"""
if not identity or identity.isspace():
raise ValueError('`identity` must be present.')
if not group or group.isspace():
raise ValueError('`group` must be present.')
if dimensions and timestamp:
raise ValueError('Both dimensions and timestamp should not be set together.')
if key_type == KeyType.DIMENSION and timestamp:
raise ValueError('`timestamp` should not be set for KeyType.DIMENSION.')
if key_type == KeyType.TIMESTAMP and dimensions:
raise ValueError('`dimensions` should not be set for KeyType.TIMESTAMP.')
self.key_type = key_type
self.identity = identity
self.group = group
self.timestamp = timestamp if not timestamp or timestamp.tzinfo else timestamp.replace(
tzinfo=timezone.utc)
self.dimensions = dimensions
# TODO: Handle '/' and ':' values in dimensions
@property
def dimensions_str(self):
return ':'.join(self.dimensions) if self.dimensions else ''
@staticmethod
def parse(key_string: str) -> 'Key':
""" Parses a flat key string and returns a key """
parts = key_string.split(Key.PARTITION)
key_type = KeyType.DIMENSION
if parts[3]:
key_type = KeyType.TIMESTAMP
return Key(key_type, parts[0], parts[1], parts[2].split(Key.DIMENSION_PARTITION)
if parts[2] else [],
parser.parse(parts[3]) if parts[3] else None)
@staticmethod
def parse_sort_key(identity: str, sort_key_string: str) -> 'Key':
""" Parses a flat key string and returns a key """
parts = sort_key_string.split(Key.PARTITION)
key_type = KeyType.DIMENSION
if parts[2]:
key_type = KeyType.TIMESTAMP
return Key(key_type, identity, parts[0], parts[1].split(Key.DIMENSION_PARTITION)
if parts[1] else [],
parser.parse(parts[2]) if parts[2] else None)
def __str__(self):
""" Returns the string representation of the key"""
return Key.PARTITION.join([self.identity, self.sort_key])
@property
def sort_key(self):
return Key.PARTITION.join(
[self.group, self.dimensions_str,
self.timestamp.isoformat() if self.timestamp else ''])
@property
def sort_prefix_key(self):
if self.key_type == KeyType.DIMENSION:
return Key.PARTITION.join([self.group, self.dimensions_str]
if self.dimensions_str else [self.group, ''])
if self.key_type == KeyType.TIMESTAMP:
return self.sort_key
def __repr__(self):
return self.__str__()
def __eq__(self, other: 'Key') -> bool:
return other and (self.identity, self.group, self.timestamp,
self.dimensions) == (other.identity, other.group, other.timestamp,
other.dimensions)
def __lt__(self, other: 'Key') -> bool:
"""
Does a less than comparison on two keys. A None timestamp is considered
larger than a timestamp that has been set.
"""
if (self.identity, self.group, self.key_type) != (other.identity, other.group,
other.key_type):
return False
if self.key_type == KeyType.TIMESTAMP:
return self.timestamp < other.timestamp
return self.dimensions < other.dimensions
def __gt__(self, other: 'Key') -> bool:
"""
Does a greater than comparison on two keys. A None timestamp is
considered larger than a timestamp that has been set.
"""
if (self.identity, self.group, self.key_type) != (other.identity, other.group,
other.key_type):
return False
if self.key_type == KeyType.TIMESTAMP:
return self.timestamp > other.timestamp
return self.dimensions > other.dimensions
def __hash__(self):
return hash((self.identity, self.group, self.timestamp, self.dimensions_str))
def starts_with(self, other: 'Key') -> bool:
"""
Checks if this key starts with the other key provided. Returns False if key_type, identity
or group are different.
For `KeyType.TIMESTAMP` returns True.
For `KeyType.DIMENSION` does prefix match between the two dimensions property.
"""
if (self.key_type, self.identity, self.group) != (other.key_type, other.identity,
other.group):
return False
if self.key_type == KeyType.TIMESTAMP:
return True
if self.key_type == KeyType.DIMENSION:
if len(self.dimensions) < len(other.dimensions):
return False
            return self.dimensions[0:len(other.dimensions)] == other.dimensions | blurr/core/store_key.py |
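# Round-trip sketch (hypothetical values): a dimension key, its flat string
# form, and a prefix check.
#
#   k = Key(KeyType.DIMENSION, 'user-1', 'session', ['US', 'mobile'])
#   str(k)                  # 'user-1/session/US:mobile/'
#   Key.parse(str(k)) == k  # True
#   k.starts_with(Key(KeyType.DIMENSION, 'user-1', 'session', ['US']))  # True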
from enum import Enum
from typing import List, Any
from dateutil import parser
class KeyType(Enum):
DIMENSION = 1
TIMESTAMP = 2
UNKNOWN = 10
class Key:
"""
A record in the store is identified by a key
"""
PARTITION = '/'
DIMENSION_PARTITION = ':'
# TODO: Consider adding a * to force parameterization of attributes.
def __init__(self,
key_type: KeyType,
identity: str,
group: str,
dimensions: List[str] = list(),
timestamp: datetime = None) -> None:
"""
Initializes a new key for storing data
:param identity: Primary identity of the record being stored
:param group: Secondary identity of the record
:param timestamp: Optional timestamp that can be used for time range queries
"""
if not identity or identity.isspace():
raise ValueError('`identity` must be present.')
if not group or group.isspace():
raise ValueError('`group` must be present.')
if dimensions and timestamp:
raise ValueError('Both dimensions and timestamp should not be set together.')
if key_type == KeyType.DIMENSION and timestamp:
raise ValueError('`timestamp` should not be set for KeyType.DIMENSION.')
if key_type == KeyType.TIMESTAMP and dimensions:
raise ValueError('`dimensions` should not be set for KeyType.TIMESTAMP.')
self.key_type = key_type
self.identity = identity
self.group = group
self.timestamp = timestamp if not timestamp or timestamp.tzinfo else timestamp.replace(
tzinfo=timezone.utc)
self.dimensions = dimensions
# TODO: Handle '/' and ':' values in dimensions
@property
def dimensions_str(self):
return ':'.join(self.dimensions) if self.dimensions else ''
@staticmethod
def parse(key_string: str) -> 'Key':
""" Parses a flat key string and returns a key """
parts = key_string.split(Key.PARTITION)
key_type = KeyType.DIMENSION
if parts[3]:
key_type = KeyType.TIMESTAMP
return Key(key_type, parts[0], parts[1], parts[2].split(Key.DIMENSION_PARTITION)
if parts[2] else [],
parser.parse(parts[3]) if parts[3] else None)
@staticmethod
def parse_sort_key(identity: str, sort_key_string: str) -> 'Key':
""" Parses a flat key string and returns a key """
parts = sort_key_string.split(Key.PARTITION)
key_type = KeyType.DIMENSION
if parts[2]:
key_type = KeyType.TIMESTAMP
return Key(key_type, identity, parts[0], parts[1].split(Key.DIMENSION_PARTITION)
if parts[1] else [],
parser.parse(parts[2]) if parts[2] else None)
def __str__(self):
""" Returns the string representation of the key"""
return Key.PARTITION.join([self.identity, self.sort_key])
@property
def sort_key(self):
return Key.PARTITION.join(
[self.group, self.dimensions_str,
self.timestamp.isoformat() if self.timestamp else ''])
@property
def sort_prefix_key(self):
if self.key_type == KeyType.DIMENSION:
return Key.PARTITION.join([self.group, self.dimensions_str]
if self.dimensions_str else [self.group, ''])
if self.key_type == KeyType.TIMESTAMP:
return self.sort_key
def __repr__(self):
return self.__str__()
def __eq__(self, other: 'Key') -> bool:
return other and (self.identity, self.group, self.timestamp,
self.dimensions) == (other.identity, other.group, other.timestamp,
other.dimensions)
def __lt__(self, other: 'Key') -> bool:
"""
Does a less than comparison on two keys. A None timestamp is considered
larger than a timestamp that has been set.
"""
if (self.identity, self.group, self.key_type) != (other.identity, other.group,
other.key_type):
return False
if self.key_type == KeyType.TIMESTAMP:
return self.timestamp < other.timestamp
return self.dimensions < other.dimensions
def __gt__(self, other: 'Key') -> bool:
"""
Does a greater than comparison on two keys. A None timestamp is
considered larger than a timestamp that has been set.
"""
if (self.identity, self.group, self.key_type) != (other.identity, other.group,
other.key_type):
return False
if self.key_type == KeyType.TIMESTAMP:
return self.timestamp > other.timestamp
return self.dimensions > other.dimensions
def __hash__(self):
return hash((self.identity, self.group, self.timestamp, self.dimensions_str))
def starts_with(self, other: 'Key') -> bool:
"""
Checks if this key starts with the other key provided. Returns False if key_type, identity
or group are different.
For `KeyType.TIMESTAMP` returns True.
For `KeyType.DIMENSION` does prefix match between the two dimensions property.
"""
if (self.key_type, self.identity, self.group) != (other.key_type, other.identity,
other.group):
return False
if self.key_type == KeyType.TIMESTAMP:
return True
if self.key_type == KeyType.DIMENSION:
if len(self.dimensions) < len(other.dimensions):
return False
return self.dimensions[0:len(other.dimensions)] == other.dimensions | 0.640186 | 0.415254 |
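A minimal usage sketch for the Key class above; it assumes the blurr package (and its dateutil dependency) is importable, and the printed sort key is illustrative.

from datetime import datetime, timezone
from blurr.core.store_key import Key, KeyType

dim_key = Key(KeyType.DIMENSION, 'user-1', 'session', dimensions=['US', 'mobile'])
prefix = Key(KeyType.DIMENSION, 'user-1', 'session', dimensions=['US'])

# Prefix match on the dimensions list.
assert dim_key.starts_with(prefix)

# Round-trip through the flat string form: identity/group/dimensions/timestamp.
assert Key.parse(str(dim_key)) == dim_key

ts_key = Key(KeyType.TIMESTAMP, 'user-1', 'session',
             timestamp=datetime(2020, 1, 1, tzinfo=timezone.utc))
print(ts_key.sort_key)  # session//2020-01-01T00:00:00+00:00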
import argparse as ap
def _args_parse():
""" Allows for command line hyperparameter inputs to models
Description
------------
--image_size: Sets up the size of a square image, default is 32*32 for CIFAR10
        --features : Space-separated sizes of the convolutional feature maps; the first ought to be 3, corresponding to the RGB channels
        --norm : Batch-normalisation flag, pass true/false (default is True)
--ptype : Specifies pooling type:
- 'max' (default) for max pooling
- 'average' for average pooling
        --aug : Data augmentation flag, pass true/false (default is False)
        --dropout : Sets the dropout keep probability; p = 1.0 (default) implies nothing is dropped out
--optype : Specifies optimizer type, default set to 'ADAM', if anything else is chosen, RMSProp is used
        --hsize : Specifies the size of the hidden layer in the fully connected layer that follows the conv layers
        --bsize : Specifies the batch size for training
        --eta : Specifies the learning rate (default is 0.01)
Note
----
Assertions and exceptions are not added, please be careful in passing arguments. If in doubt, please allow default values.
For non-square images appropriate modifications need to be made in helper_nn.py
"""
parser = ap.ArgumentParser(description = 'Hyperparameters')
parser.add_argument('--image_size', type = int, dest = 'img_size',
action = 'store', default = 32)
    # argparse applies `type` to each token, so `type = list` would split the string
    # into characters; nargs = '+' with type = int parses e.g. "--features 3 64 128 256".
    parser.add_argument('--features', type = int, nargs = '+', dest = 'conv_features',
                        action = 'store', default = [3, 64, 128, 256])
    # `type = bool` is an argparse pitfall (bool('False') is True), so a small
    # string-to-bool converter is used for the boolean flags instead.
    _str2bool = lambda s: str(s).lower() in ('true', '1', 'yes')
    parser.add_argument('--norm', type = _str2bool, dest = 'normalisation',
                        action = 'store', default = True)
    parser.add_argument('--ptype', dest = 'pooling_type',
                        action = 'store', default = 'max')
    parser.add_argument('--aug', type = _str2bool, dest = 'augmentation',
                        action = 'store', default = False)
    parser.add_argument('--dropout', type = float, dest = 'dropout_p',
                        action = 'store', default = 1.0)
parser.add_argument('--optype', type = str, dest = 'optimization',
action = 'store', default = 'ADAM')
parser.add_argument('--hsize', type = int, dest = 'hidden_layer_size',
action = 'store', default = 500)
parser.add_argument('--bsize', type = int, dest = 'batch_size',
action = 'store', default = 300)
parser.add_argument('--eta', type = float, dest = 'learning_rate',
action = 'store', default = 0.01)
flags = parser.parse_args()
    return flags | parseargs_convnn.py | 0.707 | 0.436562
import argparse
import json
import sys
from os import listdir
from os.path import join
import numpy as np
import pandas as pd
from src.utilities import mkdir_if_needed
def query_yes_no(question, default="no"):
"""Ask a yes/no question via raw_input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is True for "yes" or False for "no".
"""
valid = {"yes": True, "y": True, "ye": True, "no": False, "n": False}
if default is None:
prompt = " [y/n] "
elif default == "yes":
prompt = " [Y/n] "
elif default == "no":
prompt = " [y/N] "
else:
raise ValueError("invalid default answer: '%s'" % default)
while True:
sys.stdout.write(question + prompt)
choice = input().lower()
if default is not None and choice == "":
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write("Please respond with 'yes' or 'no' " "(or 'y' or 'n').\n")
def summarise_subject(raw_data):
"""
    This function creates a one-line summary of a single subject's data.
The summary includes
- Number of recorded choices
- Number of missing responses
- Number of choices of higher-p alternative
- Number of choices of dominated alternatives in catch trials
- Bonus amount won
- Responses to red-green colourblindness and difficulties
- Response to seriousness
- Self-reported choice strategy
- Subject-reported comments
    The summary also checks the exclusion criteria specified in the preregistration.
    """
# Read participant ID
run_id = raw_data["run_id"].values[0]
# Read won amount
won_amount = raw_data["wonAmt"].values[0]
lucky_number = raw_data["luckyNumber"].values[0]
    if (
        won_amount == '"'
    ):  # This happens when a trial was chosen for which no response was given.
won_amount = 0
lucky_number = np.nan
chosen_trial = raw_data["chosenTrial"].values[0]
# Extract only choice data
choices = (
raw_data.loc[
(raw_data["trial_type"] == "two-gamble-sequence")
& ~(raw_data["condition.1"].str.startswith("practice_"))
][
[
"condition.1",
"rt",
"key_press",
"choice",
"p0",
"p1",
"m0",
"m1",
"sequence",
"webgazer_data",
]
]
.rename({"condition.1": "condition"}, axis=1)
.astype({"p0": float, "p1": float, "m0": float, "m1": float})
.reset_index(drop=True)
)
    # Handle missing responses, recode choice to numeric (float, so NaN is representable)
choices["choice"] = np.where(choices["choice"] == '"', np.nan, choices["choice"])
choices = choices.astype({"choice": float})
# Identify options with higher P and higher M in each trial
choices["higher_p"] = (
choices[["p0", "p1"]].idxmax(axis=1).apply(lambda x: int(x[-1]))
)
choices["higher_m"] = (
choices[["m0", "m1"]].idxmax(axis=1).apply(lambda x: int(x[-1]))
)
n_records = len(choices)
# Compute number of choices for alternatives 0, 1, and missed responses
n_choose_0_all = np.sum(choices["choice"] == 0)
n_choose_1_all = np.sum(choices["choice"] == 1)
n_choose_nan = n_records - (n_choose_0_all + n_choose_1_all)
n_choose_higher_p = np.sum(
choices.loc[choices["condition"].str.startswith("exp")]["choice"]
== choices.loc[choices["condition"].str.startswith("exp")]["higher_p"]
)
    # Count choices of the dominated alternative in catch trials
n_choose_dominated = (
20
- np.sum(
choices.loc[choices["condition"].str.startswith("catch")]["choice"]
== choices.loc[choices["condition"].str.startswith("catch")]["higher_p"]
)
- np.sum(
pd.isnull(
choices.loc[choices["condition"].str.startswith("catch")]["choice"]
)
)
)
# Read gender, red-green difficulty and seriousness
mc_questionnaire = json.loads(
raw_data.loc[raw_data["trial_type"] == "survey-multi-choice"][
["response"]
].values[-1][0]
)
rg_blind = mc_questionnaire["redGreenColorBlind"] == "yes"
rg_difficult = mc_questionnaire["redGreenDifficulties"] == "yes"
serious = mc_questionnaire["seriousness"] == "I have taken part seriously."
# Read strategy and comment
reports_string = raw_data.loc[raw_data["trial_type"] == "survey-text"][
"response"
].values[0]
age = reports_string.split('"selfReport"')[0][8:-2]
self_report = reports_string.split('"comments":"')[0][26:-2]
comment = reports_string.split('"comments":"')[1][:-2]
# Exclusion criteria
# Automatic rejection:
# 1) More than 4 choices of dominated alternative in catch trials
# 2) Red green color blind or difficulties
# 3) Reported non-serious participation in the task
exclude_automatic = (
(n_choose_dominated > 4) or (rg_blind) or (rg_difficult) or (not serious)
)
# Manual rejection
# 4) Reported technical difficulties (this needs to be checked manually)
# 5) Reported decision strategy suggests that task instructions were misunderstood (needs to be checked manually)
# Only check these if automatic checks have not resulted in exclusion
exclude_manual = False
if not exclude_automatic:
exclude_manual = query_yes_no(
question=f"ID {run_id}\n Self report: {self_report}\n Comment: {comment}\n Exclude for misunderstanding or technical difficulties?"
)
exclude = exclude_manual or exclude_automatic
if exclude:
if n_choose_dominated > 4:
reason = "Dominated choices"
elif rg_blind or rg_difficult:
reason = "Red-green problems"
elif not serious:
reason = "Non-serious"
elif exclude_manual:
reason = "Misunderstanding or technical difficulties"
else:
reason = None
# Put everything together
out = pd.DataFrame(
dict(
run_id=run_id,
exclude=exclude,
exclusion_reason=reason,
gender=mc_questionnaire.get("gender", np.nan),
age=age,
n_records=n_records,
n_choose_nan=n_choose_nan,
n_choose_higher_p=n_choose_higher_p,
n_choose_dominated=n_choose_dominated,
chosen_trial=chosen_trial,
lucky_number=lucky_number,
won_amount=won_amount,
rg_blind=rg_blind,
rg_difficult=rg_difficult,
serious=serious,
self_report=self_report,
comment=comment,
),
index=[0],
)
return out
def main():
# Summarise data quality
summary = []
files = [file for file in listdir(args.input_path) if file.endswith(".csv")]
print(f"Making data overview from {len(files)} files:")
for i, file in enumerate(files):
print(f"\t{join(args.input_path, file)}")
        df = pd.read_csv(
            # `error_bad_lines` was removed in pandas 2.0; `on_bad_lines="skip"`
            # is the equivalent, available since pandas 1.3.
            join(args.input_path, file), on_bad_lines="skip", escapechar="\\"
        )
summary_s = summarise_subject(df)
summary_s["subject_id"] = i
summary.append(
summary_s[
[
"subject_id",
"run_id",
"exclude",
"exclusion_reason",
"gender",
"age",
"n_records",
"n_choose_nan",
"n_choose_dominated",
"n_choose_higher_p",
"chosen_trial",
"lucky_number",
"won_amount",
"rg_blind",
"rg_difficult",
"serious",
"self_report",
"comment",
]
]
)
summary = pd.concat(summary).reset_index(drop=True)
summary.to_csv(join(args.output_path, "subject_summary.csv"))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--input-path", type=str)
parser.add_argument("--output-path", type=str)
args = parser.parse_args()
mkdir_if_needed(args.output_path)
    main() | src/data/make_data_overview.py | 0.381335 | 0.306813
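A minimal sketch of exercising query_yes_no non-interactively, assuming the module is importable as src.data.make_data_overview; input() is stubbed so no terminal is needed.

import builtins
from src.data.make_data_overview import query_yes_no

answers = iter(['maybe', 'y'])          # the first reply is rejected, the second accepted
builtins.input = lambda: next(answers)
assert query_yes_no('Exclude this subject?') is True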
import json
from app.main.model.sensor_type import SensorType
from app.main.repository.device_group_repository import DeviceGroupRepository
from app.main.util.auth_utils import Auth
from app.main.util.constants import Constants
def test_get_sensor_type_info_should_return_sensor_info_when_valid_request(
client,
insert_device_group,
insert_user,
get_user_group_default_values,
insert_user_group,
get_sensor_type_default_values,
insert_sensor_type,
insert_sensor_reading_enumerator):
content_type = 'application/json'
device_group = insert_device_group()
user = insert_user()
user_group_values = get_user_group_default_values()
user_group_values['users'] = [user]
user_group = insert_user_group(user_group_values)
device_group.user_groups = [user_group]
DeviceGroupRepository.get_instance().update_database()
sensor_type = insert_sensor_type()
reading_enumerator = insert_sensor_reading_enumerator()
response = client.get(
'/api/hubs/' + device_group.product_key + '/sensor-types/' + sensor_type.name,
content_type=content_type,
headers={
'Authorization': 'Bearer ' + Auth.encode_auth_token(user.id, False)
}
)
expected_returned_values = {
'name': sensor_type.name,
'readingType': sensor_type.reading_type,
'rangeMin': sensor_type.range_min,
'rangeMax': sensor_type.range_max,
'enumerator': [
{
'number': reading_enumerator.number,
'text': reading_enumerator.text
}
]
}
assert response is not None
assert response.status_code == 200
assert response.content_type == content_type
response_data = json.loads(response.data.decode())
assert response_data is not None
assert response_data == expected_returned_values
def test_get_sensor_type_info_should_return_sensor_info_when_valid_request_and_user_is_admin(
client,
insert_device_group,
insert_admin,
get_user_group_default_values,
insert_user_group,
get_sensor_type_default_values,
insert_sensor_type,
insert_sensor_reading_enumerator):
content_type = 'application/json'
device_group = insert_device_group()
admin = insert_admin()
assert device_group.admin_id == admin.id
user_group_values = get_user_group_default_values()
user_group = insert_user_group(user_group_values)
device_group.user_groups = [user_group]
DeviceGroupRepository.get_instance().update_database()
sensor_type = insert_sensor_type()
reading_enumerator = insert_sensor_reading_enumerator()
response = client.get(
'/api/hubs/' + device_group.product_key + '/sensor-types/' + sensor_type.name,
content_type=content_type,
headers={
'Authorization': 'Bearer ' + Auth.encode_auth_token(admin.id, True)
}
)
expected_returned_values = {
'name': sensor_type.name,
'readingType': sensor_type.reading_type,
'rangeMin': sensor_type.range_min,
'rangeMax': sensor_type.range_max,
'enumerator': [
{
'number': reading_enumerator.number,
'text': reading_enumerator.text
}
]
}
assert response is not None
assert response.status_code == 200
assert response.content_type == content_type
response_data = json.loads(response.data.decode())
assert response_data is not None
assert response_data == expected_returned_values
def test_get_list_of_types_names_should_return_list_of_sensor_types_names_when_valid_request(
client,
insert_device_group,
insert_admin,
get_sensor_type_default_values,
get_device_group_default_values,
insert_sensor_type):
content_type = 'application/json'
device_group = insert_device_group()
admin = insert_admin()
first_sensor_type_values = get_sensor_type_default_values()
first_sensor_type_values['name'] = 'first_sensor_type'
insert_sensor_type(first_sensor_type_values)
second_sensor_type_values = get_sensor_type_default_values()
second_sensor_type_values['name'] = 'second_sensor_type'
second_sensor_type_values['id'] += 1
insert_sensor_type(second_sensor_type_values)
third_sensor_type_values = get_sensor_type_default_values()
third_sensor_type_values['name'] = 'third_sensor_type'
third_sensor_type_values['id'] += 2
insert_sensor_type(third_sensor_type_values)
expected_values = ['first_sensor_type', 'second_sensor_type', 'third_sensor_type']
response = client.get(
'/api/hubs/' + device_group.product_key + '/sensor-types',
content_type=content_type,
headers={
'Authorization': 'Bearer ' + Auth.encode_auth_token(admin.id, True)
}
)
assert response is not None
assert response.status_code == 200
assert response.content_type == content_type
response_data = json.loads(response.data.decode())
assert response_data is not None
assert response_data == expected_values
def test_get_list_of_types_names_should_return_error_message_when_admin_is_not_admin(
client,
insert_device_group,
insert_admin,
get_sensor_type_default_values,
insert_sensor_type):
content_type = 'application/json'
device_group = insert_device_group()
admin = insert_admin()
response = client.get(
'/api/hubs/' + device_group.product_key + '/sensor-types',
content_type=content_type,
headers={
'Authorization': 'Bearer ' + Auth.encode_auth_token(admin.id, False)
}
)
assert response is not None
assert response.status_code == 403
assert response.content_type == content_type
response_data = json.loads(response.data.decode())
assert response_data is not None
assert response_data['errorMessage'] == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
def test_create_sensor_type_should_create_sensor_type_in_device_group_when_valid_request(
client,
insert_device_group,
insert_admin):
content_type = 'application/json'
device_group = insert_device_group()
admin = insert_admin()
sensor_type_name = 'test sensor type name'
response = client.post(
'/api/hubs/' + device_group.product_key + '/sensor-types',
data=json.dumps(
{
"name": sensor_type_name,
"readingType": "Enum",
"rangeMin": 0,
"rangeMax": 1,
"enumerator": [
{
"number": 0,
"text": "zero"
},
{
"number": 1,
"text": "one"
}
]
}
),
content_type=content_type,
headers={
'Authorization': 'Bearer ' + Auth.encode_auth_token(admin.id, True)
}
)
assert response
assert response.status_code == 201
response_data = json.loads(response.data.decode())
assert not response_data
sensor_types = SensorType.query.filter(SensorType.name == sensor_type_name).all()
assert sensor_types
def test_create_sensor_type_should_return_error_message_when_invalid_request(
client,
insert_device_group,
insert_admin):
content_type = 'application/json'
device_group = insert_device_group()
admin = insert_admin()
sensor_type_name = 'test sensor type name'
response = client.post(
'/api/hubs/' + device_group.product_key + '/sensor-types',
data=json.dumps(
{
"name": sensor_type_name,
"readingType": "Enum",
"rangeMin": 0,
"rangeMax": 0,
"enumerator": [
]
}
),
content_type=content_type,
headers={
'Authorization': 'Bearer ' + Auth.encode_auth_token(admin.id, True)
}
)
assert response
assert response.status_code == 400
response_data = json.loads(response.data.decode())
assert response_data
assert 'errorMessage' in response_data
assert response_data['errorMessage'] == Constants.RESPONSE_MESSAGE_BAD_REQUEST
def test_create_sensor_type_should_return_error_message_when_user_not_authorized(
client,
insert_device_group,
insert_admin):
content_type = 'application/json'
device_group = insert_device_group()
sensor_type_name = 'test sensor type name'
response = client.post(
'/api/hubs/' + device_group.product_key + '/sensor-types',
data=json.dumps(
{
"name": sensor_type_name,
"readingType": "Enum",
"rangeMin": 0,
"rangeMax": 1,
"enumerator": [
{
"number": 0,
"text": "zero"
},
{
"number": 1,
"text": "one"
}
]
}
),
content_type=content_type
)
assert response
assert response.status_code == 400
response_data = json.loads(response.data.decode())
assert response_data
assert 'errorMessage' in response_data
assert response_data['errorMessage'] == Constants.RESPONSE_MESSAGE_USER_NOT_DEFINED
def test_create_sensor_type_should_return_no_privileges_error_message_when_user_is_not_admin(
client,
insert_device_group,
insert_user):
content_type = 'application/json'
device_group = insert_device_group()
    user = insert_user()
sensor_type_name = 'test sensor type name'
response = client.post(
'/api/hubs/' + device_group.product_key + '/sensor-types',
data=json.dumps(
{
"name": sensor_type_name,
"readingType": "Enum",
"rangeMin": 0,
"rangeMax": 1,
"enumerator": [
{
"number": 0,
"text": "zero"
},
{
"number": 1,
"text": "one"
}
]
}
),
content_type=content_type,
headers={
            'Authorization': 'Bearer ' + Auth.encode_auth_token(user.id, False)
}
)
assert response
assert response.status_code == 403
response_data = json.loads(response.data.decode())
assert response_data
assert 'errorMessage' in response_data
    assert response_data['errorMessage'] == Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES | app/test/integrationtest/test_sensor_type.py | 0.535584 | 0.269254
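The tests above rely on fixtures such as client, insert_device_group and insert_admin that live in a conftest.py not shown here; a hypothetical minimal client fixture might look like the sketch below (create_app and the 'testing' config name are assumptions, not the project's confirmed API).

import pytest
from app.main import create_app  # assumed application factory; adjust to the real project

@pytest.fixture
def client():
    app = create_app('testing')
    with app.test_client() as test_client:
        yield test_client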
from datetime import datetime, timedelta
import re
import core.db.reminders_db as db
import core.nomic_time as nomic_time
from core.log import log
from config.config import PREFIX
import core.utils as utils
def set_new_reminder(userId: str,
messageId: int,
channelId: int,
createdAt: datetime,
remindAfter: timedelta,
remindMsg: str):
    '''`createdAt` should be a timezone-aware UTC datetime; `remindAfter` is the delay until the reminder triggers.'''
_createdAt = nomic_time.get_timestamp(createdAt)
_remindAfter = nomic_time.get_timestamp(createdAt + remindAfter)
rowId = db.add_reminder(userId, messageId, channelId, _createdAt, _remindAfter, remindMsg)
if rowId:
        return (f'I\'ll remind you about this at about <t:{_remindAfter}>.\n'
                f'Use `{PREFIX}forget {rowId}` to delete this reminder.\n')
    else:
        return 'An error occurred trying to set this reminder :(. Some kind of reminder database issue.'
def check_for_triggered_reminders():
'''
Returns a list of dictionaries [{RowId, MessageId, ReplyMessage}]
'''
reminders = db.get_reminders(f'WHERE Active = 1 AND RemindAfter <= {nomic_time.unix_now()}')
return reminders
def get_reminder(rowId):
try:
# Also accounts for sql injection attempts
rowId = int(rowId)
except ValueError:
log.exception(f'User gave a bad rowId to delete: "{rowId}"')
return 'That is not a valid reminder Id. Please send the integer Id of a reminder that has been made before'
_reminders = db.get_reminders(f'WHERE RowId = {rowId}')
if len(_reminders) == 0:
        return f'No reminder found with id {rowId}.'
reminder = _reminders[0]
remindAfter = reminder['RemindAfter']
remindMsg = reminder['RemindMsg']
return (f"Reminder set to trigger <t:{remindAfter}:R>\n"
f"> {remindMsg}")
def unset_reminder(rowId, requesterId=None, serverId=None, overrideId=False):
'''Either requesterId or overrideId must be set.'''
try:
# Also accounts for sql injection attempts
rowId = int(rowId)
except ValueError:
log.exception(f'User gave a bad rowId to delete: "{rowId}"')
return 'That is not a valid reminder Id. Please send the integer Id of a reminder that has been made before'
reminders = db.get_reminders(f'WHERE rowid = {rowId}')
if len(reminders) == 0:
        return f'No reminder found with id {rowId}.'
if reminders[0]['Active'] == 0:
return f'Reminder {rowId} is old and wasn\'t going to trigger anyway'
if overrideId or str(requesterId) == reminders[0]['UserId'] or utils.is_admin(requesterId, serverId):
if db.unset_reminder(rowId):
return f'You will no longer be reminded of reminder number {rowId}.'
else:
            return 'An error occurred trying to delete this reminder. Oof.'
else:
return 'Only an admin or the person who created a reminder can delete it.'
def parse_remind_message(_msg, createdAt=None):
msg = _msg
span = None
timestamp = None
# Check if we were just given a timestamp
criteria = r'^\s*<?t?:?(\d{10})'
if re.match(criteria, _msg):
try:
timestamp = int(re.match(criteria, _msg).group(1))
span = nomic_time.get_timespan_from_timestamp(timestamp, createdAt)
parts = _msg.split(' ')
msg = ' '.join(parts[1:])
except Exception:
timestamp = None
# Check if we have an arbitrary date format on our hands
if timestamp is None and ';' in _msg:
parts = _msg.split(';')
datestring, msg = parts[0], ';'.join(parts[1:])
try:
timestamp = int(nomic_time.get_datestring_timestamp(datestring))
span = nomic_time.get_timespan_from_timestamp(timestamp, createdAt)
except Exception:
timestamp = None
# If we still don't have a timestamp, parse it by relative time
if timestamp is None:
# Parse the timestamp as <integer> <minutes|hours|days|weeks|months>
parts = msg.split(' ')
# The time unit might have a newline after it instead of a space.
        # e.g. ['1', 'second\nThis\nmessage', 'here'] should become ['1', 'second', 'This\nmessage', 'here']
        if len(parts) > 1 and '\n' in parts[1]:
            # subparts = ['second', 'This', 'message']
            subparts = parts[1].split('\n')
            # 'second', 'This\nmessage'
            timepart, firstWord = subparts[0], '\n'.join(subparts[1:])
            # parts = ['1', 'second', 'here']
            parts[1] = timepart
            # parts = ['1', 'second', 'This\nmessage', 'here']
            parts.insert(2, firstWord)
if len(parts) < 2:
            return (None, ('Incorrect syntax for reminder or I couldn\'t understand your date format. '
                           f'See `{PREFIX}help remind` for more details.'))
try:
number = float(parts[0])
except ValueError:
return (None, ('Couldn\'t understand your time format. '
'You might have an extra comma in there confusing things. '
f'See `{PREFIX}help remind` for more details.'))
timeUnit = parts[1]
span = nomic_time.parse_timespan_by_units(number, timeUnit)
        # len(parts) < 2 was already handled above, so check for a missing message instead.
        msg = None if len(parts) < 3 else ' '.join(parts[2:])
if not span:
return (None, f'Incorrect syntax for reminder. See `{PREFIX}help remind` for more details.')
if span.total_seconds() < 1:
return (None, 'Please give a time that is in the future (remember that times are in UTC).')
return (span, msg)
def can_quick_remind(span: timedelta):
    return span.total_seconds() < 60 * 10 | core/reminders.py | 0.662578 | 0.142113
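A small illustration of parse_remind_message, assuming core.reminders and its core.nomic_time helpers are importable and that parse_timespan_by_units returns a timedelta.

from core.reminders import parse_remind_message, can_quick_remind

span, msg = parse_remind_message('2 hours water the plants')
print(span, msg)               # -> 2:00:00 water the plants
print(can_quick_remind(span))  # -> False, only spans under ten minutes qualify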
import collections.abc  # Iterable lives in collections.abc in Python 3
class BigListError(Exception):  # StandardError was removed in Python 3
pass
class bigList:
"""A list-like structure for managing allot (possibly infinite) data."""
def __init__(self, prefix, iterable=None, minChunkSize=2**8, database = None):
if database == None:
raise BigListError("Must specify a database")
self._db = database
self._chunkSize = minChunkSize
self._iterables = [iterable]
self._prefix = prefix
self._suffix = []
self._curPreChunk = 0
self._curSufChunk = -1
def _loadPreChunk(self, n):
pass
def loadSufChunk(self, n):
pass
def _reChunk(self):
pass
def _chunkSizes(self):
pass
    def __iter__(self):
        i = 0
        while True:
            yield self[i]
            i += 1
    def __getitem__(self, index):
        if type(index) == int:
            if index >= 0:
                try:
                    retval = self._prefix[index]
                except IndexError:
                    # Loading chunks from the database is not implemented yet.
                    raise NotImplementedError("positive indices beyond the prefix are not supported yet")
            else:
                try:
                    retval = self._suffix[index]
                except IndexError:
                    raise NotImplementedError("negative indices beyond the suffix are not supported yet")
        elif type(index) == str:
            # String keys would map to named database chunks; not implemented yet.
            raise NotImplementedError("string keys are not supported yet")
        else:
            raise TypeError("index must be an int or string.")
        return retval
def append(self, item):
self._suffix.append(item)
def appendToFront(self, item):
self._loadChunk(0)
self._prefix = [item]+self._prefix
    def extend(self, iterable):
        if not isinstance(iterable, collections.abc.Iterable):
            raise TypeError("iterable must be iterable")
        try:
            len(iterable)
        except TypeError:
            # Unsized iterables (e.g. generators) are stashed for lazy consumption.
            self._iterables += [self._suffix, iterable]
            self._suffix = []
        else:
            # Sized iterables can be appended to the in-memory suffix directly.
            self._suffix = self._suffix + list(iterable)
class omegaList(bigList):
def __init__(self, prefix, iterable=None, minChunkSize=2**8, database = None):
if database == None:
raise BigListError("Must specify a database")
self._db = database
self._chunkSize = minChunkSize
self._iterable = iterable
self._prefix = prefix
def _addToEndOfIterable(self, newItems):
iteratorCopy = self._iterable
for i in iteratorCopy:
yield i
for i in newItems:
yield i
def _appendPrefix(self, item):
self._prefix.append(item)
def _getFromPrefix(self, index):
return self._prefix[index]
    def _extendPrefix(self, toIndex):
        # Materialise the prefix up to and including toIndex (range end is exclusive).
        for i in range(len(self._prefix), toIndex + 1):
            self._appendPrefix(next(self._iterable))
def append(self, item):
self._iterable = self._addToEndOfIterable([item])
def appendToFront(self, item):
self._prefix.insert(0, item)
def weave(self, otherOmegaList, minChunkSize=2**8, database = None):
if database == None:
raise BigListError("Must specify a database")
        def weavedIterator(a, b):
            # Alternate items from a and b; once one is exhausted, drain the other.
            a, b = iter(a), iter(b)
            flag = 0
            while flag == 0:
                try:
                    yield next(a)
                except StopIteration:
                    flag = 1
                    break
                try:
                    yield next(b)
                except StopIteration:
                    flag = 2
            if flag == 1:
                for i in b:
                    yield i
            elif flag == 2:
                for i in a:
                    yield i
        newIter = weavedIterator(self, otherOmegaList)
        return omegaList([], newIter, minChunkSize, database)
def extend(self, iterable):
self._iterable = self._addToEndOfIterable(iterable)
def __getitem__(self, index):
        if index < 0:
raise IndexError("Index must be positive")
else:
try:
retval = self._getFromPrefix(index)
except IndexError:
self._extendPrefix(index)
retval = self._getFromPrefix(index)
            return retval | bigList.py | 0.525125 | 0.219024
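A sketch of the lazily materialised omegaList above; the database argument is required but unused by indexing, so a placeholder dict stands in for it.

import itertools
from bigList import omegaList

squares = omegaList([], (n * n for n in itertools.count()), database={})
print(squares[4])         # -> 16, materialising the prefix up to index 4
squares.appendToFront(-1)
print(squares[0])         # -> -1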
__author__ = "<NAME>"
__contact__ = "University of Freiburg, IMTEK, <NAME>"
__credits__ = "<NAME>"
__version__ = "1.0.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>, <EMAIL>"
# Import dependencies
from tkinter import *
from tkinter.ttk import *
from FreiStat.Data_storage.constants import *
from FreiStat.Methods.run_electrochemical_method import Run_Electrochemical_Method
from FreiStat.Methods.run_chronoamperometry import Run_CA
from FreiStat.Methods.run_linear_sweep_voltammetry import Run_LSV
from FreiStat.Methods.run_cyclic_voltammetry import Run_CV
from FreiStat.Methods.run_normal_pulse_voltammetry import Run_NPV
from FreiStat.Methods.run_differential_pulse_voltammetry import Run_DPV
from FreiStat.Methods.run_square_wave_voltammetry import Run_SWV
from FreiStat.Methods.run_sequence import Run_Sequence
from matplotlib import pyplot as plt
from matplotlib.figure import SubplotParams
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
from matplotlib.backends.backend_tkagg import NavigationToolbar2Tk
import matplotlib
matplotlib.use('TkAgg')
# Import internal dependencies
from ..Data_Storage.constants import *
def _executeExperiment(self) -> None:
"""
Description
-----------
Method which starts the different facades of the Python library with
the chosen experiment parameters
"""
# Clear central frame
self._clearFrame(self._fCentralFrame)
if (self._dataHandling.get_ExperimentType() == SEQUENCE):
# Execute the sequence
self._executeSequence()
else :
# Execute in single mode
self._executeSingleMethod()
def _executeSingleMethod(self) -> None:
"""
Description
-----------
Method which starts the single mode of the Python library with the chosen
experiment parameters
"""
# Initialize variables
iCommunicationMode : int
listExperimentParameters : list = []
if (self._iWLANMode.get() == True):
iCommunicationMode = FREISTAT_WLAN
else :
iCommunicationMode = FREISTAT_SERIAL
RunEcMethod : Run_Electrochemical_Method = None
# Check which experiment should be executed
if (self._dataHandling.get_ExperimentType() == CA):
        # Temporarily store the experiment parameters
listExperimentParameters = self._dataHandling.get_ExperimentParameters()
# Save low performance mode
self._bLowPerformanceMode = listExperimentParameters[9][1]
# Run chronoamperometry on FreiStat
RunEcMethod = Run_CA(logger= self._logger, mode= FREISTAT_BACKEND,
commnicationMode= iCommunicationMode,
wlanSetting=[self._strServerIP.get(),
int(self._strServerPort.get()),
self._strClientIP.get(),
int(self._strClientPort.get())])
self._EcMethod = RunEcMethod
RunEcMethod.start(Potential_Steps= listExperimentParameters[0][1],
Pulse_Lengths= listExperimentParameters[1][1],
Sampling_Rate= listExperimentParameters[2][1],
Cycle= listExperimentParameters[3][1],
CurrentRange= listExperimentParameters[4][1],
MainsFilter = listExperimentParameters[5][1],
Sinc2_Oversampling = listExperimentParameters[6][1],
Sinc3_Oversampling = listExperimentParameters[7][1],
EnableOptimizer= listExperimentParameters[8][1],
LowPerformanceMode= listExperimentParameters[9][1])
if (listExperimentParameters[8][1] == True):
self._dataHandling.save_ExperimentParmeters(
self._decodeOptimizerParameters(
RunEcMethod._listExperimentParameters))
self._update_PrameterbandFrame(self._fParameterBand)
elif (self._dataHandling.get_ExperimentType() == LSV):
        # Temporarily store the experiment parameters
listExperimentParameters = self._dataHandling.get_ExperimentParameters()
# Save low performance mode
self._bLowPerformanceMode = listExperimentParameters[11][1]
# Run linear sweep voltammetry on FreiStat
RunEcMethod = Run_LSV(logger= self._logger, mode= FREISTAT_BACKEND,
commnicationMode= iCommunicationMode,
wlanSetting=[self._strServerIP.get(),
int(self._strServerPort.get()),
self._strClientIP.get(),
int(self._strClientPort.get())])
self._EcMethod = RunEcMethod
RunEcMethod.start(StartVoltage= listExperimentParameters[0][1],
StopVoltage= listExperimentParameters[1][1],
Stepsize= listExperimentParameters[2][1],
Scanrate= listExperimentParameters[3][1],
Cycle= listExperimentParameters[4][1],
CurrentRange= listExperimentParameters[5][1],
FixedWEPotential= listExperimentParameters[6][1],
MainsFilter = listExperimentParameters[7][1],
Sinc2_Oversampling = listExperimentParameters[8][1],
Sinc3_Oversampling = listExperimentParameters[9][1],
EnableOptimizer= listExperimentParameters[10][1],
LowPerformanceMode= listExperimentParameters[11][1])
if (listExperimentParameters[10][1] == True):
self._dataHandling.save_ExperimentParmeters(
self._decodeOptimizerParameters(
RunEcMethod._listExperimentParameters))
self._update_PrameterbandFrame(self._fParameterBand)
elif (self._dataHandling.get_ExperimentType() == CV):
# Temporarily store the experiment parameters
listExperimentParameters = self._dataHandling.get_ExperimentParameters()
# Save low performance mode
self._bLowPerformanceMode = listExperimentParameters[12][1]
# Run cyclic voltammetry on FreiStat
RunEcMethod = Run_CV(logger= self._logger, mode= FREISTAT_BACKEND,
commnicationMode= iCommunicationMode,
wlanSetting=[self._strServerIP.get(),
int(self._strServerPort.get()),
self._strClientIP.get(),
int(self._strClientPort.get())])
self._EcMethod = RunEcMethod
RunEcMethod.start(StartVoltage= listExperimentParameters[0][1],
SecondVertex= listExperimentParameters[1][1],
FirstVertex= listExperimentParameters[2][1],
Stepsize= listExperimentParameters[3][1],
Scanrate= listExperimentParameters[4][1],
Cycle= listExperimentParameters[5][1],
CurrentRange= listExperimentParameters[6][1],
FixedWEPotential= listExperimentParameters[7][1],
MainsFilter = listExperimentParameters[8][1],
Sinc2_Oversampling = listExperimentParameters[9][1],
Sinc3_Oversampling = listExperimentParameters[10][1],
EnableOptimizer= listExperimentParameters[11][1],
LowPerformanceMode= listExperimentParameters[12][1])
if (listExperimentParameters[11][1] == True):
self._dataHandling.save_ExperimentParmeters(
self._decodeOptimizerParameters(
RunEcMethod._listExperimentParameters))
self._update_PrameterbandFrame(self._fParameterBand)
elif (self._dataHandling.get_ExperimentType() == NPV):
# Temporarily store the experiment parameters
listExperimentParameters = self._dataHandling.get_ExperimentParameters()
# Save low performance mode
self._bLowPerformanceMode = listExperimentParameters[13][1]
# Run normal pulse voltammetry on FreiStat
RunEcMethod = Run_NPV(logger= self._logger, mode= FREISTAT_BACKEND,
commnicationMode= iCommunicationMode,
wlanSetting=[self._strServerIP.get(),
int(self._strServerPort.get()),
self._strClientIP.get(),
int(self._strClientPort.get())])
self._EcMethod = RunEcMethod
RunEcMethod.start(BaseVoltage= listExperimentParameters[0][1],
StartVoltage= listExperimentParameters[1][1],
StopVoltage= listExperimentParameters[2][1],
DeltaV_Staircase= listExperimentParameters[3][1],
Pulse_Lengths= listExperimentParameters[4][1],
Sampling_Duration= listExperimentParameters[5][1],
Cycle= listExperimentParameters[6][1],
CurrentRange= listExperimentParameters[7][1],
FixedWEPotential= listExperimentParameters[8][1],
MainsFilter = listExperimentParameters[9][1],
Sinc2_Oversampling = listExperimentParameters[10][1],
Sinc3_Oversampling = listExperimentParameters[11][1],
EnableOptimizer= listExperimentParameters[12][1],
LowPerformanceMode= listExperimentParameters[13][1])
if (listExperimentParameters[12][1] == True):
self._dataHandling.save_ExperimentParmeters(
self._decodeOptimizerParameters(
RunEcMethod._listExperimentParameters))
self._update_PrameterbandFrame(self._fParameterBand)
elif (self._dataHandling.get_ExperimentType() == DPV):
# Temporarily store the experiment parameters
listExperimentParameters = self._dataHandling.get_ExperimentParameters()
# Save low performance mode
self._bLowPerformanceMode = listExperimentParameters[13][1]
# Run differential pulse voltammetry on FreiStat
RunEcMethod = Run_DPV(logger= self._logger, mode= FREISTAT_BACKEND,
commnicationMode= iCommunicationMode,
wlanSetting=[self._strServerIP.get(),
int(self._strServerPort.get()),
self._strClientIP.get(),
int(self._strClientPort.get())])
self._EcMethod = RunEcMethod
RunEcMethod.start(StartVoltage= listExperimentParameters[0][1],
StopVoltage= listExperimentParameters[1][1],
DeltaV_Staircase= listExperimentParameters[2][1],
DeltaV_Peak= listExperimentParameters[3][1],
Pulse_Lengths= listExperimentParameters[4][1],
Sampling_Duration= listExperimentParameters[5][1],
Cycle= listExperimentParameters[6][1],
CurrentRange= listExperimentParameters[7][1],
FixedWEPotential= listExperimentParameters[8][1],
MainsFilter = listExperimentParameters[9][1],
Sinc2_Oversampling = listExperimentParameters[10][1],
Sinc3_Oversampling = listExperimentParameters[11][1],
EnableOptimizer= listExperimentParameters[12][1],
LowPerformanceMode= listExperimentParameters[13][1])
if (listExperimentParameters[12][1] == True):
self._dataHandling.save_ExperimentParmeters(
self._decodeOptimizerParameters(
RunEcMethod._listExperimentParameters))
self._update_PrameterbandFrame(self._fParameterBand)
elif (self._dataHandling.get_ExperimentType() == SWV):
# Temporarily store the experiment parameters
listExperimentParameters = self._dataHandling.get_ExperimentParameters()
# Save low performance mode
self._bLowPerformanceMode = listExperimentParameters[13][1]
# Run square wave voltammetry on FreiStat
RunEcMethod = Run_SWV(logger= self._logger, mode= FREISTAT_BACKEND,
commnicationMode= iCommunicationMode,
wlanSetting=[self._strServerIP.get(),
int(self._strServerPort.get()),
self._strClientIP.get(),
int(self._strClientPort.get())])
self._EcMethod = RunEcMethod
RunEcMethod.start(StartVoltage= listExperimentParameters[0][1],
StopVoltage= listExperimentParameters[1][1],
DeltaV_Staircase= listExperimentParameters[2][1],
DeltaV_Peak= listExperimentParameters[3][1],
DutyCycle= listExperimentParameters[4][1],
Sampling_Duration= listExperimentParameters[5][1],
Cycle= listExperimentParameters[6][1],
CurrentRange= listExperimentParameters[7][1],
FixedWEPotential= listExperimentParameters[8][1],
MainsFilter = listExperimentParameters[9][1],
Sinc2_Oversampling = listExperimentParameters[10][1],
Sinc3_Oversampling = listExperimentParameters[11][1],
EnableOptimizer= listExperimentParameters[12][1],
LowPerformanceMode= listExperimentParameters[13][1])
if (listExperimentParameters[12][1] == True):
self._dataHandling.save_ExperimentParmeters(
self._decodeOptimizerParameters(
RunEcMethod._listExperimentParameters))
self._update_PrameterbandFrame(self._fParameterBand)
# Save reference of the plotter
self._plotter = RunEcMethod.get_plotter()
# Save reference of the process
self._process = RunEcMethod.get_process()
# Save reference of the figure to prevent garbage collection
self._fig = self._plotter.get_figure()
# Save reference of the data queue to prevent garbage collection
self._dataQueue = RunEcMethod.get_dataQueue()
# Resize figure
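# winfo_fpixels('3c') returns the screen pixel count of a 3 cm distance,
# used here as a rough pixels-per-inch scale for set_size_inches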
iDpi = self._fCentralFrame.winfo_fpixels('3c')
self._fig.set_size_inches(self._fCentralFrame.winfo_width() / iDpi,
self._fCentralFrame.winfo_height()/ iDpi,
forward=True)
# Draw frame of the figure
self._fLiveFeed = Frame(self._fCentralFrame, style="fCentralFrame.TFrame")
self._fLiveFeed.pack(fill= 'both', side=TOP, expand=TRUE, padx= 2, pady= 2)
self._canvas = FigureCanvasTkAgg(self._fig, master= self._fLiveFeed)
self._canvas.get_tk_widget().pack(side= TOP, expand= TRUE)
self._canvas.draw()
# Create the toolbar
self._toolbarFrame = Frame(master= self._fLiveFeed)
self._toolbarFrame.pack(fill= X, side= BOTTOM, expand= False, padx= 5)
toolbar = Toolbar(self._canvas, self._toolbarFrame)
toolbar.config(background= "white")
toolbar._message_label.config(background= "white", font= "Arial 10 bold")
self._plotter.set_listBox(self._TextTerminal)
self._plotter.set_progressBar(self._ProgressBar)
# Call animate function of plotter
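# T_Animate is assumed to start the live plot animation, while T_Print (low
# performance mode) only reports incoming data at the configured latency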
if (self._bLowPerformanceMode == False):
self._plotter.T_Animate(self._dataQueue)
self._animate = self._plotter.get_animate()
self._canvas.draw()
else :
self._plotter.T_Print(self._strGLpmLatency.get(), self._dataQueue)
self._animate = self._plotter.get_animate()
# Update frame
self._fLiveFeed.update()
def _executeSequence(self) -> None:
"""
Description
-----------
Method which starts the sequence mode of the Python library with the chosen
experiment parameters
"""
# Initialize variables
bEnableOptimizer : bool = False
bLowPerformanceMode : bool = False
iCommunicationMode : int
self._listCanvas = []
if (self._iWLANMode.get() == True):
iCommunicationMode = FREISTAT_WLAN
else :
iCommunicationMode = FREISTAT_SERIAL
# Temporarily store the experiment parameters
listExperimentParameters = self._dataHandling.get_ExperimentParameters()
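# Each sequence entry is assumed to have the form
# (method_type, ..., parameter_list), where index [2] holds the
# (name, value) pairs of that method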
# Check if the optimizer and the low performance mode should be enabled
# Loop over the experiment parameter list
for iIndex in range(len(listExperimentParameters)):
# Loop over the experiment parameters in each method
for iParameter in range(len(listExperimentParameters[iIndex][2])):
if (listExperimentParameters[iIndex][2][iParameter][0] ==
ENABLE_OPTIMIZER):
# Check if the optimizer of one method is enabled
if (listExperimentParameters[iIndex][2][iParameter][1] == True):
bEnableOptimizer = True
if (listExperimentParameters[iIndex][2][iParameter][0] ==
LOW_PERFORMANCE_MODE):
# Check if the low performance mode of one method is enabled
if (listExperimentParameters[iIndex][2][iParameter][1] == True):
bLowPerformanceMode = True
# Create a sequence object
RunEcMethod2 = Run_Sequence(EnableOptimizer= bEnableOptimizer,
logger=self._logger, mode= FREISTAT_BACKEND,
commnicationMode= iCommunicationMode,
wlanSetting=[self._strServerIP.get(),
int(self._strServerPort.get()),
self._strClientIP.get(),
int(self._strClientPort.get())])
self._EcMethod = RunEcMethod2
# Add methods to the sequence
for iMethod in range(len(listExperimentParameters)):
if (listExperimentParameters[iMethod][0] == CA):
RunEcMethod2.add_CA(
Potential_Steps= listExperimentParameters[iMethod][2][0][1],
Pulse_Lengths= listExperimentParameters[iMethod][2][1][1],
Sampling_Rate= listExperimentParameters[iMethod][2][2][1],
Cycle= listExperimentParameters[iMethod][2][3][1],
CurrentRange= listExperimentParameters[iMethod][2][4][1],
MainsFilter = listExperimentParameters[iMethod][2][5][1],
Sinc2_Oversampling = listExperimentParameters[iMethod][2][6][1],
Sinc3_Oversampling = listExperimentParameters[iMethod][2][7][1])
elif (listExperimentParameters[iMethod][0] == LSV):
RunEcMethod2.add_LSV(
StartVoltage= listExperimentParameters[iMethod][2][0][1],
StopVoltage= listExperimentParameters[iMethod][2][1][1],
Stepsize= listExperimentParameters[iMethod][2][2][1],
Scanrate= listExperimentParameters[iMethod][2][3][1],
Cycle= listExperimentParameters[iMethod][2][4][1],
CurrentRange= listExperimentParameters[iMethod][2][5][1],
FixedWEPotential= listExperimentParameters[iMethod][2][6][1],
MainsFilter = listExperimentParameters[iMethod][2][7][1],
Sinc2_Oversampling = listExperimentParameters[iMethod][2][8][1],
Sinc3_Oversampling = listExperimentParameters[iMethod][2][9][1])
elif (listExperimentParameters[iMethod][0] == CV):
RunEcMethod2.add_CV(
StartVoltage= listExperimentParameters[iMethod][2][0][1],
SecondVertex= listExperimentParameters[iMethod][2][1][1],
FirstVertex= listExperimentParameters[iMethod][2][2][1],
Stepsize= listExperimentParameters[iMethod][2][3][1],
Scanrate= listExperimentParameters[iMethod][2][4][1],
Cycle= listExperimentParameters[iMethod][2][5][1],
CurrentRange= listExperimentParameters[iMethod][2][6][1],
FixedWEPotential= listExperimentParameters[iMethod][2][7][1],
MainsFilter = listExperimentParameters[iMethod][2][8][1],
Sinc2_Oversampling = listExperimentParameters[iMethod][2][9][1],
Sinc3_Oversampling = listExperimentParameters[iMethod][2][10][1])
elif (listExperimentParameters[iMethod][0] == NPV):
RunEcMethod2.add_NPV(
BaseVoltage= listExperimentParameters[iMethod][2][0][1],
StartVoltage= listExperimentParameters[iMethod][2][1][1],
StopVoltage= listExperimentParameters[iMethod][2][2][1],
DeltaV_Staircase= listExperimentParameters[iMethod][2][3][1],
Pulse_Lengths= listExperimentParameters[iMethod][2][4][1],
Sampling_Duration= listExperimentParameters[iMethod][2][5][1],
Cycle= listExperimentParameters[iMethod][2][6][1],
CurrentRange= listExperimentParameters[iMethod][2][7][1],
FixedWEPotential= listExperimentParameters[iMethod][2][8][1],
MainsFilter = listExperimentParameters[iMethod][2][9][1],
Sinc2_Oversampling = listExperimentParameters[iMethod][2][10][1],
Sinc3_Oversampling = listExperimentParameters[iMethod][2][11][1])
elif (listExperimentParameters[iMethod][0] == DPV):
RunEcMethod2.add_DPV(
StartVoltage= listExperimentParameters[iMethod][2][0][1],
StopVoltage= listExperimentParameters[iMethod][2][1][1],
DeltaV_Staircase= listExperimentParameters[iMethod][2][2][1],
DeltaV_Peak= listExperimentParameters[iMethod][2][3][1],
Pulse_Lengths= listExperimentParameters[iMethod][2][4][1],
Sampling_Duration= listExperimentParameters[iMethod][2][5][1],
Cycle= listExperimentParameters[iMethod][2][6][1],
CurrentRange= listExperimentParameters[iMethod][2][7][1],
FixedWEPotential= listExperimentParameters[iMethod][2][8][1],
MainsFilter = listExperimentParameters[iMethod][2][9][1],
Sinc2_Oversampling = listExperimentParameters[iMethod][2][10][1],
Sinc3_Oversampling = listExperimentParameters[iMethod][2][11][1])
elif (listExperimentParameters[iMethod][0] == SWV):
RunEcMethod2.add_SWV(
StartVoltage= listExperimentParameters[iMethod][2][0][1],
StopVoltage= listExperimentParameters[iMethod][2][1][1],
DeltaV_Staircase= listExperimentParameters[iMethod][2][2][1],
DeltaV_Peak= listExperimentParameters[iMethod][2][3][1],
DutyCycle= listExperimentParameters[iMethod][2][4][1],
Sampling_Duration= listExperimentParameters[iMethod][2][5][1],
Cycle= listExperimentParameters[iMethod][2][6][1],
CurrentRange= listExperimentParameters[iMethod][2][7][1],
FixedWEPotential= listExperimentParameters[iMethod][2][8][1],
MainsFilter = listExperimentParameters[iMethod][2][9][1],
Sinc2_Oversampling = listExperimentParameters[iMethod][2][10][1],
Sinc3_Oversampling = listExperimentParameters[iMethod][2][11][1])
# Start the sequence
RunEcMethod2.start(SequenceCycles= self._dataHandling.get_SequenceCycles(),
LowPerformanceMode= bLowPerformanceMode)
# Save reference of the plotter
self._plotter = RunEcMethod2.get_plotter()
# Save reference of the process
self._process = RunEcMethod2.get_process()
# Save reference of the figure to prevent garbage collection
self._fig = self._plotter.get_figure()
# Save reference of the list of figures to prevent garbage collection
self._listFigures = self._plotter.get_listfigures()
# Save reference of the data queue to prevent garbage collection
self._dataQueue = RunEcMethod2.get_dataQueue()
# Resize figure
iDpi = self._fCentralFrame.winfo_fpixels('3c')
self._fig.set_size_inches(self._fCentralFrame.winfo_width() / iDpi,
self._fCentralFrame.winfo_height() / iDpi,
forward=True)
# Draw frame of the figure
self._fLiveFeed = Frame(self._fCentralFrame, style="fCentralFrame.TFrame")
self._fLiveFeed.pack(fill= 'both', side=TOP, expand=TRUE, padx= 2, pady= 2)
self._canvas = FigureCanvasTkAgg(self._fig, master= self._fLiveFeed)
self._canvas.get_tk_widget().pack(side= TOP, expand= TRUE)
self._canvas.draw()
# Create the toolbar
self._toolbarFrame = Frame(master= self._fLiveFeed)
self._toolbarFrame.pack(fill= X, side= BOTTOM, expand= False, padx= 5)
toolbar = Toolbar(self._canvas, self._toolbarFrame)
toolbar.config(background= "white")
toolbar._message_label.config(background= "white", font= "Arial 10 bold")
# Draw frame of the figure
self._fStaticPlot = Frame(self._fCentralFrame, style="fCentralFrame.TFrame")
self._fStaticPlot.pack(fill= 'both', side=TOP, expand=TRUE, padx= 2, pady= 2)
# Create canvas for each plot
for iIndex in range(len(self._listFigures)):
# Resize figure
iDpi = self._fPlotFrame.winfo_fpixels('3c')
self._listFigures[iIndex].set_size_inches(
self._fPlotFrameHeight / iDpi * 1.2,
self._fPlotFrameHeight / iDpi, forward=True)
# Draw frame of the figure
self._listCanvas.append(FigureCanvasTkAgg(self._listFigures[iIndex],
master= self._fPlotFrame))
self._listCanvas[iIndex].get_tk_widget().pack(side= LEFT,
anchor= N, padx = 5)
self._listCanvas[iIndex].mpl_connect("button_press_event", lambda event,
iPlotID = iIndex: self._on_mouse_press(event, iPlotID))
self._listCanvas[iIndex].draw()
# Create one central plot
subplotParams = SubplotParams(top= 0.95, right= 0.75)
self._figureStatic, (self._axesStatic) = plt.subplots(1, 1,
subplotpars= subplotParams)
self._figureStatic.set_size_inches(self._fCentralFrame.winfo_width() / iDpi,
self._fCentralFrame.winfo_height()/ iDpi,
forward=True)
self._axesStatic.grid()
self._canvasStatic = FigureCanvasTkAgg(self._figureStatic,
master= self._fStaticPlot)
self._canvasStatic.get_tk_widget().pack(side= TOP, expand= TRUE)
self._canvasStatic.draw()
# Create the toolbar
self._toolbarFrameStatic = Frame(master= self._fStaticPlot)
toolbarStatic = Toolbar(self._canvasStatic, self._toolbarFrameStatic)
toolbarStatic.config(background= "white")
toolbarStatic._message_label.config(background= "white", font= "Arial 10 bold")
self._fStaticPlot.pack_forget()
self._plotter.set_listBox(self._TextTerminal)
self._plotter.set_progressBar(self._ProgressBar)
# Call animate function of plotter
self._plotter.T_Animate(self._dataQueue)
self._animate = self._plotter.get_animate()
self._canvas.draw()
# Update frame
self._fLiveFeed.update()
class Toolbar(NavigationToolbar2Tk):
"""
Description
-----------
Custom class overwriting the default matplotlib plot toolbar.
"""
def __init__(self, plotCanvas, frame):
# Create the default toolbar
NavigationToolbar2Tk.__init__(self, plotCanvas, frame)
# Remove the 'configure subplots' button; '!button4' is the Tk
# auto-generated name of the fourth toolbar button and may vary
# between matplotlib versions
self.children['!button4'].pack_forget() | Python/FreiStat_GUI/Main_Window/Experiment.py | 0.604282 | 0.095983 |
from functools import partial
import os
from random import randint
from uuid import uuid4
from pandas import DataFrame
from numpy.random import binomial
from plumbum import local
from plumbum.cmd import picard
import wrapt
from fmbiopy.obj import get_param_names, replace_param_sig
from fmbiopy.paths import is_empty
SANDBOX = local.path("test/sandbox")
def assert_script_produces_files(
script, args, output, redirect=None, empty_ok=False, outdir=None
):
"""Assert that a script with given command line args produces expected files
Parameters
----------
script : str
Path to the script
args : List[str]
List of command line arguments
output: List[str] or List[plumbum.LocalPath]
List of output files
redirect: str or plumbum.LocalPath, optional
If defined, redirect the stdout of the script to the given file.
empty_ok : bool
If True, output files are valid even if they are empty
outdir: str or plumbum.LocalPath, optional
If given, the output filenames are relative to this directory
"""
execute = local[script]
command = execute.__getitem__(args)
if redirect:
(command > redirect)()
else:
command()
for f in output:
if outdir:
f = local.path(outdir) / f
else:
f = local.path(f)
assert f.exists()
if not empty_ok:
assert not is_empty(f)
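# A minimal usage sketch (script and file names are hypothetical):
#
# assert_script_produces_files(
#     "scripts/make_report.sh", ["--input", "data.csv"], ["report.txt"],
#     outdir="out")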
def trim(read, prob_trim, trim_interval):
if binomial(1, prob_trim) == 1:
trimmed_bases = randint(trim_interval[0], trim_interval[1])
return read[:-trimmed_bases]
return read
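# Example: trim("ACGTACGT", prob_trim=1.0, trim_interval=(1, 3)) always
# trims, removing between 1 and 3 bases from the 3' end of the read
# (randint is inclusive on both bounds)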
def validate_bam_file(bam_or_sam):
picard(
"ValidateSamFile",
"I=" + bam_or_sam,
"MODE=SUMMARY",
"IGNORE_WARNINGS=true",
"iGNORE=MISSING_READ_GROUP",
)
def assert_df_equals(df, query):
"""Check if a DataFrame is equal to a DataFrame or dictionary."""
if isinstance(query, dict):
query = DataFrame.from_dict(query)
assert df.to_dict() == query.to_dict()
def gen_reads(
fasta, output_dir, bam_output=True, vcf_output=False, mutation_rate=0
):
"""Wrapper around NEAT gen-reads script."""
output_prefix = output_dir / uuid4().hex
output = {
"prefix": output_prefix,
"fwd": local.path(output_prefix + "_read1.fq"),
"rev": local.path(output_prefix + "_read2.fq"),
"bam": local.path(output_prefix + "_golden.bam"),
"vcf": local.path(output_prefix + "_golden.vcf"),
}
gen_reads_bin = local["python2"]["test/lib/neat-genreads/genReads.py"]
gen_reads_args = [
"-r",
fasta,
"-R",
"101",
"-o",
output_prefix,
"-M",
mutation_rate,
"--pe",
"300",
"30",
]
if bam_output:
gen_reads_args.append("--bam")
if vcf_output:
gen_reads_args.append("--vcf")
gen_reads_bin.__getitem__(gen_reads_args)()
return output
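# Per the NEAT genReads CLI: -R sets the read length (101 bp), -M the
# mutation rate, and --pe the paired-end fragment length mean (300) and
# standard deviation (30)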
def file_generator(
wrapped=None,
ids=["file"],
names=[uuid4().hex],
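# note: this default is evaluated once, at definition time, so every
# call without an explicit `names` shares the same UUID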
suffixes=[""],
dirs=[SANDBOX],
properties=None,
):
"""Decorator which automates setup and return for file generation functions.
The decorator fulfills 3 tasks:
1. Generating required temporary file names.
2. Creating an output dictionary (`D`) which lists file locations.
3. Modifying the return value of the wrapped function so that it returns `D` instead.
Wrapped functions must contain a parameter named `meta`; this parameter is
replaced by `D` at runtime, allowing the function to access the generated
filenames.
Parameters
----------
wrapped: Callable
Should not be defined manually, will be passed automatically by python.
ids: Iterable[str]
A list of ids for output files. These will be the keys for the output
dictionary.
names: Iterable[str]
A list of output file basenames.
suffixes: Iterable[str]
A list of file suffixes for output files.
dirs: Iterable[PathLike]
A list of output directories.
properties: Dict
Dictionary of extra metadata to be included in output dictionary.
"""
# warning: this function gets pretty hairy
if wrapped is None:
# Happens when some optional params are defined
# See wrapt docs for reasoning
return partial(
file_generator,
ids=ids,
names=names,
suffixes=suffixes,
dirs=dirs,
properties=properties,
)
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
# Wrapper function with decorator arguments included implicitly as
# variables in outer scope.
# First generate the metadata which will be passed into the decorated
# function.
filenames = [
local.path(directory) / (name + suffix)
for directory, name, suffix in zip(dirs, names, suffixes)
]
output_dict = dict(zip(ids, filenames))
if properties:
output_dict.update(properties)
# Generate a partial function with meta keyword arg predefined.
partial_wrapped_func = partial(wrapped, meta=output_dict)
def replacement_fixture_func(**kwargs):
# Function which will replace the decorated function. Run the
# wrapped function with injected `meta` variable. Then return the
# metadata.
partial_wrapped_func(**kwargs)
return output_dict
wrapped_argnames = get_param_names(wrapped)
# pytest checks that all fixture arguments are valid fixtures, so we
# need to purge all references to `meta` parameter
wrapped_argnames.remove("meta")
del kwargs["meta"]
replacement_fixture_func = replace_param_sig(
replacement_fixture_func, wrapped_argnames
)
return replacement_fixture_func(**kwargs)
return wrapper(wrapped) | test/helpers.py | 0.776029 | 0.339636 |
import unittest
from tahoma.protocol import Protocol
from test_protocol_getSetup import SetupInput
class TestProtocolGetStates(unittest.TestCase):
def test_requestBuilding(self):
tahoma = Protocol("", "", unittest=1 )
tahoma._getSetup(SetupInput) # only for unit tests
light = tahoma.getDevice("hue://1234-1234-1234/123456789012/lights/1")
window = tahoma.getDevice("io://1234-1234-1234/12345644")
request = tahoma._createGetStateRequest( [ light, window ] )
shouldBe = '''[{"deviceURL": "hue://1234-1234-1234/123456789012/lights/1","states": [{"name": "core:CieColorSpaceXState"},{"name": "core:CieColorSpaceYState"},{"name": "core:ColorHueState"},{"name": "core:ColorSaturationState"},{"name": "core:LightIntensityState"},{"name": "core:NameState"},{"name": "core:OnOffState"},{"name": "hue:HueColorModeState"}]},{"deviceURL": "io://1234-1234-1234/12345644","states": [{"name": "core:ClosureState"},{"name": "core:NameState"},{"name": "core:OpenClosedState"},{"name": "core:PriorityLockTimerState"}]}]'''
self.assertEqual(request, shouldBe)
def test_applyChanges(self):
tahoma = Protocol("", "", unittest=1 )
tahoma._getSetup(SetupInput) # only for unit tests
changeData = {
"devices": [{
"label": "Light name",
"deviceURL": "hue://1234-1234-1234/123456789012/lights/1",
"shortcut": False,
"states": [{
"name": "core:ColorHueState",
"type": 1,
"value": 265
}, {
"name": "core:ColorSaturationState",
"type": 1,
"value": 44
}, {
"name": "core:CieColorSpaceXState",
"type": 2,
"value": 0.432
}, {
"name": "core:CieColorSpaceYState",
"type": 2,
"value": 0.2792
}, {
"name": "hue:HueColorModeState",
"type": 3,
"value": "hs"
}, {
"name": "core:NameState",
"type": 3,
"value": "Light name"
}, {
"name": "core:OnOffState",
"type": 3,
"value": "on"
}, {
"name": "core:LightIntensityState",
"type": 1,
"value": 20
}],
"available": False,
"enabled": False,
"type": 1
}, {
"label": "window2",
"deviceURL": "io://1234-1234-1234/12345644",
"shortcut": False,
"states": [{
"name": "core:NameState",
"type": 3,
"value": "window2"
}, {
"name": "core:PriorityLockTimerState",
"type": 1,
"value": 0
}, {
"name": "core:ClosureState",
"type": 1,
"value": 22
}, {
"name": "core:OpenClosedState",
"type": 3,
"value": "open"
}],
"available": False,
"enabled": False,
"type": 1
}]
}
light = tahoma.getDevice("hue://1234-1234-1234/123456789012/lights/1")
window = tahoma.getDevice("io://1234-1234-1234/12345644")
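# Before the state update is applied, the devices still hold the initial
# values parsed from SetupInput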
self.assertEqual( light.activeStates["core:CieColorSpaceXState"], 0.3505 )
self.assertEqual( light.activeStates["core:LightIntensityState"], 0 )
self.assertEqual( light.activeStates["core:OnOffState"], "off" )
self.assertEqual( window.activeStates["core:ClosureState"], 100 )
self.assertEqual( window.activeStates["core:OpenClosedState"], "closed" )
tahoma._getStates(changeData) # only for unit tests
self.assertEqual( light.activeStates["core:CieColorSpaceXState"], 0.432 )
self.assertEqual( light.activeStates["core:LightIntensityState"], 20 )
self.assertEqual( light.activeStates["core:OnOffState"], "on" )
self.assertEqual( window.activeStates["core:ClosureState"], 22 )
self.assertEqual( window.activeStates["core:OpenClosedState"], "open" ) | tests/test_protocol_getStates.py | 0.521227 | 0.434881 |
import random
import numpy as np
import torch
from torchvision import transforms as T
from torchvision.transforms import functional as F
def pad_if_smaller(img, size, fill=0):
size = (size, size) if isinstance(size, int) else size
h, w = size
ow, oh = img.size
if ow < w or oh < h:
padh = h - oh if oh < h else 0
padw = w - ow if ow < w else 0
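# torchvision's F.pad reads a 4-tuple as (left, top, right, bottom), so
# padding is only added on the right and bottom edges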
img = F.pad(img, (0, 0, padw, padh), fill=fill)
return img
class Compose:
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, image, target):
for t in self.transforms:
image, target = t(image, target)
return image, target
class RandomResize:
def __init__(self, min_size, max_size=None):
self.min_size = (min_size, min_size) if isinstance(min_size, int) else min_size
if max_size is None:
max_size = min_size
self.max_size = (max_size, max_size) if isinstance(max_size, int) else max_size
def __call__(self, image, target):
h = random.randint(self.min_size[0], self.max_size[0])  # sample height from [min_h, max_h]
w = random.randint(self.min_size[1], self.max_size[1])  # sample width from [min_w, max_w]
size = (h, w)
image = F.resize(image, size)
target = F.resize(target, size, interpolation=T.InterpolationMode.NEAREST)
return image, target
class RandomHorizontalFlip:
def __init__(self, flip_prob):
self.flip_prob = flip_prob
def __call__(self, image, target):
if random.random() < self.flip_prob:
image = F.hflip(image)
target = F.hflip(target)
return image, target
class RandomCrop:
def __init__(self, size):
self.size = (size, size) if isinstance(size, int) else size
def __call__(self, image, target):
image = pad_if_smaller(image, self.size)
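        # 255 is used as the "ignore" fill value for padded label pixels.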
target = pad_if_smaller(target, self.size, fill=255)
crop_params = T.RandomCrop.get_params(image, self.size)
image = F.crop(image, *crop_params)
target = F.crop(target, *crop_params)
return image, target
class CenterCrop:
def __init__(self, size):
self.size = (size, size) if isinstance(size, int) else size
def __call__(self, image, target):
image = F.center_crop(image, self.size)
target = F.center_crop(target, self.size)
return image, target
class PILToTensor:
def __call__(self, image, target):
image = F.pil_to_tensor(image)
target = torch.as_tensor(np.array(target), dtype=torch.int64)
return image, target
class ConvertImageDtype:
def __init__(self, dtype):
self.dtype = dtype
def __call__(self, image, target):
image = F.convert_image_dtype(image, self.dtype)
return image, target
class Normalize:
def __init__(self, mean, std):
self.mean = mean
self.std = std
def __call__(self, image, target):
image = F.normalize(image, mean=self.mean, std=self.std)
return image, target
if __name__ == "__main__":
from PIL import Image
import matplotlib.pyplot as plt
root = "G:/Codes/RealTime-Segementation/datasets/VOC2012"
img_path = f"{root}/JPEGImages/2007_000033.jpg"
label_path = f"{root}/SegmentationObject/2007_000033.png"
img = Image.open(img_path)
label = Image.open(label_path).convert('RGB')
plt.subplot(221), plt.title("Ori Image"), plt.imshow(np.asarray(img))
plt.subplot(222), plt.title("Ori Label"), plt.imshow(np.asarray(label))
transforms = Compose([
RandomResize((256, 256)),
PILToTensor()
])
img, label = transforms(img, label)
print(img.shape)
print(label.shape)
    plt.subplot(223), plt.title("Trans Image"), plt.imshow(np.asarray(img[0]))
    plt.subplot(224), plt.title("Trans Label"), plt.imshow(np.asarray(label))
plt.show() | seg_transforms/transforms.py | | 0.742982 | 0.464294 |
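A minimal sketch (not part of the source file) of how these paired transforms might be composed for training; the scale range, crop size, and ImageNet normalization constants are assumptions:

# Hypothetical training pipeline built from the classes above
# (torch is already imported at the top of the module).
train_transforms = Compose([
    RandomResize(256, 512),                      # assumed scale range
    RandomHorizontalFlip(0.5),
    RandomCrop(240),                             # pads first, using fill=255 for labels
    PILToTensor(),
    ConvertImageDtype(torch.float),
    Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
])
img_t, label_t = train_transforms(img, label)    # img, label: a PIL image/label pair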
import numpy as np
import cv2
import sys
import open3d as o3d
from pylibfreenect2 import Freenect2, SyncMultiFrameListener
from pylibfreenect2 import FrameType, Registration, Frame
pic_counter = 0
# Prefer a GPU packet pipeline, falling back to the CPU implementation.
try:
    from pylibfreenect2 import OpenGLPacketPipeline
    pipeline = OpenGLPacketPipeline()
except Exception:
    try:
        from pylibfreenect2 import OpenCLPacketPipeline
        pipeline = OpenCLPacketPipeline()
    except Exception:
        from pylibfreenect2 import CpuPacketPipeline
        pipeline = CpuPacketPipeline()
print("Packet pipeline:", type(pipeline).__name__)
fn2 = Freenect2()
# Test if Kinect is connected
if fn2.enumerateDevices() == 0:
print("No device connected!")
sys.exit(1)
# Setup device
serial = fn2.getDeviceSerialNumber(0)
device = fn2.openDevice(serial, pipeline=pipeline)
# Setup listener
listener = SyncMultiFrameListener(
FrameType.Color | FrameType.Ir | FrameType.Depth)
# Register listeners
device.setColorFrameListener(listener)
device.setIrAndDepthFrameListener(listener)
# Start the device
device.start()
# NOTE: must be called after device.start()
registration = Registration(device.getIrCameraParams(),
device.getColorCameraParams())
undistorted = Frame(512, 424, 4)
registered = Frame(512, 424, 4)
def depth2xyz():
global pic_counter
points = []
for v in range(undistorted.height):
for u in range(undistorted.width):
X, Y, Z = registration.getPointXYZ(undistorted, v, u)
            # Skip invalid points: NaN comparisons evaluate to False, so NaN
            # coordinates are dropped along with the all-zero origin.
            if X + Y + Z > 0:
points.append([X, Y, Z])
pcd = o3d.geometry.PointCloud()
pcd.points = o3d.utility.Vector3dVector(points)
o3d.io.write_point_cloud('XYZ_{}.pcd'.format(pic_counter), pcd)
pic_counter += 1
while True:
frames = listener.waitForNewFrame()
color = frames["color"]
ir = frames["ir"]
depth = frames["depth"]
registration.apply(color, depth, undistorted, registered)
depth_rect = cv2.rectangle(registered.asarray(dtype=np.uint8), (206, 162), (306, 262), (254, 0, 0), 1)
cv2.imshow("color+depth", depth_rect)
listener.release(frames)
key = cv2.waitKey(delay=1)
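    # Enter saves the current frame as a point cloud; 'q' quits.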
if key == ord('\r'):
depth2xyz()
elif key == ord('q'):
break
device.stop()
device.close()
sys.exit(0) | 1.DepthFromKinect/Python/KinectV2Depth.py | | 0.297266 | 0.206494 |
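A small sketch (not from the source) for inspecting a cloud saved by depth2xyz(); the filename assumes the first capture:

import open3d as o3d

# Load the first saved cloud and view it interactively.
pcd = o3d.io.read_point_cloud("XYZ_0.pcd")
print(pcd)  # reports the number of points
o3d.visualization.draw_geometries([pcd])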
import re
from random import choice
import requests
class Wiwj(object):
def __init__(self):
"""
        75163 listings in total; at 30 per page that makes 2506 pages.
"""
self.start_url = 'https://sh.5i5j.com/ershoufang/'
# self.proxies = {"http": "http://localhost:1080", "https": "http://localhost:1080", }
def jpg_tool(self, text):
house_jpg = ''
if 'src' in text:
house_jpg = re.findall(r'src="(.*?)".*?', text, re.S)[0]
return house_jpg
def div_tool(self, text):
text = re.sub('<.*?>| i |<!--|-->', ' ', text)
return text
@staticmethod
def getadsl(res):
""" 随机取ip """
proxies = {"http": "http://" + choice(res['data']), }
# print(proxies)
return proxies
def gethouselist(self):
s = requests.session()
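        # Fetch a JSON list of candidate proxy IPs from a proxy-pool service.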
res = s.get('http://', headers={}).json()
r = s.get(self.start_url, proxies=self.getadsl(res))
print(r.text)
        # Listing URL, cover image (present only when "src" appears), title, info lines 1-3, total price, unit price, tags
basic_info_list = re.findall(r'<div class="listImg".*?><a href="(.*?)" target="_blank">.*?<img class='
r'"lazy" (.*?)title="(.*?)".*?>.*?<!-- <p>.*?</p> -->.*?<i class="i_01">'
r'</i>(.*?)</p>.*?<i class="i_02"></i>(.*?)</p>.*?<i class="i_03"></i>(.*?)</p>'
r'.*?<p class="redC">(.*?)</p>.*?<p>.*?(\d+).*?</p>.*?<div class="listTag">(.*?)<'
r'/div>', r.text, re.S)
if basic_info_list:
for basic_info in basic_info_list:
# print(basic_info)
house_url = 'https://sh.5i5j.com' + basic_info[0]
house_jpg = self.jpg_tool(basic_info[1])
house_title = basic_info[2]
first_line = basic_info[3].split(" · ")
# house_type = first_line[0]
# house_m2 = first_line[1]
# house_direction = first_line[2]
second_line = basic_info[4].split(" · ")
for i in range(0, len(second_line)):
second_line[i] = self.div_tool(second_line[i])
third_line = basic_info[5].split(" · ")
house_price = self.div_tool(basic_info[6])
house_m2_price = basic_info[7]
house_tag = self.div_tool(basic_info[8])
# print(second_line)
print(house_url, house_jpg, house_title, first_line, second_line, third_line, house_price, house_m2_price, house_tag)
if __name__ == '__main__':
wiwj = Wiwj()
wiwj.gethouselist() | 5i5j/wiwj.py | | 0.05647 | 0.133443 |
import os
import sys
sys.path.insert(0, os.path.join(os.getcwd(), "asm"))
import clr
clr.AddReference("WatiN.Core")
from WatiN.Core import *
from System import *
from System.Threading import Thread, ApartmentState, ThreadStart
from math import floor
def ThreadMain():
try:
ie1 = IE.AttachTo[IE](Find.ByTitle("Gestione Giornaliera"))
ie2 = IE.AttachTo[IE](Find.ByTitle("Giustificativo Singolo"))
from datetime import datetime
fom = DateTime(int(ie1.SelectList(Find.ByName("cmbAnni")).SelectedOption.Value), int(ie1.SelectList(Find.ByName("cmbMesi")).SelectedOption.Value), 1)
eom = fom.AddMonths(1).AddDays(-1)
day1 = int(raw_input("Start=") or fom.Day)
day2 = int(raw_input("End=") or eom.Day)
print "%i - %i of %s" % (day1, day2, fom.ToString("MMM yyyy"))
for day in range(day1, day2+1):
ie1.Link(Find.ByText(str(day))).Click()
Thread.Sleep(1000)
f = ie1.Frame(Find.ByName("prestazioni"))
            print f.Tables
            print f.Tables.Count
t = f.Tables[0].Tables[0]
for r in t.TableRows:
if r.TableCells.Count >= 7:
if r.TableCells[4].Text == "Presenza Oltre Monte Ore Teorico":
inizio = r.TableCells[5].Text.strip()
fine = r.TableCells[6].Text.strip()
date = DateTime(fom.Year, fom.Month, day)
                        ie2.TextField(Find.ByName("cmbCodCausale")).Value = '001' # overtime ("straordinari")
                        #ie2.TextField(Find.ByName("cmbCodCausale")).Value = '008' # time bank ("banca ore")
ie2.TextField(Find.ByName("datai")).Value = date.ToString("dd/MM/yyyy")
ie2.TextField(Find.ByName("dataf")).Value = date.ToString("dd/MM/yyyy")
iHH, iMM = inizio.split(":")
fHH, fMM = fine.split(":")
iDT = DateTime(date.Year, date.Month, day, int(iHH), int(iMM), 0)
fDT = DateTime(date.Year, date.Month, day, int(fHH), int(fMM), 0)
ts = fDT - iDT
# print "%i\t%s\t%s - " % (day, inizio, fine)
mins = int(ts.TotalMinutes)
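                        # Round down to whole 30-minute blocks; spans under one hour count as zero.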
rmins = floor(mins/30)*30
if rmins < 60:
rmins = 0
print "%s - %s (%s mins rounded to %s - lost %s)" % (iDT, fDT, mins, rmins, mins - rmins)
fDT = iDT.AddMinutes(rmins)
fHH = "%02i" % fDT.Hour
fMM = "%02i" % fDT.Minute
if rmins > 0:
try:
ie2.SelectList(Find.ByName("cmbOraInizioPezzaHH")).Option(Find.ByValue(iHH)).Select()
except:
pass
try:
ie2.SelectList(Find.ByName("cmbOraInizioPezzaMM")).Option(Find.ByValue(iMM)).Select()
except:
pass
try:
ie2.SelectList(Find.ByName("cmbOraFinePezzaHH")).Option(Find.ByValue(fHH)).Select()
except:
pass
try:
ie2.SelectList(Find.ByName("cmbOraFinePezzaMM")).Option(Find.ByValue(fMM)).Select()
except:
pass
ie2.Button(Find.ById("buttonInserisci")).Click()
#Console.ReadLine()
Thread.Sleep(1000)
except:
import traceback
traceback.print_exc()
t = Thread(ThreadStart(ThreadMain))
t.SetApartmentState(ApartmentState.STA)
t.Start()
t.Join() | gerip.py | | 0.043834 | 0.169509 |
# TODO: Warning - "/usr/local/lib/python3.8/dist-packages/gpiozero/output_devices.py:1532: PWMSoftwareFallback: To reduce servo jitter, use the pigpio pin factory.See https://gpiozero.readthedocs.io/en/stable/api_output.html#servo for more info"
# warnings.warn(PWMSoftwareFallback()
# $ export GPIOZERO_PIN_FACTORY=nat
# Device.pin_factory = NativeFactory()
# TODO: Get rid of time.sleep(), due to ROS2... with something more real time/concurrent and ROS2 friendly way.
# Include the ROS2 stuff...
import rclpy
from rclpy.node import Node
from rclpy.parameter import Parameter
from rcl_interfaces.msg import ParameterDescriptor
from std_msgs.msg import String
# Import the Raspberry Pi I/O stuff
from gpiozero.pins.native import NativeFactory
from gpiozero import Servo
from gpiozero import LED
from gpiozero import Device
# Include Linux/Ubuntu stuff...
import sys
import signal
from time import *
class LightBeaconNode(Node):
def __init__(self):
super().__init__("light_beacon_node")
        # Default GPIO pin for the light_beacon +3.3V power rail. Accessed via ROS parameters...
self.declare_parameter( 'gpio_pin_power', 23, ParameterDescriptor(description='GPIO-pin for beacon +3.3V-power [default <23>]') )
self.GPIO_PIN_POWER = self.get_parameter('gpio_pin_power').value
        # Default GPIO pin for the light_beacon input/signal used to toggle its state. Accessed via ROS parameters...
self.declare_parameter( 'gpio_pin_signal', 24, ParameterDescriptor(description='GPIO-pin for input/signal to toggle state [default <24>]') )
self.GPIO_PIN_SIGNAL = self.get_parameter('gpio_pin_signal').value
        # Default ROS2 topic used to control the beacon. Accessed via ROS parameters...
self.declare_parameter( 'beacon_topic', "beacon_mode", ParameterDescriptor(description='ROS2 topic to control beacon [default "beacon_mode"]') )
self.BEACON_TOPIC = self.get_parameter('beacon_topic').get_parameter_value().string_value
self.get_logger().info("- self.BEACON_TOPIC: '" + self.BEACON_TOPIC + "'" )
self.new_mode = self.create_subscription(String, self.BEACON_TOPIC, self.light_beacon_mode_callback, 10)
self.new_mode # prevent unused variable warning
        self.exit = False
# Check we can open/contact the GPIO-pins for the Light Beacon
try:
self.led = LED( self.GPIO_PIN_POWER )
sleep(0.1) # Wait for the light beacon to power up...
self.led.on() # Bring the light ON!
self.get_logger().info("light_beacon_node initiating")
self.beacon_signal = Servo( self.GPIO_PIN_SIGNAL )
self.beacon_signal.min()
# Set startmode for the PWM-controlled light-beacon
self.beacon_current_mode = "RotatingFast".lower()
self.beacon_new_mode = "LEDoff".lower()
            self.light_beacon_shiftMode()
            self.beacon_current_mode = self.beacon_new_mode
self.get_logger().info("light_beacon_node has started")
self.get_logger().info("- beacon_power GPIO-pin: " + str( self.GPIO_PIN_POWER ) )
self.get_logger().info("- beacon_signal GPIO-pin: " + str( self.GPIO_PIN_SIGNAL ) )
self.get_logger().info("- beacon_mode ROS Topic: " + "beacon_mode" )
except:
            # Note: a permission or operating-system error... ;-(
self.get_logger().error("light_beacon_node canceled:" + str(sys.exc_info()[1]) )
self.exit = True
# Callback when new value set for topic.
# Trigger: When BEACON_TOPIC is changed...
# Input: ROS2 topic msg.data
# Output: beacon_new_mode
def light_beacon_mode_callback(self, msg):
        self.beacon_new_mode = msg.data # Save the new ROS2 topic payload as a string.
        self.beacon_new_mode = self.beacon_new_mode.lower() # Normalise to lower case
self.get_logger().info("+------light_beacon_mode_callback(self, msg):-------" )
self.light_beacon_shiftMode()
self.beacon_current_mode = self.beacon_new_mode
    # Shift/toggle through the light-beacon sequence.
# Input 1: self.beacon_current_mode
# Input 2: self.beacon_new_mode
    # Effect: steps the beacon through the required number of PWM cycles
def light_beacon_shiftMode(self):
self.get_logger().info("+------light_beacon_shiftMode(self):----------------" )
self.get_logger().info("| Current:'" + str(self.beacon_current_mode) + "'" )
self.get_logger().info("| New: '" + str(self.beacon_new_mode) + "'" )
if ( self.beacon_new_mode == "RotatingFast".lower() ):
new = 1
elif ( self.beacon_new_mode == "RotatingSlow".lower() ):
new = 2
elif ( self.beacon_new_mode == "Flashing".lower() ):
new = 3
elif ( self.beacon_new_mode == "Strobing".lower() ):
new = 4
elif ( self.beacon_new_mode == "LEDoff".lower() ):
new = 5
elif ( self.beacon_new_mode == "Reset".lower() ):
            # Reset: power the beacon off, then on again
new = 5
self.beacon_new_mode = "LEDoff".lower()
self.beacon_current_mode = "RotatingFast".lower()
self.led.off()
sleep(0.1)
self.led.on()
sleep(0.2)
else:
new = 0
self.get_logger().warning("| Input ERROR in Light Beacon node |")
if (self.beacon_current_mode == "RotatingFast".lower() ):
current = 1
elif (self.beacon_current_mode == "RotatingSlow".lower() ):
current = 2
elif (self.beacon_current_mode == "Flashing".lower() ):
current = 3
elif (self.beacon_current_mode == "Strobing".lower() ):
current = 4
elif (self.beacon_current_mode == "LEDoff".lower() ):
current = 5
else:
current = 0
self.get_logger().warning("| Input ERROR in Light Beacon node |")
self.get_logger().info("| We have a vote... Current=" + str(current) + " New=" + str(new) + " SUM=" + str(new - current) + " cycles to go.")
if ( new < current ):
self.get_logger().info("| New < Current (+5)")
self._cycles( new - current +5 )
else:
self.get_logger().info("| New => Current")
self._cycles( new - current)
    # Perform the requested number of PWM-cycle-steps for the beacon
def _cycles(self, i ):
for x in range(0, i):
self._cycle_once()
    # Perform one PWM-cycle-step for the beacon
def _cycle_once(self):
self.get_logger().warning("| CYCLES |")
self.beacon_signal.min()
sleep(0.1)
self.beacon_signal.max()
sleep(0.1)
self.beacon_signal.min()
sleep(0.1)
self.beacon_signal.detach()
def main(args=None):
rclpy.init(args=args)
node = LightBeaconNode()
    # The try block detects errors, the except block handles Ctrl-C,
    # and the finally block always runs, whatever ends the script.
try:
rclpy.spin(node)
except KeyboardInterrupt:
print("light_beacon_node **** 💀 Ctrl-C detected...")
finally:
#
print("light_beacon_node **** 🪦 Ending... ")
print( str(sys.exc_info()[1]) )
# Time to clean up stuff!
# - Destroy the node explicitly
# (optional - otherwise it will be done automatically
# when the garbage collector destroys the node object)
node.destroy_node()
rclpy.shutdown()
# self.beacon_signal.cleanup()
# self.led.cleanup()
if __name__ == "__main__":
main() | pet_ros2_lightbeacon_pkg/pet_light_beacon_node.py | | 0.346099 | 0.179064 |
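A minimal sketch (not part of the package) of driving the node from another ROS2 process; it assumes the default "beacon_mode" topic and one of the mode strings handled by the callback:

import rclpy
from rclpy.node import Node
from std_msgs.msg import String

rclpy.init()
sender = Node("beacon_mode_sender")       # hypothetical helper node
pub = sender.create_publisher(String, "beacon_mode", 10)
msg = String()
msg.data = "Flashing"                     # RotatingFast/RotatingSlow/Strobing/LEDoff/Reset also work
pub.publish(msg)
rclpy.spin_once(sender, timeout_sec=0.5)  # give the executor a moment
sender.destroy_node()
rclpy.shutdown()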
import cv2 as cv
import dlib
import numpy as np
import torch
import torch.backends.cudnn as cudnn
import torchvision.transforms as transforms
from config import device
from retinaface.detector import Detector
from utils.ddfa import ToTensorGjz, NormalizeGjz, _parse_param
from utils.inference import crop_img, parse_roi_box_from_bbox, parse_roi_box_from_landmark
def extract(img_ori):
rects = face_detector(img_ori, 1)
rect = rects[0]
# dets, landms = detector.detect_faces(img_ori)
# det = dets[0]
# bbox = [det[0], det[1], det[2], det[3]]
# print('bbox: ' + str(bbox))
# roi_box = parse_roi_box_from_bbox(bbox)
# print('roi_box: ' + str(roi_box))
# - use landmark for cropping
pts = face_regressor(img_ori, rect).parts()
pts = np.array([[pt.x, pt.y] for pt in pts]).T
roi_box = parse_roi_box_from_landmark(pts)
img = crop_img(img_ori, roi_box)
img = cv.resize(img, (120, 120), interpolation=cv.INTER_LINEAR)
    model_input = transform(img).unsqueeze(0)  # add the batch dimension
    model_input = model_input.to(device)
    with torch.no_grad():
        param = model(model_input)
param = param.squeeze().cpu().numpy().flatten().astype(np.float32)
# print('param: ' + str(param))
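    # 3DMM parameters: pose (p), translation offset, shape and expression coefficients.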
p, offset, alpha_shp, alpha_exp = _parse_param(param)
# print('alpha_exp: ' + str(alpha_exp))
return alpha_exp, p
if __name__ == '__main__':
filename_scripted = '3ddfa_scripted.pt'
model = torch.jit.load(filename_scripted)
cudnn.benchmark = True
model = model.to(device)
model.eval()
face_detector = dlib.get_frontal_face_detector()
dlib_landmark_model = 'models/shape_predictor_68_face_landmarks.dat'
face_regressor = dlib.shape_predictor(dlib_landmark_model)
# detector = Detector()
transform = transforms.Compose([ToTensorGjz(), NormalizeGjz(mean=127.5, std=128)])
alpha_exp_list = []
pose_list = []
video = 'data/kuazhangbiaoqing.mp4'
cap = cv.VideoCapture(video)
idx = 0
while cap.isOpened():
success, frame = cap.read()
if not success:
break
try:
alpha_exp, p = extract(frame)
alpha_exp_list.append(alpha_exp)
pose_list.append(p)
except IndexError as err:
print(err)
idx += 1
print(idx)
import pickle
print(len(alpha_exp_list))
data = dict()
data['alpha_exp'] = alpha_exp_list
data['pose'] = pose_list
with open('data.pkl', 'wb') as fp:
pickle.dump(data, fp) | extract.py | | 0.366023 | 0.31012 |
from uuid import UUID
from rest_framework.exceptions import PermissionDenied, ValidationError
from rest_framework.generics import get_object_or_404
from rest_framework.permissions import IsAuthenticated
from rest_framework.viewsets import ModelViewSet
from vbb_backend.program.api.serializers.slotMentor import (
MentorSlotBookingSerializer,
MentorSlotSerializer,
)
from vbb_backend.program.models import Slot, MentorSlotAssociation
from vbb_backend.users.models import UserTypeEnum, Mentor
class MentorSlotViewSet(ModelViewSet):
queryset = MentorSlotAssociation.objects.all()
permission_classes = [IsAuthenticated]
serializer_class = MentorSlotSerializer
lookup_field = "external_id"
def get_queryset(self):
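        # Scope associations to the slot named in the URL; superusers see
        # everything, headmasters only slots in programs they direct.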
queryset = self.queryset
user = self.request.user
slot = Slot.objects.get(external_id=self.kwargs.get("slot_external_id"))
queryset = queryset.filter(slot=slot)
if user.is_superuser:
pass
elif user.user_type in [UserTypeEnum.HEADMASTER.value]:
queryset = queryset.filter(slot__computer__program__program_director=user)
else:
raise PermissionDenied()
return queryset
def check_if_uuid(self, uuid_to_test):
try:
uuid_obj = UUID(uuid_to_test)
except ValueError:
return False
return str(uuid_obj) == uuid_to_test
def get_slot(self):
return get_object_or_404(Slot, external_id=self.kwargs.get("slot_external_id"))
def get_mentor(self):
return get_object_or_404(Mentor, external_id=self.request.data.get("mentor"))
def perform_create(self, serializer):
mentor_id = self.request.data.get("mentor")
if not mentor_id:
raise ValidationError({"message": "mentor id required"})
if not self.check_if_uuid(mentor_id):
raise ValidationError({"message": "mentor id must be valid UUID"})
serializer.save(slot=self.get_slot(), mentor=self.get_mentor())
class MentorBookingViewSet(ModelViewSet):
queryset = MentorSlotAssociation.objects.all()
permission_classes = [IsAuthenticated]
serializer_class = MentorSlotBookingSerializer
lookup_field = "external_id"
def get_queryset(self):
queryset = self.queryset
user = self.request.user
slot = Slot.objects.get(external_id=self.kwargs.get("slot_external_id"))
queryset = queryset.filter(slot=slot)
if user.is_superuser:
pass
elif user.user_type in [UserTypeEnum.MENTOR.value]:
queryset = queryset.filter(mentor=user)
else:
raise PermissionDenied()
return queryset
def get_slot(self):
return get_object_or_404(
Slot.objects.all(), external_id=self.kwargs.get("slot_external_id")
)
def perform_create(self, serializer):
serializer.save(slot=self.get_slot()) | vbb_backend/program/api/viewsets/slotMentor.py | | 0.45423 | 0.13452 |
""" Module definition for intranetsgdf. """
from datetime import date as _date
from visyerres_sgdf_woob.backend import Module as _Module
from visyerres_sgdf_woob.capabilities import (
BankAccount as _BankAccount, Person as _Person,
Structure as _Structure,
)
from visyerres_sgdf_woob.utils import IID as _IID
from woob.tools.value import (
Value as _Value, ValueBackendPassword as _ValueBackendPassword,
ValueBool as _ValueBool,
)
from .api import IntranetSGDFAPIBrowser as _IntranetSGDFAPIBrowser
from .direct import IntranetSGDFBrowser as _IntranetSGDFBrowser
__all__ = ['IntranetSGDFModule']
class IntranetSGDFModule(_Module):
NAME = 'intranetsgdf'
DESCRIPTION = 'Intranet Scouts et Guides de France'
MAINTAINER = '<NAME>'
EMAIL = '<EMAIL>'
LICENSE = 'Proprietary'
code = _ValueBackendPassword(
label="Numéro d'adhérent",
regexp=r'[0-9]{9}',
masked=False,
required=False,
)
password = _ValueBackendPassword(
label='Mot de passe',
masked=True,
required=False,
)
api_client_id = _ValueBackendPassword(
label='Client ID',
masked=True,
required=False,
)
api_client_secret = _ValueBackendPassword(
label='API client secret',
masked=True,
required=False,
)
interactive = _ValueBool(
default=True,
transient=True,
)
environment = _Value(
label='Environnement technique',
choices={
'live': 'Production',
'test': 'Recette',
},
default='live',
required=False,
)
    @property
    def direct_browser(self):
        # Cache the browser like api_browser does, so state survives accesses.
        if not self._direct_browser:
            self._direct_browser = self.create_browser(
                self.config,
                klass=_IntranetSGDFBrowser,
            )
            if hasattr(self._direct_browser, 'load_state'):
                self._direct_browser.load_state(self.storage.get(
                    'direct_browser_state',
                    default={},
                ))
        return self._direct_browser
@property
def api_browser(self):
if not self.config['api_client_id'].get():
return
if not self._api_browser:
self._api_browser = self.create_browser(
self.config,
klass=_IntranetSGDFAPIBrowser,
)
if hasattr(self._api_browser, 'load_state'):
                self._api_browser.load_state(self.storage.get(
'api_browser_state',
default={},
))
return self._api_browser
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._direct_browser = None
self._api_browser = None
def dump_state(self):
should_save = False
if self._direct_browser and hasattr(self.direct_browser, 'dump_state'):
self.storage.set(
'direct_browser_state',
self.direct_browser.dump_state(),
)
should_save = True
if self._api_browser and hasattr(self.api_browser, 'dump_state'):
self.storage.set(
'api_browser_state',
self.api_browser.dump_state(),
)
should_save = True
if should_save:
self.storage.save()
def check_login(self):
self.direct_browser.check_login()
def request_code(
self,
last_name: str,
first_name: str,
birth_date: _date,
):
return self.direct_browser.request_code(
last_name,
first_name,
birth_date,
)
def request_new_password(self, code: str):
return self.direct_browser.request_new_password(
code,
)
def iter_people(self):
return self.direct_browser.iter_people()
def iter_people_by_name(self, last_name: str, first_name: str):
return self.direct_browser.iter_people_by_name(last_name, first_name)
def get_current_person_iid(self):
return self.direct_browser.get_person_iid()
def get_current_person(self):
iid = self.direct_browser.get_person_iid()
return self.get_person(iid)
    def get_person(self, obj):
        # Accept a Person or a raw IID/str/bytes and coerce it to a Person.
        if isinstance(obj, (_IID, str, bytes)):
            person = _Person()
            person.iid = _IID(obj)
            obj = person
        return self.direct_browser.get_person(obj)
def iter_delegations(self):
return self.direct_browser.iter_delegations()
def iter_structures(self):
return self.direct_browser.iter_structures()
def get_structure(self, obj):
if isinstance(obj, (_IID, str, bytes)):
structure = _Structure()
structure.iid = _IID(obj)
obj = structure
return self.direct_browser.get_structure(obj)
def get_structure_parent(self, obj):
if isinstance(obj, (_IID, str, bytes)):
structure = _Structure()
structure.iid = _IID(obj)
obj = structure
return self.direct_browser.get_structure_parent(obj)
def iter_structure_children(self, obj):
if isinstance(obj, (_IID, str, bytes)):
structure = _Structure()
structure.iid = _IID(obj)
obj = structure
return self.direct_browser.iter_structure_children(obj)
    def iter_functions(self):
        return self.direct_browser.iter_functions()
def get_bank_account(self, obj):
if isinstance(obj, (_IID, str, bytes)):
account = _BankAccount()
account.iid = _IID(obj)
obj = account
return self.direct_browser.get_bank_account(obj)
# End of file. | visyerres_sgdf_woob/modules/intranetsgdf/module.py | | 0.613121 | 0.128443 |
from abc import ABCMeta, abstractmethod
from utils.magnet_info import MagnetInfo
from utils.title_checker import TitleChecker
from utils.web_delegate import WebDelegate
from utils.history_delegate import HistoryDelegate
from utils.transmission_delegate import TransmissionDelegate
from scraper.board_item_iterator import BoardItemIterator
from scraper.board_page_iterator import BoardPageIterator
from scraper.system_config import SystemConfig
from scraper.scraper_config import ScraperConfig
from scraper.category_config import CategoryConfig
import re
class ScraperTemplate(metaclass=ABCMeta):
def __init__(self, name, scraper_configuration_file,
local_machine_status_file, local_machine_history_file):
self.name = name
self.__categories = []
self.__title_checker = TitleChecker()
self.__web_delegate = WebDelegate()
self.__scraper_configuration_file = scraper_configuration_file
self.__local_machine_status_file = local_machine_status_file
self.__system_config = SystemConfig(self.__scraper_configuration_file,
self.__local_machine_status_file)
self.__scraper_config = ScraperConfig(self.name,
self.__scraper_configuration_file, self.__local_machine_status_file)
self.__local_machine_history_file = local_machine_history_file
self.__history_delegate = HistoryDelegate(self.__local_machine_history_file)
trans_id = self.__system_config.get_config_local("trans-id")
trans_pw = self.__system_config.get_config_local("trans-pw")
trans_host = self.__system_config.get_config_local("trans-host")
trans_port = self.__system_config.get_config_local("trans-port")
self.__transmission_delegate = TransmissionDelegate(trans_id, trans_pw,
trans_host, trans_port, self.history_delegate)
def __str__(self):
return self.name
@property
def categories(self):
return self.__categories
@property
def web_delegate(self):
return self.__web_delegate
@property
def history_delegate(self):
return self.__history_delegate
def check_site_alive(self):
        '''Check that the site is still reachable.'''
return self.web_delegate.check_url_alive(self.__scraper_config.get_config_scraper('url'))
def correct_url(self):
        '''If the site is unreachable, probe successive numbered URL variants.'''
base = self.__scraper_config.get_config_scraper('url')
start_num = int(re.findall(r'\d+', base)[0])
for num in range(start_num, start_num+5):
try_url = re.sub('[0-9]+', str(num), base)
print("Looking for.. ", try_url)
if self.web_delegate.check_url_alive(try_url):
                print('Found the new torrent site!\n')
self.__scraper_config.set_base_url(try_url)
return True
        print('Failed to find a new torrent site.')
return False
def aggregation_categories(self):
        '''Build this site's categories from the parsed JSON config.'''
categories = [x.strip() for x in
self.__scraper_config.get_config_scraper('categories').split(',')]
        if len(categories) == 1 and categories[0] == "":
            print("Scraping for this site is disabled.")
categories = []
for category_name in categories:
_ = CategoryConfig(self.name, category_name,
self.__scraper_configuration_file,
self.__local_machine_status_file,
self.__scraper_config.get_base_url())
self.__categories.append(_)
print("Aggregation categories for : " + str(self))
for category in self.categories:
print("\t" + category.get_category() + " : " + category.get_url())
@abstractmethod
def parse_page_data(self, url):
pass
@abstractmethod
def parse_magnet_from_page_url(self, url):
pass
@abstractmethod
def get_board_id_num(self, url):
pass
def execute_scraper(self):
for category in self.__categories:
self.__execute_scraper_for_category(category)
def __execute_scraper_for_category(self, category):
        '''`category` is a CategoryConfig instance.'''
base = category.get_base_url()
url = category.get_config_scraper('url')
strt_index = int(self.__scraper_config.get_config_scraper('start-index'))
max_page = int(self.__scraper_config.get_config_scraper('max-page'))
page_iterator = BoardPageIterator(base + url, strt_index, max_page)
_ = category.get_config_local('history')
        ''' new_latest_id is updated to the newest board id while iterating posts.
        config_latest_id must stay fixed; it is only compared against past history. '''
new_latest_id = 0 if _ is None else _
config_latest_id = new_latest_id
print("read new_latest_id : %d" % new_latest_id)
        '''Iterate over the site page by page.'''
for page in page_iterator:
board_list = self.parse_page_data(page)
item_iterator = BoardItemIterator(board_list)
            '''Iterate over the list items within a single page.'''
for title, href in item_iterator:
"""board_id_num을 만들어내는 방법이 web별로 달라서 iterator에서
할 수 없음."""
board_id_num = self.get_board_id_num(href)
#print("DEBUG : %d, %s" % (board_id_num, title))
if board_id_num > 0:
if board_id_num > new_latest_id:
new_latest_id = board_id_num
if board_id_num < config_latest_id:
page_iterator.mark_for_early_stop()
item_iterator.mark_for_early_stop()
else:
print("board_id_num is wrong, can't update latest ID - %d." % board_id_num)
#print("info: parse info=\t[%s][%s][%d] - %s"
# % (self.name, category.get_category(), board_id_num, title))
matched_name = self.__title_checker.validate_board_title(title)
if not matched_name:
#print("Not matched_name ", title)
continue
magnet = self.parse_magnet_from_page_url(href)
if magnet is None:
continue
magnet_info = MagnetInfo(title, magnet, matched_name, self.name)
ret = self.__transmission_delegate.add_magnet_transmission_remote(magnet_info)
if not ret:
continue
                #TODO: remove_transmission_remote is currently a no-op (pass).
self.__transmission_delegate.remove_transmission_remote(matched_name)
category.set_config_local('history', new_latest_id)
| scraper/scraper.py |
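# A minimal sketch of a concrete scraper plugging into ScraperTemplate above.
# The template-method hooks are the three abstract methods; everything inside
# them here (the WebDelegate get_page helper, the regexes) is hypothetical.
class ExampleScraper(ScraperTemplate):
    def parse_page_data(self, url):
        # Would fetch the board page and return its item-list markup.
        return self.web_delegate.get_page(url)  # assumed helper name

    def parse_magnet_from_page_url(self, url):
        # Would extract a magnet link from a detail page, or None if absent.
        page = str(self.web_delegate.get_page(url))  # assumed helper name
        found = re.search(r"magnet:\?xt=urn:btih:[0-9A-Fa-f]+", page)
        return found.group(0) if found else None

    def get_board_id_num(self, url):
        # Would derive the numeric post id from an item URL; -1 signals failure.
        ids = re.findall(r"\d+", url)
        return int(ids[-1]) if ids else -1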
import numpy as np
import nltk
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader, TensorDataset
from torch import optim
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
class Glove():
def __init__(self, fUrl):
"""
Load GloVe vectors from a plain-text (non-binary) model file.
"""
with open(fUrl) as f:
self.word_dic = {line.split()[0]: torch.tensor(np.asarray(line.split()[1:], dtype='float'), dtype=torch.float32) for line in f}
def isinkey(self, word):
return word in self.word_dic
def __getitem__(self,index):
return self.word_dic[index]
class TextCNN(nn.Module):
'''
Binary classification model.
'''
def __init__(self,filter_num,filter_sizes,vocabulary_size,embedding_dim,dropout=0.5):
'''
filter_num: number of convolutional kernels per filter size.
filter_sizes: sizes of the convolutional kernels.
vocabulary_size: maximum token length of an input headline (a sequence length, despite the name).
embedding_dim: embedding size.
'''
super(TextCNN, self).__init__()
self.filter_num = filter_num
self.filter_sizes = filter_sizes
self.vocabulary_size = vocabulary_size
embedding_dimension =embedding_dim
self.conv1 = nn.Conv2d(1,3,(1,embedding_dim))
self.convs = nn.ModuleList(
[nn.Conv2d(1, filter_num, (size, embedding_dimension)) for size in filter_sizes])
self.att=nn.ModuleList(
[nn.Linear(3*size+1,1) for size in filter_sizes])
self.linear=nn.ModuleList(
[nn.Linear(vocabulary_size-size+1,1) for size in filter_sizes])
self.dropout = nn.Dropout(dropout)
self.fc = nn.Linear(len(filter_sizes) * filter_num, 1)
def forward(self, x, y=None):
conv1 = self.conv1(x)
conv1=[torch.cat([torch.cat([(conv1[:,:,i:l+i,:]).reshape(-1,1,1,3*l) for _ in range(self.filter_num)],1) for i in range(self.vocabulary_size-l+1)],2) for l in self.filter_sizes]
x = [F.relu(conv(x)) for conv in self.convs]
atten = [torch.cat([item,c],3) for item,c in zip(x,conv1)]
atten = [torch.sigmoid(att(item)).squeeze(3) for item,att in zip(atten,self.att)]
x = [F.relu(item.squeeze(3)*att) for item,att in zip(x,atten)]
x = [F.relu(l(item)) for item,l in zip(x,self.linear)]
x = torch.cat(x, 1).squeeze(2)
x = self.dropout(x)
if y is None:
# Inference: return per-sample probabilities.
return torch.sigmoid(self.fc(x))
else:
# Training: return the binary cross-entropy loss directly;
# squeeze(1) aligns the (B, 1) output with the (B,) targets.
return F.binary_cross_entropy(torch.sigmoid(self.fc(x)).squeeze(1), y)
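# Shape walk-through of forward() for input x of shape (B, 1, 20, 50), with
# filter_num=2 and filter_sizes=[2, 3, 4] as configured further below:
#   conv1(x)                        -> (B, 3, 20, 1)
#   windowed conv1 contexts         -> (B, filter_num, 20-l+1, 3*l) per size l
#   convs[i](x) + relu              -> (B, filter_num, 20-size+1, 1)
#   cat + att + sigmoid + squeeze   -> attention weights (B, filter_num, 20-size+1)
#   per-size Linear + relu          -> (B, filter_num, 1)
#   cat over sizes, squeeze         -> (B, filter_num * len(filter_sizes))
#   fc                              -> (B, 1)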
def t2v(text):
# Vectorize the text: look up GloVe vectors for known tokens,
# zero-pad to 20 tokens, and truncate anything longer.
vecs = []
for i in tokener(text.lower()):
if w2v.isinkey(i):
vecs.append(w2v[i])
while len(vecs) < 20:
vecs.append(torch.zeros(50, dtype=torch.float32))
return torch.cat(vecs, 0).reshape(1, -1, 50)[:, :20, :]
# load the word2vec pre-trained model.
w2v=Glove('glove.6B.50d.txt')
# load train data.
df_incident=pd.read_csv('incident_label.csv',sep='\t')
df_NYT=pd.read_csv("new_NYT.csv",sep='\t')
tokener=nltk.tokenize.word_tokenize
df_NYT['token']=df_NYT['title'].map(lambda x:tokener(x))
df_NYT['length']=df_NYT['token'].map(lambda x:len(x))
NYT_train=df_NYT.loc[df_NYT['length']>5].loc[df_NYT['length']<=20]
NYT_train['vec']=NYT_train['title'].map(lambda x:t2v(x))
df_incident['token']=df_incident['title'].map(lambda x:tokener(x))
df_incident['length']=df_incident['token'].map(lambda x:len(x))
incident_train=df_incident.loc[df_incident['length']>5].loc[df_incident['length']<=20]
incident_train['vec']=incident_train['title'].map(lambda x:t2v(x))
X=[]
Y=[]
for i in incident_train['vec']:
X.append(i)
Y.append(1)
for i in NYT_train['vec']:
X.append(i)
Y.append(0)
x_train,x_test,y_train,y_test = train_test_split(X,Y,test_size=0.1,random_state=0)
X_train=torch.cat(x_train)
Y_train=torch.tensor(y_train,dtype=torch.float32)
X_eval=torch.cat(x_test)
Y_eval=torch.tensor(y_test,dtype=torch.float32)
traindata=TensorDataset(X_train,Y_train)
evaldata=TensorDataset(X_eval,Y_eval)
model = TextCNN(2, [2, 3, 4], 20, 50).cuda(device=0)
model = nn.DataParallel(model)
optimizer = optim.Adam(model.parameters(), lr=0.01)
dataloader = DataLoader(traindata, batch_size=40000)
# Train model.
for i in range(200):
model=model.train()
for x,y in dataloader:
x=x.reshape(-1,1,20,50).cuda()
y=y.reshape(-1).cuda()
loss = model(x, y).mean()  # DataParallel may return one loss per replica; reduce to a scalar
loss.backward()
optimizer.step()
optimizer.zero_grad()
if i % 20 == 0:
model = model.eval()
preds = [int(p[0] > 0.5) for p in model(X_eval.reshape(-1, 1, 20, 50).cuda()).tolist()]
print(classification_report(Y_eval.tolist(), preds, digits=4))
# Predict the reuters news.
df_reuter=pd.read_csv('reuter.csv',sep='\t')
df_reuter['token']=df_reuter['title'].map(lambda x:tokener(x))
df_reuter['vec']=df_reuter['title'].map(lambda x:t2v(x))
X_test=torch.cat(df_reuter['vec'].tolist())
X_test=TensorDataset(X_test)
X_test=DataLoader(X_test,batch_size=20000)
Y=[]
for i in X_test:
x=i[0].reshape(-1,1,20,50).cuda()
y=model(x)
Y.append(y)
Y=torch.cat(Y).reshape(-1)
df_reuter['Y']=Y.tolist()
df_reuter = df_reuter.loc[df_reuter['Y'] > 0.5].reset_index()  # Y holds sigmoid probabilities, so threshold at 0.5
df_reuter.to_csv('reuter_incident.csv',sep='\t')
| binary_classifier.py |
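# Sanity-check sketch for the TextCNN above (assumes the class definition is
# in scope; the batch size and random tensors are invented purely to exercise
# the shapes):
import torch

demo = TextCNN(2, [2, 3, 4], 20, 50)
demo.eval()  # disable dropout for a deterministic pass
demo_x = torch.randn(8, 1, 20, 50)          # (batch, channel, max_len, embedding_dim)
demo_y = torch.randint(0, 2, (8,)).float()  # binary labels
print(demo(demo_x).shape)                   # -> torch.Size([8, 1]) probabilities
print(float(demo(demo_x, demo_y)))          # -> scalar binary cross-entropy loss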
from osrsmath.general.skills import *
import unittest
class TestExperience(unittest.TestCase):
def test_experience_for_levels_below_1_raises(self):
self.assertRaises(ValueError, lambda:experience(0))
self.assertRaises(ValueError, lambda:experience(-3))
def test_experience_for_levels_above_level_cap_with_no_flag_raises(self):
self.assertRaises(ValueError, lambda:experience(100, virtual_levels=False))
self.assertRaises(ValueError, lambda:experience(112, virtual_levels=False))
def test_experience_for_levels_above_virtual_cap_raises(self):
self.assertRaises(ValueError, lambda:experience(127))
self.assertRaises(ValueError, lambda:experience(140))
def test_experience_for_levels_below_level_cap(self):
self.assertEqual(experience(85), 3_258_594)
self.assertEqual(experience(34), 20_224)
def test_experience_for_levels_above_virtual_cap_with_flag(self):
self.assertEqual(experience(100, virtual_levels=True), 14_391_160)
self.assertEqual(experience(112, virtual_levels=True), 47_221_641)
class TestLevel(unittest.TestCase):
def test_experience_below_zero_raises(self):
self.assertRaises(ValueError, lambda:level(-1))
def test_experience_of_zero_is_lowest_level(self):
self.assertEqual(level(0), 1)
def test_experience_above_level_cap_returns_max_level_without_flag(self):
self.assertEqual(level(14_000_000, virtual_levels=False), 99)
self.assertEqual(level(200_000_000, virtual_levels=False), 99)
def test_experience_above_level_cap_with_flag(self):
self.assertEqual(level(14_000_000, virtual_levels=True), 99)
self.assertEqual(level(112_000_000, virtual_levels=True), 120)
self.assertEqual(level(200_000_000, virtual_levels=True), 126)
def test_experience_above_maximum_experience_raises(self):
self.assertRaises(ValueError, lambda:level(200_000_001))
self.assertRaises(ValueError, lambda:level(252_532_523))
def test_experience_within_bounds(self):
self.assertEqual(level(40_000), 40)
self.assertEqual(level(700_000), 69)
self.assertEqual(level(9_000_000), 95)
def test_invertibility(self):
for l in range(1, 99+1):
with self.subTest(level=l):
self.assertEqual(level(experience(l)), l)
def test_experience_just_over_level_same_level(self):
small_experience = 1
for l in range(1, 99+1):
with self.subTest(level=l):
self.assertEqual(level(experience(l) + small_experience), l)
def test_experience_just_under_level_is_previous_level(self):
small_experience = 1
for l in range(2, 99+1):
with self.subTest(level=l):
# l starts at 2, so subtracting 1 xp always lands in the previous level.
self.assertEqual(level(experience(l) - small_experience), l - 1)
| osrsmath/tests/general/test_skills.py |
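# A plausible implementation of experience()/level() consistent with the tests
# above, using the standard OSRS experience formula; the real osrsmath module
# may differ (the virtual_levels defaults here are an assumption):
import math

def experience(level, virtual_levels=True):
    cap = 126 if virtual_levels else 99
    if level < 1 or level > cap:
        raise ValueError(f"Level {level} outside [1, {cap}]")
    points = sum(math.floor(l + 300 * 2 ** (l / 7)) for l in range(1, level))
    return points // 4

def level(xp, virtual_levels=True):
    if xp < 0 or xp > 200_000_000:
        raise ValueError(f"Experience {xp} outside [0, 200,000,000]")
    cap = 126 if virtual_levels else 99
    lvl = 1
    while lvl < cap and experience(lvl + 1) <= xp:
        lvl += 1
    return lvl

# Spot checks against the tested values:
assert experience(85) == 3_258_594
assert level(200_000_000) == 126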
import sys
import re
import numpy as np
import argparse
def get_arguments():
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file", dest="source_file", help="File with Scan commands")
parser.add_argument("-o", "--output", dest="output_file", help="File generated with Put Commands")
options = parser.parse_args()
if not options.source_file:
parser.error("[-] Please specify an input file with scan output, --help for more info.")
if not options.output_file:
parser.error("[-] Please specify an output file for the put commands, --help for more info.")
return options
def get_data_lines(completeStr):
print("Getting Data Lines")
for dataLine in re.finditer(r".*column.*timestamp.*value.*", completeStr):
yield dataLine.group(0)
def extract_data_in_line(line):
key, column, value = np.array(line.split(' ')).take([1, 2, 4])
column = column.replace('column=', '').replace(',', '')
value = value.replace('value=', '')
return (key, column, value)
def extract_put_data_in_string(strScan):
patternToExtract = r"(?P<key>.*)(?: column=)(?P<column>.*)(?:\,\stimestamp=.*,\svalue=)(?P<value>.*)"
return re.findall(patternToExtract, strScan)
def generate_put_command(table, key, column, value):
return "put '{}','{}','{}','{}'".format(table.strip(), key.strip(), column.strip(), value.strip())
def data_to_put_commands(mapData):
print("Generating put commands for data")
for data in mapData:
key, column, value = data
# Note: tableName is the module-level name parsed from the scan file below.
yield generate_put_command(tableName, key, column, value)
def save_put_file(fileName, putList):
print("Saving put file")
with open(fileName, 'w') as f:
for put in putList:
f.write(put + "\n")
def get_table_name(fString):
# Find Table Name
print("Getting table name")
searchTableName = re.search(r"(scan\s\')([\w.]+)", fString)
if searchTableName:
return searchTableName.group(2)
print("Can't find a table name")
sys.exit(1)
options = get_arguments()
file_name = options.source_file
put_file = options.output_file
print("Reading file {}".format(file_name))
with open(file_name) as f:
fString = f.read()
tableName = get_table_name(fString)
save_put_file(put_file,
[put for put in data_to_put_commands(extract_put_data_in_string(fString))])
| python/hbase/scanToPut.py |
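# Worked example of the transformation this script performs; the scan line is
# invented sample data in the shape the HBase shell emits:
import re

sample = " row-001 column=cf:name, timestamp=1584386548123, value=Alice"
pattern = r"(?P<key>.*)(?: column=)(?P<column>.*)(?:\,\stimestamp=.*,\svalue=)(?P<value>.*)"
for key, col, val in re.findall(pattern, sample):
    print("put '{}','{}','{}','{}'".format('my.table', key.strip(), col.strip(), val.strip()))
# -> put 'my.table','row-001','cf:name','Alice'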
from django.db import transaction
from django.http import JsonResponse
from django.views.generic import (
TemplateView, View, ListView, DetailView
)
from django.views.generic.edit import (
FormMixin, CreateView, UpdateView
)
from .forms import (
AttachmentForm, PostFileForm, PostFileAttachmentForm, PostForm, AttachmentInlineFormSet
)
from .models import (
Attachment, Post
)
class HomeView(TemplateView):
template_name = 'fileupload/home.html'
class FileUploadView(FormMixin, View):
form_class = AttachmentForm
def post(self, request, *args, **kwargs):
form_class = self.get_form_class()
form = self.get_form(form_class)
files = []
if form.is_valid():
for file in request.FILES.getlist('files'):
attachment = Attachment()
attachment.file = file
attachment.name = file.name
attachment.save()
files.append({
"pk": attachment.pk,
"name": file.name,
"size": file.size,
"url": attachment.file.url
})
data = {"files": files}
return JsonResponse(data)
else:
return JsonResponse({
'status': 'false',
'message': 'Bad Request'
}, status=400)
class PostListView(ListView):
model = Post
context_object_name = 'posts'
template_name = 'fileupload/post_list.html'
class PostDetailView(DetailView):
model = Post
context_object_name = 'post'
template_name = 'fileupload/post_detail.html'
class PostCreateView(CreateView):
model = Post
template_name = 'fileupload/post_create.html'
def get_form_class(self):
if self.request.method == 'POST':
# Hidden fields for attachments must be validated.
return PostFileAttachmentForm
else:
# Hidden fields are not prepopulated but appended to form by AJAX.
return PostFileForm
def form_valid(self, form):
response = super().form_valid(form)
# Attachments are not related to any post yet.
attachments = Attachment.objects.filter(
pk__in=form.cleaned_data['attachments'],
post__isnull=True,
)
self.object.attachments.set(attachments)
return response
class PostCreateView2(CreateView):
model = Post
form_class = PostForm
template_name = 'fileupload/post_create2.html'
def get_context_data(self, **kwargs):
context = super(PostCreateView2, self).get_context_data(**kwargs)
if self.request.POST:
context['formset'] = AttachmentInlineFormSet(self.request.POST, self.request.FILES)
else:
context['formset'] = AttachmentInlineFormSet()
return context
def form_valid(self, form):
context = self.get_context_data()
formset = context['formset']
with transaction.atomic():
self.object = form.save()
# If the formset is invalid, the post is still saved, just without its attachments.
if formset.is_valid():
formset.instance = self.object
formset.save()
return super().form_valid(form)
class PostCreateView3(PostCreateView):
template_name = 'fileupload/post_create3.html'
def get_form_class(self):
if self.request.method == 'POST':
# Hidden fields for attachments must be validated.
return PostFileAttachmentForm
else:
# Hidden fields and file input are not prepopulated but appended to form by AJAX.
return PostForm
class PostUpdateView(UpdateView):
model = Post
template_name = 'fileupload/post_update.html'
def get_form_class(self):
if self.request.method == 'POST':
# Hidden fields for attachments must be validated.
return PostFileAttachmentForm
else:
# Hidden fields are not prepopulated but appended to form by AJAX.
return PostFileForm
def form_valid(self, form):
response = super().form_valid(form)
# Attachments are not related to any post yet.
attachments = Attachment.objects.filter(
pk__in=form.cleaned_data['attachments'],
post__isnull=True,
)
self.object.attachments.set(attachments)
return response
class PostUpdateView3(PostUpdateView):
template_name = 'fileupload/post_update3.html'
def get_form_class(self):
if self.request.method == 'POST':
# Hidden fields for attachments must be validated.
return PostFileAttachmentForm
else:
# Hidden fields and file input are not prepopulated but appended to form by AJAX.
return PostForm
| fileupload/views.py |
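# Hypothetical models these views appear to assume, reconstructed from usage
# (Attachment.file/.name, a nullable post FK queried with post__isnull=True,
# and a post.attachments reverse relation). Field names and sizes are guesses.
from django.db import models

class Post(models.Model):
    title = models.CharField(max_length=200)  # assumed field

class Attachment(models.Model):
    post = models.ForeignKey(Post, null=True, blank=True,
                             related_name='attachments',
                             on_delete=models.CASCADE)
    name = models.CharField(max_length=255)
    file = models.FileField(upload_to='attachments/')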
import sys,re
import pandas as pd
import numpy as np
import spacy
from sklearn.feature_extraction.stop_words import ENGLISH_STOP_WORDS
from string import punctuation,printable
if sys.version_info.major != 3:
raise Exception("Need to run with Python 3")
# Load the spaCy English model if it hasn't been loaded already
if 'nlp' not in locals():
print("Loading English Module...")
nlp = spacy.load('en')
def clean_article(doc, stop_words,punct_dict,entities=True):
"""
generalized function to lemmatize string
"""
# Remove punctuation from the string
doc = doc.translate(punct_dict)
# remove unicode
clean_doc = "".join([char for char in doc if char in printable])
# Run the doc through spaCy
doc = nlp(clean_doc)
# Keep entities like 'the New York Times' from getting dropped
if entities:
for ent in doc.ents:
if ent.root.tag_ != 'DT':
ent.merge(ent.root.tag_, ent.text, ent.label_)
else:
ent.merge(ent[-1].tag_, ent.text, ent.label_)
## specify the parts of speech to keep
pos_lst = ['ADJ', 'ADV', 'NOUN', 'PROPN', 'VERB'] # NUM?
def clean_token(token):
## check that token is in parts of speech list
if token.pos_ not in pos_lst:
return None
# check that we have valid word characters in token
elif not re.search(r"\w", token.text):
return None
## normalize whitespace and strip non-word characters
token = re.sub(r" +", "_", token.lemma_.lower())
token = re.sub(r"\W+", "", token)
return token
tokens = [t for t in (clean_token(token) for token in doc) if t]
# Drop stop words and join the lemmatized tokens back into a string
return ' '.join(w for w in tokens if w not in stop_words)
if __name__=='__main__':
## define stoplist and punc
STOPLIST = set(list(ENGLISH_STOP_WORDS) + ["n't", "'s", "'m", "ca", "'", "'re",'pron'])
PUNCT_DICT = {ord(punc): None for punc in punctuation if punc not in ['_', '*']}
print("...reading articles")
df = pd.read_csv('npr_articles.csv', parse_dates=['date_published'])
print("...cleaning articles")
df['processed_text'] = df['article_text'].apply(lambda x: clean_article(x,STOPLIST, PUNCT_DICT,entities=True))
df.to_csv('npr_articles_clean.csv', index=False)
| text_processing.py |
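# Quick illustration of the str.translate trick used above: mapping each
# punctuation codepoint to None deletes it, while '_' and '*' survive.
from string import punctuation

demo_table = {ord(p): None for p in punctuation if p not in ['_', '*']}
print("Hello, world! *keep* under_score.".translate(demo_table))
# -> Hello world *keep* under_score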
import base64
import os
import time
from arjuna.tpi.enums import ArjunaOption
from arjuna.interact.gui.auto.impl.base.element_container import ElementContainer
from .drivercaps import DriverCapabilities
from arjuna.interact.gui.auto.impl.source.parser import ElementXMLSourceParser
from arjuna.interact.gui.auto.dispatcher.selenium.driver import SeleniumDriverDispatcher
from arjuna.interact.gui.auto.impl.locator.emd import GuiElementMetaData
class GuiAutomator(ElementContainer):
def __init__(self, config, extended_config=None):
super().__init__(config)
self.__extended_config = extended_config.config if extended_config else dict()
self.__create_screenshots_dir()
self.__main_window = None
self.__in_slomo = config.get_arjuna_option_value(ArjunaOption.GUIAUTO_SLOMO_ON).as_bool()
self.__slomo_interval = config.get_arjuna_option_value(ArjunaOption.GUIAUTO_SLOMO_INTERVAL).as_int()
from .webalert_handler import WebAlertHandler
from .automator_conditions import GuiAutomatorConditions
from .viewcontext_handler import ViewContextHandler
self.__alert_handler = WebAlertHandler(self)
self.__conditions_handler = GuiAutomatorConditions(self)
self.__view_handler = ViewContextHandler(self)
self.__browser = None
self.__source_parser = None
self.__all_source_map = {}
# As of now it directly connects to Selenium Dispatcher
# Code should be introduced here which passes through DispatcherPicker
# based on choice of engine to support more libs.
self.__dispatcher = SeleniumDriverDispatcher()
self.__launch()
@property
def dispatcher(self):
return self.__dispatcher
def create_emd(self, *locators):
return GuiElementMetaData.create_emd(*locators)
def get_source_for_setu_id(self, id):
return self.__all_source_map[id]
def get_source_from_remote(self):
return self.dispatcher.get_source()
def load_source_parser(self):
raw_source = self.get_source_from_remote()
if self.__source_parser is None:
self.__source_parser = ElementXMLSourceParser(self, root_element="html")
self.__source_parser.load()
def _create_element_flat_or_nested(self, locator_meta_data):
from arjuna.interact.gui.auto.impl.element.guielement import GuiElement
return GuiElement(self, locator_meta_data)
def _create_multielement_flat_or_nested(self, locator_meta_data):
from arjuna.interact.gui.auto.impl.element.multielement import GuiMultiElement
return GuiMultiElement(self, locator_meta_data)
def create_dispatcher(self):
self._set_dispatcher(self.dispatcher_creator.create_gui_automator_dispatcher(self.config, self.setu_id))
def slomo(self):
if self.__in_slomo:
time.sleep(self.__slomo_interval)
def set_slomo(self, on, interval=None):
self.__in_slomo = on
if interval is not None:
self.__slomo_interval = interval
@property
def browser(self):
return self.__browser
@property
def main_window(self):
return self.__main_window
@property
def alert_handler(self):
return self.__alert_handler
@property
def view_handler(self):
return self.__view_handler
@property
def conditions(self):
return self.__conditions_handler
def __create_screenshots_dir(self):
sdir = self.config.get_arjuna_option_value(ArjunaOption.SCREENSHOTS_DIR).as_str()
if not os.path.isdir(sdir):
os.makedirs(sdir)
#Override
def _get_object_uri(self):
# Caution: self.__automator_uri is never assigned in this class,
# so calling this raises AttributeError.
return self.__automator_uri
def __launch(self):
caps = DriverCapabilities(self.config, self.__extended_config)
self.dispatcher.launch(caps.processed_config)
from arjuna.interact.gui.auto.impl.element.window import MainWindow
self.__main_window = MainWindow(self)
from .browser import Browser
self.__browser = Browser(self)
def quit(self):
self.dispatcher.quit()
def __screenshot(self):
switch_view_context = None
if self.config.value(ArjunaOption.MOBILE_OS_NAME).lower() == "android":
view_name = self.view_handler.get_current_view_context()
if self.view_handler._does_name_represent_web_view(view_name) :
self.view_handler.switch_to_native_view()
switch_view_context = view_name
response = self.dispatcher.take_screenshot()
if switch_view_context:
self.view_handler.switch_to_view_context(switch_view_context)
return response
def take_screenshot(self):
response = self.__screenshot()
image = base64.b64decode(response["data"]["codedImage"])
path = os.path.join(self.config.value(ArjunaOption.SCREENSHOTS_DIR), "{}.png".format(str(time.time()).replace(".", "-")))
with open(path, "wb") as f:
f.write(image)
def focus_on_main_window(self):
self.main_window.focus()
def get_source(self, reload=True):
if reload:
self.load_source_parser()
return self.__source_parser
def perform_action_chain(self, single_action_chain):
from arjuna.interact.gui.auto.automator.actions import SingleActionChain
action_chain = SingleActionChain(self)
action_chain.perform(single_action_chain)
def find_element_with_js(self, js):
return self.dispatcher.find_element_with_js(js)
def find_multielement_with_js(self, js):
return self.dispatcher.find_multielement_with_js(js)
| arjuna/interact/gui/auto/impl/automator/__init__.py |
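# Hypothetical driving sequence for the automator above. The config object and
# the Browser method name are assumptions; only GuiAutomator's own methods
# (set_slomo, take_screenshot, quit) come from this module:
#
#   automator = GuiAutomator(config)          # launches the Selenium dispatcher
#   automator.set_slomo(True, interval=1)     # slow down actions for debugging
#   automator.browser.go_to_url("https://example.com")  # assumed Browser API
#   automator.take_screenshot()               # writes a PNG under SCREENSHOTS_DIR
#   automator.quit()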
import re
import time
from enum import Enum
import concurrent.futures as cf
class Engine:
def __init__(self, window):
self.window = window
self.reader = None
self.dispatcher = Dispatcher()
# thread safe
self.started = False
self.reader_in_use = False
# The single-worker executor ensures at most one task runs at a time, so the
# engine can exit normally: pending tasks cannot set started back to True.
self.single_executor = cf.ThreadPoolExecutor(max_workers=1)
# called only after a self.stop()
def close_reader(self):
if self.reader is not None:
# make sure that the reader is not used
if self.reader_in_use:
time.sleep(1)
self.reader.close()
def set_reader(self, reader):
# firstly close the old one if any
self.close_reader()
# then set the new one
self.reader = reader
def add_writer(self, writer):
self.dispatcher.add_writer(writer)
def task(self):
self.started = True
self.dispatcher.dispatch(
"----- [start] " + self.get_current_time() + " -----\n")
self.reader_in_use = True
try:
while self.started:
data = self.reader.readline()
# some writers will write to the screen, which send events to the main thread
self.dispatcher.dispatch(data)
except BaseException as e:
print(e)
self.reader_in_use = False
self.dispatcher.dispatch(
"----- [stop] " + self.get_current_time() + " -----\n")
def start(self):
self.single_executor.submit(self.task)
def stop(self):
self.started = False
def close(self):
# signal to stop the running task
self.stop()
# append a close task
self.single_executor.submit(self.close_task)
# shutdown the executor (non-blocking, otherwise risky deadlock)
self.single_executor.shutdown(wait=False)
def close_task(self):
# close the reader
self.close_reader()
# close the dispatcher
self.dispatcher.close()
# close the window
# Destroying the window also destroys its sub-widgets, so make sure the
# displayers are no longer in use before destroying it.
self.window.destroy()
def get_current_time(self):
t = time.localtime()
current_time = time.strftime("%H:%M:%S", t)
return current_time
class DataType(Enum):
ACC_L = 1
ACC_R = 2
DBG_L = 3
DBG_R = 4
OTHER = 5
class Parser():
def __init__(self, data_formats):
self.data_formats = data_formats
def parse(self, data):
for k, v in self.data_formats.items():
if re.match(k, data):
return v[0], v[1](data)
return DataType.OTHER, data
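# Inferred shape of the data_formats mapping consumed above (the patterns and
# handlers are hypothetical): regex -> (DataType, transform function), matching
# the `v[0], v[1](data)` unpacking in Parser.parse.
#
#   data_formats = {
#       r"^ACC_L": (DataType.ACC_L, lambda line: line.strip()),
#       r"^DBG_L": (DataType.DBG_L, parse_debug_line),  # hypothetical helper
#   }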
class Dispatcher:
def __init__(self):
import controller.formats as fmt
self.parser = Parser(fmt.data_formats)
# dict.fromkeys would make every value share the same list object; use a comprehension instead.
self.groups = {data_type: [] for data_type in DataType}
def add_writer(self, writer):
data_type = writer.get_type()
self.groups[data_type].append(writer)
# Observer Pattern
def dispatch(self, data):
data_type, parsed_data = self.parser.parse(data)
# print(parsed_data)
group = self.groups[data_type]
for writer in group:
writer.write(parsed_data)
def cleanup(self):
for group in self.groups.values():
for writer in group:
writer.cleanup()
def close(self):
# close all writers
for group in self.groups.values():
for writer in group:
writer.close()
| src/acc_monitor/controller/engine.py |
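# A minimal writer compatible with the Dispatcher above; the interface
# (get_type, write, cleanup, close) is inferred from how Dispatcher calls
# its writers, so treat this as a sketch rather than the project's own API.
class ConsoleWriter:
    def get_type(self):
        # Route unparsed lines to this writer.
        return DataType.OTHER

    def write(self, data):
        print(data, end="")

    def cleanup(self):
        pass

    def close(self):
        pass

# Hypothetical wiring: engine = Engine(window); engine.add_writer(ConsoleWriter())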
from iot_api import bcrypt
from iot_api.user_api.enums import RoleTypes
from iot_api.user_api.models.DataCollector import *
from iot_api.user_api import Error
from iot_api import config
from sqlalchemy import Table, Column, ForeignKey, func, desc, asc, cast, case, and_, or_, distinct, \
DateTime, String, Integer, BigInteger, SmallInteger, Float, Boolean
from sqlalchemy.types import JSON
from sqlalchemy.orm import relationship
import json
from datetime import datetime
LOG = iot_logging.getLogger(__name__)
class Organization(db.Model):
__tablename__ = "organization"
id = db.Column(db.BigInteger, primary_key=True, autoincrement=True)
name = db.Column(db.String(120), unique=True)
country = db.Column(db.String(120))
region = db.Column(db.String(120))
users = db.relationship("User", backref="organization", lazy=True)
def to_json(self):
return {
"name": self.name,
"country": self.country,
"region": self.region
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.commit()
def delete_from_db(self):
db.session.delete(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_name(cls, name):
return cls.query.filter_by(name=name).first()
@classmethod
def find_by_id(cls, organization_id, json):
if json:
return {"organizations": list(map(lambda organization: organization.to_json(), cls.query.filter_by(
id=organization_id).all()))}
else:
return cls.query.filter_by(id=organization_id).first()
@classmethod
def return_all(cls, json):
if json:
return {"organizations": list(map(lambda organization: organization.to_json(), cls.query.all()))}
else:
return cls.query.all()
user_to_data_collector_association_table = Table(
'user_to_data_collector',
db.Model.metadata,
Column('user_id', Integer, ForeignKey('iot_user.id')),
Column('data_collector_id', Integer, ForeignKey('data_collector.id'))
)
class User(db.Model):
__tablename__ = "iot_user"
id = db.Column(db.BigInteger, primary_key=True, autoincrement=True)
username = db.Column(db.String(32), index=True, unique=True, nullable=False)
full_name = db.Column(db.String(64), nullable=False)
email = db.Column(db.String(320), unique=True, nullable=False)
phone = db.Column(db.String(30), nullable=False)
password = db.Column(db.String(120), nullable=False)
user_roles = db.relationship("UserToUserRole", back_populates="user")
organization_id = db.Column(db.BigInteger, db.ForeignKey("organization.id"), nullable=False)
active = db.Column(db.Boolean, nullable=False, default=False)
deleted = db.Column(db.Boolean, nullable=False, default=False)
blocked = db.Column(db.Boolean, nullable=False, default=False)
collectors = relationship("DataCollector", secondary=user_to_data_collector_association_table, lazy="joined")
def to_json(self):
try:
organization_name = Organization.find_by_id(self.organization_id, False).name
user_roles = list(map(lambda x: x.user_role_id, self.user_roles)) # listing user roles for active user
if not user_roles: # listing user roles for not active user created before delay
account_activation_list = AccountActivation.find_last_tokens_by_user_id(self.id)
if account_activation_list:
account_activation = account_activation_list[0]
# Parse the comma-separated role ids; splitting handles multi-digit ids.
user_roles = [int(x) for x in account_activation.user_roles_id.split(',') if x.strip()]
return {
"id": self.id,
"username": self.username,
"full_name": self.full_name,
"phone": self.phone,
"email": self.email,
"user_roles": user_roles,
"organization_id": self.organization_id,
"active": self.active,
"organization_name": organization_name
}
except Exception as e:
LOG.error(e)
def to_short_info_json(self):
return {
"id": self.id,
"username": self.username,
"full_name": self.full_name,
}
@staticmethod
def generate_hash(password):
return bcrypt.generate_password_hash(password).decode('utf-8')
@staticmethod
def verify_hash(password, hash):
return bcrypt.check_password_hash(hash, password)
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_username(cls, username):
try:
return cls.query.filter_by(username=username.lower()).first()
except Exception as e:
LOG.error(e)
@classmethod
def find_by_id(cls, user_id):
try:
return cls.query.filter_by(id=user_id).first()
except Exception as e:
LOG.error(e)
@classmethod
def find_all_user_by_organization_id(cls, organization_id):
try:
return cls.query.filter(cls.id == UserToUserRole.user_id)\
.filter(~cls.user_roles.any(UserToUserRole.user_role_id == 9))\
.filter_by(organization_id=organization_id).all()
except Exception as e:
LOG.error(e)
@classmethod
def get_count_by_organization_id(cls, organization_id):
try:
return cls.query.filter_by(organization_id=organization_id).count()
except Exception as e:
LOG.error(e)
@classmethod
def find_by_email(cls, email):
try:
return cls.query.filter_by(email=email.lower()).all()
# The UserToUserRole.user_id filter is intentionally omitted here: the
# user_to_user_role rows are created with a delay after the user is activated.
except Exception as e:
LOG.error(e)
@classmethod
def find(cls, organization_id=None, page=None, size=None):
try:
q1 = cls.query.filter(cls.deleted == False).filter(~cls.user_roles.any(UserToUserRole.user_role_id == 9))\
.filter(cls.id == UserToUserRole.user_id)
q2 = cls.query.filter(cls.active == False,cls.deleted == False).filter(cls.id == AccountActivation.user_id)
if organization_id is not None:
q1 = q1.filter(cls.organization_id == organization_id)
q2 = q2.filter(cls.organization_id == organization_id)
query = q1.union(q2)
return query.paginate(page=page, per_page=size, error_out=config.ERROR_OUT,
max_per_page=config.MAX_PER_PAGE)
except Exception as e:
LOG.error(e)
@classmethod
def get_count_all(cls):
try:
return cls.query.count()
except Exception as e:
LOG.error(e)
class UserRole(db.Model):
__tablename__ = "user_role"
id = db.Column(db.BigInteger, primary_key=True)
role_name = db.Column(db.String(120), unique=True, nullable=False)
def to_json(self):
return {
'id': self.id,
'role_name': self.role_name
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.add(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_id(cls, id):
return cls.query.filter_by(id=id).first()
@classmethod
def find_by_role_name(cls, role_name):
return cls.query.filter_by(role_name=role_name).first()
@classmethod
def return_all(cls, json):
roles = cls.query.filter(cls.id != 9).all()
if json:
return {"user_roles": list(map(lambda user_role: user_role.to_json(), roles))}
else:
return roles
class UserToUserRole(db.Model):
__tablename__ = "user_to_user_role"
user_id = db.Column(db.BigInteger, db.ForeignKey(
"iot_user.id"), primary_key=True)
user_role_id = db.Column(db.BigInteger, db.ForeignKey(
"user_role.id"), primary_key=True)
user = db.relationship("User", back_populates="user_roles")
def to_json(self):
return {
"user_id": self.user_id,
"user_role_id": self.user_role_id,
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.add(self)
db.session.commit()
def delete_from_db(self):
db.session.delete(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_all_user_by_user_role_id(cls, user_role_id):
return cls.query.filter_by(user_role_id=user_role_id).all()
@classmethod
def find_all_user_role_by_user_id(cls, user_id):
return cls.query.filter_by(user_id=user_id).all()
@classmethod
def find_by_user_id_and_user_role_id(cls, user_id, user_role_id):
return cls.query.filter_by(user_id=user_id, user_role_id=user_role_id).first()
class AccountActivation(db.Model):
__tablename__ = "account_activation"
id = db.Column(db.BigInteger, primary_key=True)
user_id = db.Column(db.BigInteger, db.ForeignKey(
"iot_user.id"), nullable=False)
token = db.Column(db.String(500), nullable=False)
creation_date = db.Column(db.DateTime(timezone=True), nullable=False)
active = db.Column(db.Boolean, nullable=False, default=True)
# organization_id = db.Column(Integer, nullable=True)
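    # Role ids requested at sign-up, kept as a comma-separated string (e.g. "1,2")
    # until the account is activated and the user_to_user_role rows are created.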
user_roles_id = db.Column(String(40))
def to_json(self):
return {
'id': self.id,
'user_id': self.user_id,
'token': self.token,
'creation_date': self.creation_date,
'active': self.active
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.commit()
def delete_from_db(self):
db.session.delete(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_token(cls, token):
return cls.query.filter_by(token=token, active=True).first()
@classmethod
def find_active_tokens_by_user_id(cls, user_id):
return cls.query.filter_by(user_id=user_id, active=True).all()
@classmethod
def find_last_tokens_by_user_id(cls, user_id):
        # Return a concrete list (not a lazy Query) so callers can truthiness-check it.
        return cls.query.filter_by(user_id=user_id)\
            .order_by(desc(AccountActivation.creation_date)).all()
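# Asset type(s) an alert type applies to. LOOK_IN_ALERT_PARAMS means the effective asset
# type must be resolved from the 'alert_solved_type' entry in the alert's JSON parameters.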
class AlertAssetType(Enum):
DEVICE = 'DEVICE'
GATEWAY = 'GATEWAY'
BOTH = 'BOTH'
NONE = 'NONE'
LOOK_IN_ALERT_PARAMS = 'LOOK_IN_ALERT_PARAMS'
class AlertType(db.Model):
__tablename__ = 'alert_type'
id = Column(BigInteger, primary_key=True, autoincrement=True)
code = Column(String(20), nullable=False, unique=True)
name = Column(String(120), nullable=False)
message = Column(String(4096), nullable=True)
risk = Column(String(20), nullable=False)
description = Column(String(3000), nullable=False)
parameters = Column(String(4096), nullable=True)
technical_description = Column(String(3000), nullable=True)
recommended_action = Column(String(3000), nullable=True)
quarantine_timeout = Column(Integer, nullable=True, default=0)
for_asset_type = Column(SQLEnum(AlertAssetType))
def to_json(self):
return {
'id': self.id,
'code': self.code,
'name': self.name,
'message': self.message,
'risk': self.risk,
'description': self.description,
'technicalDescription': self.technical_description,
'recommendedAction': self.recommended_action,
'parameters': json.loads(self.parameters if self.parameters is not None else '{}')
}
@classmethod
def find_all(cls):
return cls.query.all()
@classmethod
def find_and_count_all(cls, organization_id, _from, to, resolved, risks, data_collectors, types):
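        # Returns (type, count) rows: the number of visible alerts per alert type for the
        # organization, after applying the optional date/resolution/risk/collector/type filters.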
try:
query = db.session.query(Alert.type.label('type'), func.count(1).label('count'))\
.join(DataCollector)\
.filter(DataCollector.organization_id == organization_id)\
.filter(Alert.show == True)
if _from:
query = query.filter(Alert.created_at >= _from)
if to:
query = query.filter(Alert.created_at <= to)
if resolved is not None:
if resolved:
query = query.filter(Alert.resolved_at != None)
else:
query = query.filter(Alert.resolved_at == None)
if risks and len(risks) > 0:
query = query.join(AlertType).filter(AlertType.risk.in_(risks))
if data_collectors and len(data_collectors) > 0:
query = query.filter(Alert.data_collector_id.in_(data_collectors))
            if types and len(types) > 0:
query = query.filter(Alert.type.in_(types))
query = query.group_by(Alert.type)
return query.all()
except Exception as e:
LOG.error(e)
@classmethod
def find_one(cls, code):
return cls.query.filter(cls.code == code).first()
class Alert(db.Model):
__tablename__ = 'alert'
id = Column(BigInteger, primary_key=True, autoincrement=True)
type = Column(String(20), ForeignKey("alert_type.code"), nullable=False)
created_at = Column(DateTime(timezone=True), nullable=False)
device_id = Column(BigInteger, ForeignKey("device.id"), nullable=True)
device_session_id = Column(BigInteger, ForeignKey("device_session.id"), nullable=True)
gateway_id = Column(BigInteger, ForeignKey("gateway.id"), nullable=True)
device_auth_id = Column(BigInteger, ForeignKey("device_auth_data.id"), nullable=True)
data_collector_id = Column(BigInteger, ForeignKey("data_collector.id"), nullable=False)
parameters = Column(String(4096), nullable=False)
resolved_at = Column(DateTime(timezone=True), nullable=True)
resolved_by_id = Column(BigInteger, ForeignKey("iot_user.id"), nullable=True)
resolution_comment = Column(String(1024), nullable=True)
show = Column(Boolean, nullable=False, default=True)
resolved_by = relationship("User", lazy="joined")
alert_type = relationship("AlertType", lazy="joined")
data_collector = relationship("DataCollector", lazy="joined")
device = relationship("Device", lazy="joined")
gateway = relationship("Gateway", lazy="joined")
def to_json(self):
parsed_user = self.resolved_by.to_short_info_json() if self.resolved_by else None
return {
'id': self.id,
'type': self.type,
'created_at': "{}".format(self.created_at) if self.created_at else None,
'device_id': self.device_id,
'data_collector_id': self.data_collector_id,
'data_collector_name': self.data_collector.name,
'device_session_id': self.device_session_id,
'gateway_id': self.gateway_id,
'device_auth_id': self.device_auth_id,
'parameters': json.loads(self.parameters if self.parameters is not None else '{}'),
'resolved_at': None if self.resolved_at is None else "{}".format(self.resolved_at),
'resolution_comment': self.resolution_comment,
'resolved_by_id': self.resolved_by_id,
'resolved_by': parsed_user,
'asset_importance': self.get_asset_importance()
}
def to_count_json(self):
return {
'id': self.id,
'type': self.type,
'created_at': "{}".format(self.created_at)
}
def get_asset_importance(self):
if self.device:
asset_importance = self.device.importance.value
elif self.gateway:
asset_importance = self.gateway.importance.value
else:
asset_importance = None
return asset_importance
@classmethod
def find_one(cls, id):
return cls.query.filter(cls.id == id).first()
@classmethod
def find(cls, organization_id, since, until, types, resolved, risks, data_collectors, order_by, page, size):
try:
query = db.session.query(Alert)\
.join(DataCollector)\
.filter(DataCollector.organization_id == organization_id)\
.filter(cls.show == True)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.filter(AlertType.code == cls.type).filter(AlertType.risk.in_(risks))
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
if order_by and 'ASC' in order_by:
query = query.order_by(asc(cls.created_at))
else:
query = query.order_by(desc(cls.created_at))
if page is not None and size:
query = query.limit(size).offset(page*size)
result = query.all()
# print(f"found records: {len(result)}")
return result
except Exception as e:
LOG.error(e)
@classmethod
def find_with(cls, organization_id, since, until, types, resolved, risks, order_by, page, size, device_id=None, gateway_id=None, asset_type=None):
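        # Each alert's type is resolved twice: 'explicit' is the code stored on the alert row;
        # 'implicit' follows the optional 'alert_solved_type' key in the alert's JSON parameters,
        # used when the explicit type declares for_asset_type = LOOK_IN_ALERT_PARAMS.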
AlertTypeExplicit = db.aliased(AlertType)
AlertTypeImplicit = db.aliased(AlertType)
if asset_type == 'device':
alert_asset_type = AlertAssetType.DEVICE
elif asset_type == 'gateway':
alert_asset_type = AlertAssetType.GATEWAY
elif asset_type is None:
alert_asset_type = AlertAssetType.BOTH
else:
raise Error.BadRequest(f"Alert.find_with called with asset_type = {asset_type}")
query = db.session.query(Alert)\
.join(DataCollector)\
.join(AlertTypeExplicit, AlertTypeExplicit.code == Alert.type)\
.join(AlertTypeImplicit, or_(
AlertTypeImplicit.code == AlertTypeExplicit.code,
AlertTypeImplicit.code == cast(Alert.parameters, JSON)['alert_solved_type'].as_string(),
))\
.filter(DataCollector.organization_id == organization_id)\
.filter(cls.show == True)
if device_id is not None:
query = query.filter(cls.device_id == device_id)
if gateway_id is not None:
query = query.filter(cls.gateway_id == gateway_id)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.filter(AlertTypeExplicit.risk.in_(risks))
        # Keep only alerts whose type matches the requested alert_asset_type.
if alert_asset_type == AlertAssetType.BOTH:
valid_types = [AlertAssetType.BOTH, AlertAssetType.DEVICE, AlertAssetType.GATEWAY]
elif alert_asset_type == AlertAssetType.DEVICE or alert_asset_type == AlertAssetType.GATEWAY:
valid_types = [AlertAssetType.BOTH, alert_asset_type]
else:
valid_types = [alert_asset_type]
query = query.filter(or_(
AlertTypeExplicit.for_asset_type.in_(valid_types),
and_(
AlertTypeExplicit.for_asset_type == AlertAssetType.LOOK_IN_ALERT_PARAMS,
AlertTypeImplicit.for_asset_type.in_(valid_types)
)
))
if order_by:
order_field = order_by[0]
order_direction = order_by[1]
if 'ASC' == order_direction:
query = query.order_by(asc(getattr(cls, order_field)))
else:
query = query.order_by(desc(getattr(cls, order_field)))
else:
query = query.order_by(desc(cls.created_at)) # newest first if no order_by parameter is specified
if page and size:
return query.paginate(page=page, per_page=size, error_out=config.ERROR_OUT, max_per_page=config.MAX_PER_PAGE)
return query.all()
@classmethod
def count(cls, organization_id, since, until, types, resolved, risks, data_collectors):
try:
query = db.session.query(func.count(1).label('count'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.filter(cls.type == AlertType.code).filter(AlertType.risk.in_(risks))
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
return query.scalar()
except Exception as e:
LOG.error(e)
@classmethod
def count_by_date(cls, organization_id, since, until, types, resolved, risks):
try:
query = db.session.query(func.date(cls.created_at).label('date'), func.count(1).label('count'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
if risks and len(risks) > 0:
query = query.join(AlertType).filter(AlertType.risk.in_(risks))
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
query = query.group_by(func.date(cls.created_at)).order_by(asc(func.date(cls.created_at)))
return query.all()
except Exception as e:
LOG.error(e)
@classmethod
def count_by_hour(cls, organization_id, since, until, types, resolved, risks):
try:
query = db.session\
.query(func.date_trunc('hour', cls.created_at).label('hour'), func.count(1).label('count'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.join(AlertType).filter(AlertType.risk.in_(risks))
query = query\
.group_by(func.date_trunc('hour', cls.created_at))\
.order_by(asc(func.date_trunc('hour', cls.created_at)))
return query.all()
except Exception as e:
LOG.error(e)
def update(self):
db.session.commit()
@classmethod
def group_by_date_and_risk(cls, organization_id, since, until, types, resolved, data_collectors):
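        # Returns the distinct (date, risk) pairs with at least one matching alert (no counts).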
try:
query = db.session.query(func.date(cls.created_at).label('date'), AlertType.risk.label('risk'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
query = query.join(AlertType)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
return query.distinct().all()
except Exception as e:
LOG.error(e)
@classmethod
def group_by_hour_and_risk(cls, organization_id, since, until, types, resolved, data_collectors):
try:
query = db.session\
.query(func.date_trunc('hour', cls.created_at).label('hour'), AlertType.risk.label('risk'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
query = query.join(AlertType)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
return query.distinct().all()
except Exception as e:
LOG.error(e)
class AssetImportance(Enum):
LOW = 'LOW'
MEDIUM = 'MEDIUM'
HIGH = 'HIGH'
class Gateway(db.Model):
__tablename__ = 'gateway'
id = Column(BigInteger, primary_key=True, autoincrement=True)
gw_hex_id = Column(String(100), nullable=True)
name = Column(String, nullable=True)
vendor = Column(String, nullable=True)
location_latitude = Column(Float, nullable=True)
location_longitude = Column(Float, nullable=True)
data_collector_id = Column(BigInteger, db.ForeignKey("data_collector.id"), nullable=False)
organization_id = Column(BigInteger, db.ForeignKey("organization.id"), nullable=False)
connected = Column(Boolean, nullable=False, default=True)
first_activity = Column(DateTime(timezone=True), nullable=True)
last_activity = Column(DateTime(timezone=True), nullable=False)
activity_freq = Column(Float, nullable=True)
npackets_up = Column(BigInteger, nullable=False, default=0)
npackets_down = Column(BigInteger, nullable=False, default=0)
importance = Column(SQLEnum(AssetImportance))
def to_json(self):
return {
'id': self.id,
'gw_hex_id': self.gw_hex_id,
'name': self.name,
'vendor': self.vendor,
'location': {
'latitude': self.location_latitude,
'longitude': self.location_longitude
},
'data_collector_id': self.data_collector_id,
'organization_id': self.organization_id,
'connected': self.connected,
'last_activity': "{}".format(self.last_activity),
'activity_freq': self.activity_freq,
            'importance': self.importance.value if self.importance else None,
'npackets_up': self.npackets_up,
'npackets_down': self.npackets_down
}
class Device(db.Model):
__tablename__ = 'device'
id = Column(BigInteger, primary_key=True, autoincrement=True)
dev_eui = Column(String(16), nullable=False)
name = Column(String, nullable=True)
vendor = Column(String, nullable=True)
app_name = Column(String, nullable=True)
join_eui = Column(String(16), nullable=True)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
data_collector_id = Column(BigInteger, db.ForeignKey("data_collector.id"), nullable=False)
importance = Column(SQLEnum(AssetImportance))
repeated_dev_nonce = Column(Boolean, nullable=True)
join_request_counter = Column(Integer, nullable=False, default=0)
join_accept_counter = Column(Integer, nullable=False, default=0)
has_joined = Column(Boolean, nullable=True, default=False)
join_inferred = Column(Boolean, nullable=True, default=False)
is_otaa = Column(Boolean, nullable=True)
last_packet_id = Column(BigInteger, ForeignKey("packet.id"), nullable=True)
first_up_timestamp = Column(db.DateTime(timezone=True), nullable=True)
last_up_timestamp = Column(DateTime(timezone=True), nullable=True)
pending_first_connection = Column(Boolean, nullable=False, default=True)
connected = Column(Boolean, nullable=False, default=True)
first_activity = Column(DateTime(timezone=True), nullable=True)
last_activity = Column(DateTime(timezone=True), nullable=True)
activity_freq = Column(Float, nullable=True)
activity_freq_variance = Column(Float, nullable=False, default=0)
npackets_up = Column(BigInteger, nullable=False, default=0)
npackets_down = Column(BigInteger, nullable=False, default=0)
npackets_lost = Column(Float, nullable=False, default=0)
max_rssi = Column(Float, nullable=True)
max_lsnr = Column(Float, nullable=True)
ngateways_connected_to = Column(BigInteger, nullable=False, default=0)
payload_size = Column(BigInteger, nullable=True)
last_packets_list = Column(String(4096), nullable=True, default='[]')
def to_json(self):
return {
'id': self.id,
'dev_eui': self.dev_eui,
'name': self.name,
'vendor': self.vendor,
'app_name': self.app_name,
'join_eui': self.join_eui,
'data_collector_id': self.data_collector_id,
'organization_id': self.organization_id,
'first_up_timestamp': "{}".format(self.first_up_timestamp),
'last_up_timestamp': "{}".format(self.last_up_timestamp),
'repeated_dev_nonce': self.repeated_dev_nonce,
'join_request_counter': self.join_request_counter,
            'join_accept_counter': self.join_accept_counter,
'has_joined': self.has_joined,
'join_inferred': self.join_inferred,
'is_otaa': self.is_otaa,
'last_packet_id': self.last_packet_id,
'connected': self.connected,
'last_activity': "{}".format(self.last_activity),
'activity_freq': self.activity_freq,
'activity_freq_variance': self.activity_freq_variance,
            'importance': self.importance.value if self.importance else None,
'npackets_up': self.npackets_up,
'npackets_down': self.npackets_down,
'npackets_lost': self.npackets_lost,
'max_rssi': self.max_rssi,
'pending_first_connection': self.pending_first_connection
}
@classmethod
def find(cls, organization_id, since, until, page, size):
query = cls.query.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.last_up_timestamp >= since)
if until:
query = query.filter(cls.last_up_timestamp <= until)
query = query.order_by(desc(cls.last_up_timestamp))
if page is not None and size:
query = query.limit(size).offset(page*size)
return query.all()
@classmethod
def count_by_date(cls, organization_id, since, until):
query = db.session.query(func.date(cls.last_up_timestamp).label('date'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.last_up_timestamp >= since)
if until:
query = query.filter(cls.last_up_timestamp <= until)
query = query.group_by(func.date(cls.last_up_timestamp))
return query.all()
@classmethod
def count_by_hour(cls, organization_id, since, until):
query = db.session\
.query(func.date_trunc('hour', cls.last_up_timestamp).label('hour'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.last_up_timestamp >= since)
if until:
query = query.filter(cls.last_up_timestamp <= until)
query = query.group_by(func.date_trunc('hour', cls.last_up_timestamp))
return query.all()
class GatewayToDevice(db.Model):
__tablename__ = 'gateway_to_device'
gateway_id = Column(BigInteger, db.ForeignKey("gateway.id"), nullable=False, primary_key=True)
device_id = Column(BigInteger, db.ForeignKey("device.id"), nullable=False, primary_key=True)
class DeviceSession(db.Model):
__tablename__ = 'device_session'
id = Column(BigInteger, primary_key=True, autoincrement=True)
may_be_abp = Column(Boolean, nullable=True)
reset_counter = Column(Integer, nullable=False, default=0)
is_confirmed = Column(Boolean, nullable=True)
dev_addr = Column(String(8), nullable=False)
up_link_counter = Column(Integer, nullable=False, default=-1)
down_link_counter = Column(Integer, nullable=False, default=-1)
max_down_counter = Column(Integer, nullable=False, default=-1)
max_up_counter = Column(Integer, nullable=False, default=-1)
total_down_link_packets = Column(BigInteger, nullable=False, default=0)
total_up_link_packets = Column(BigInteger, nullable=False, default=0)
first_down_timestamp = Column(DateTime(timezone=True), nullable=True)
first_up_timestamp = Column(DateTime(timezone=True), nullable=True)
last_down_timestamp = Column(DateTime(timezone=True), nullable=True)
last_up_timestamp = Column(DateTime(timezone=True), nullable=True)
device_id = Column(BigInteger, ForeignKey("device.id"), nullable=True)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
data_collector_id = Column(BigInteger, db.ForeignKey("data_collector.id"), nullable=False)
device_auth_data_id = Column(BigInteger, ForeignKey("device_auth_data.id"), nullable=True)
last_packet_id = Column(BigInteger, ForeignKey("packet.id"), nullable=True)
last_activity = Column(DateTime(timezone=True), nullable=False)
connected = Column(Boolean, nullable=False, default=True)
class Packet(db.Model):
__tablename__ = 'packet'
id = Column(BigInteger, primary_key=True, autoincrement=True)
date = Column(DateTime(timezone=True), nullable=False)
topic = Column(String(256), nullable=True)
data_collector_id = Column(BigInteger, ForeignKey("data_collector.id"), nullable=False)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
gateway = Column(String(16), nullable=True)
tmst = Column(BigInteger, nullable=True)
chan = Column(SmallInteger, nullable=True)
rfch = Column(Integer, nullable=True)
seqn = Column(Integer, nullable=True)
opts = Column(String(20), nullable=True)
port = Column(Integer, nullable=True)
freq = Column(Float, nullable=True)
stat = Column(SmallInteger, nullable=True)
modu = Column(String(4), nullable=True)
datr = Column(String(50), nullable=True)
codr = Column(String(10), nullable=True)
lsnr = Column(Float, nullable=True)
rssi = Column(Integer, nullable=True)
size = Column(Integer, nullable=True)
data = Column(String(300), nullable=True)
m_type = Column(String(20), nullable=True)
major = Column(String(10), nullable=True)
mic = Column(String(8), nullable=True)
join_eui = Column(String(16), nullable=True)
dev_eui = Column(String(16), nullable=True)
dev_nonce = Column(Integer, nullable=True)
dev_addr = Column(String(8), nullable=True)
adr = Column(Boolean, nullable=True)
ack = Column(Boolean, nullable=True)
adr_ack_req = Column(Boolean, nullable=True)
f_pending = Column(Boolean, nullable=True)
class_b = Column(Boolean, nullable=True)
f_count = Column(Integer, nullable=True)
f_opts = Column(String(500), nullable=True)
f_port = Column(Integer, nullable=True)
error = Column(String(300), nullable=True)
latitude = Column(Float, nullable=True)
longitude = Column(Float, nullable=True)
altitude = Column(Float, nullable=True)
app_name = Column(String(100), nullable=True)
dev_name = Column(String(100), nullable=True)
def to_json(self):
return {
'id': self.id,
'date': self.date.strftime(config.DATE_FORMAT),
'topic': self.topic,
'data_collector_id': self.data_collector_id,
'organization_id': self.organization_id,
'gateway': self.gateway,
'tmst': self.tmst,
'chan': self.chan,
'rfch': self.rfch,
'seqn': self.seqn,
'opts': self.opts,
'port': self.port,
'freq': self.freq,
'stat': self.stat,
'modu': self.modu,
'datr': self.datr,
'codr': self.codr,
'lsnr': self.lsnr,
'rssi': self.rssi,
'size': self.size,
'data': self.data,
'm_type': self.m_type,
'major': self.major,
'mic': self.mic,
'join_eui': self.join_eui,
'dev_eui': self.dev_eui,
'dev_nonce': self.dev_nonce,
'dev_addr': self.dev_addr,
'adr': self.adr,
'ack': self.ack,
'adr_ack_req': self.adr_ack_req,
'f_pending': self.f_pending,
'class_b': self.class_b,
'f_count': self.f_count,
'f_opts': self.f_opts,
'f_port': self.f_port,
'error': self.error
}
@classmethod
def find(cls, organization_id, mtype, since, until, page, size):
query = cls.query.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.date >= since)
if until:
query = query.filter(cls.date <= until)
if mtype:
query = query.filter(cls.m_type == mtype)
query = query.order_by(desc(cls.date))
if page is not None and size:
query = query.limit(size).offset(page*size)
return query.all()
@classmethod
def count_by_date(cls, organization_id, mtype, since, until):
query = db.session.query(func.date(cls.date).label('date'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.date >= since)
if until:
query = query.filter(cls.date <= until)
if mtype:
query = query.filter(cls.m_type == mtype)
query = query.group_by(func.date(cls.date))
return query.all()
@classmethod
def count_by_hour(cls, organization_id, mtype, since, until):
query = db.session.query(func.date_trunc('hour', cls.date).label('hour'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.date >= since)
if until:
query = query.filter(cls.date <= until)
if mtype:
query = query.filter(cls.m_type == mtype)
query = query.group_by(func.date_trunc('hour', cls.date))
return query.all()
@classmethod
def find_max_by_organization_id(cls, organization_id, min_date):
query = db.session.query(cls.data_collector_id.label('data_collector_id'), func.max(cls.date).label('date'))
query = query.filter(cls.date > min_date, cls.organization_id == organization_id)
query = query.group_by(cls.data_collector_id)
return query.all()
class DeviceAuthData(db.Model):
__tablename__ = 'device_auth_data'
id = Column(BigInteger, primary_key=True, autoincrement=True)
join_request = Column(String(200), nullable=True)
join_accept = Column(String(200), nullable=True)
apps_key = Column(String(32), nullable=True)
nwks_key = Column(String(32), nullable=True)
data_collector_id = Column(BigInteger, ForeignKey("data_collector.id"), nullable=False)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
app_key_id = Column(BigInteger, ForeignKey("app_key.id"), nullable=False)
device_id = Column(BigInteger, ForeignKey("device.id"), nullable=True)
device_session_id = Column(BigInteger, ForeignKey("device_session.id"), nullable=True)
class QuarantineResolutionReasonType(Enum):
MANUAL = 'MANUAL'
AUTOMATIC = 'AUTOMATIC'
class QuarantineResolutionReason(db.Model):
__tablename__ = "quarantine_resolution_reason"
# region fields
id = Column(BigInteger, primary_key=True, autoincrement=True)
type = Column(SQLEnum(QuarantineResolutionReasonType))
name = Column(String(80), nullable=False)
description = Column(String(200), nullable=True)
# endregion
@classmethod
def find_by_id(cls, id):
return cls.query.filter(cls.id == id).first()
@classmethod
def find_by_type(cls, type):
return cls.query.filter(cls.type == type).first()
class Quarantine(db.Model):
__tablename__ = "quarantine"
#region fields
id = Column(BigInteger, primary_key=True, autoincrement=True)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
# alert relationship
alert_id = Column(BigInteger, ForeignKey("alert.id"), nullable=False)
# since when is this device/alert in quarantine
since = Column(DateTime(timezone=True), nullable=False)
# last time the condition for quarantine was checked
last_checked = Column(DateTime(timezone=True), nullable=True)
# when was resolved, if applicable
resolved_at = Column(DateTime(timezone=True), nullable=True)
# who resolved the quarantine, if applicable
resolved_by_id = Column(BigInteger, ForeignKey("iot_user.id"), nullable=True)
# resolution reason relationship, if resolved. Null if not
resolution_reason_id = Column(BigInteger, ForeignKey("quarantine_resolution_reason.id"), nullable=True)
# resolution comment (optional)
resolution_comment = Column(String(1024), nullable=True)
# quarantine parameters (optional)
parameters = Column(String(4096), nullable=True)
# device relationship
device_id = Column(BigInteger, ForeignKey("device.id"))
alert = relationship("Alert", lazy="joined")
#endregion
def to_list_json(self):
data_collector = DataCollector.find_by_id(self.alert.data_collector_id)
return {
'id': self.id,
'organization_id': self.organization_id,
'alert': self.alert.to_json(),
'alert_type': self.alert.alert_type.to_json(),
'device_id': self.device_id,
'data_collector_id': data_collector.id,
'data_collector_name': data_collector.name,
'parameters': json.loads(self.parameters if self.parameters is not None else '{}'),
'since': f'{self.since}' if self.since else None,
'last_checked': f'{self.last_checked}' if self.last_checked else None,
'resolved_at': f'{self.resolved_at}' if self.resolved_at else None,
'resolved_by_id': self.resolved_by_id,
'resolution_reason_id': self.resolution_reason_id,
'resolution_comment': self.resolution_comment
}
def db_insert(self):
db.session.add(self)
db.session.commit()
    def db_update(self):
        db.session.commit()
def db_delete(self):
db.session.delete(self)
db.session.commit()
@classmethod
def find_by_id(cls, id):
return cls.query.filter(cls.id == id).first()
@classmethod
def get_list_query(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
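        # Base query for the organization's open (unresolved) quarantine records on
        # non-deleted collectors; callers chain extra filters, ordering and pagination on it.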
query = cls.query.filter(cls.organization_id == organization_id).filter(cls.resolved_at.is_(None))\
.filter(DataCollector.deleted_at.is_(None)).join(Alert).join(AlertType).join(DataCollector)
if since:
query = query.filter(cls.since >= since)
if until:
query = query.filter(cls.since <= until)
if alert_types and len(alert_types) > 0:
query = query.filter(AlertType.id.in_(alert_types))
if risks and len(risks) > 0:
query = query.filter(AlertType.risk.in_(risks))
if devices and len(devices) > 0:
query = query.filter(cls.device_id.in_(devices))
if data_collectors and len(data_collectors) > 0:
query = query.filter(Alert.data_collector_id.in_(data_collectors))
return query
@classmethod
def find(cls, organization_id, since, until, alert_types, devices, risks, data_collectors, order_by, page, size, gateway_id=None, asset_type=None):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
if gateway_id is not None:
query = query.filter(Alert.gateway_id == gateway_id)
        # Keep only quarantine records whose alert type matches the requested asset_type.
if asset_type is not None:
if asset_type == 'device':
alert_asset_type = AlertAssetType.DEVICE
elif asset_type == 'gateway':
alert_asset_type = AlertAssetType.GATEWAY
else:
raise Error.BadRequest(f"Quarantine.find called with asset_type = {asset_type}")
if alert_asset_type == AlertAssetType.BOTH:
valid_types = [AlertAssetType.BOTH, AlertAssetType.DEVICE, AlertAssetType.GATEWAY]
elif alert_asset_type == AlertAssetType.DEVICE or alert_asset_type == AlertAssetType.GATEWAY:
valid_types = [AlertAssetType.BOTH, alert_asset_type]
else:
valid_types = [alert_asset_type]
AlertTypeImplicit = db.aliased(AlertType)
query = query.join(AlertTypeImplicit, or_(
AlertTypeImplicit.code == AlertType.code,
AlertTypeImplicit.code == cast(Alert.parameters, JSON)['alert_solved_type'].as_string(),
))\
.filter(or_(
AlertType.for_asset_type.in_(valid_types),
and_(
AlertType.for_asset_type == AlertAssetType.LOOK_IN_ALERT_PARAMS,
AlertTypeImplicit.for_asset_type.in_(valid_types)
)
))
if order_by:
order_field = order_by[0]
order_direction = order_by[1]
if 'ASC' == order_direction:
query = query.order_by(asc(getattr(cls, order_field)))
else:
query = query.order_by(desc(getattr(cls, order_field)))
else:
query = query.order_by(desc(cls.since), Alert.device_id, Alert.data_collector_id)
if page and size:
return query.paginate(page=page, per_page=size, error_out=config.ERROR_OUT, max_per_page=config.MAX_PER_PAGE)
return query.all()
@classmethod
def count(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
# this generates a select count(*) from xxx where yyy instead of select count(*) from (select yyy from xxx)
# see https://gist.github.com/hest/8798884
count_q = query.statement.with_only_columns([func.count(func.distinct(Quarantine.id))])
return query.session.execute(count_q).scalar()
@classmethod
def count_by_data_collector(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(Quarantine.id)).label('quarantine_count'), Alert.data_collector_id.label('data_collector_id'), DataCollector.name.label('data_collector_name'))
return count_query.group_by('data_collector_id','data_collector_name').all()
@classmethod
def count_by_risk(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(Quarantine.id)).label('quarantine_count'), AlertType.risk.label('alert_type_risk'))
return count_query.group_by('alert_type_risk').all()
@classmethod
def count_by_alert_type(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(Quarantine.id)).label('quarantine_count'), AlertType.id.label('alert_type_id'), AlertType.name.label('alert_type_name'))
return count_query.group_by('alert_type_id','alert_type_name').all()
@classmethod
def count_devices(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
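        # Count distinct affected assets: alerts without a device_id fall back to their
        # device_session_id so sessions of unidentified devices are still counted once.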
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_q = query.statement.with_only_columns([func.count(func.distinct(case([(Alert.device_id != None, Alert.device_id)], else_=Alert.device_session_id)))])
return query.session.execute(count_q).scalar()
@classmethod
def count_devices_by_hour(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(case([(Alert.device_id != None, Alert.device_id)], else_=Alert.device_session_id))).label('device_count'),func.date_trunc('hour', cls.since).label('hour'))
return count_query.group_by('hour').all()
@classmethod
def count_devices_by_date(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(case([(Alert.device_id != None, Alert.device_id)], else_=Alert.device_session_id))).label('device_count'),func.date(cls.since).label('date'))
return count_query.group_by('date').all()
@classmethod
def remove_from_quarantine_by_alert(cls, alert, res_reason_id, res_comment):
cls.remove_from_quarantine(alert.alert_type.id, alert.device_id, alert.device_session_id, alert.data_collector_id, res_reason_id, res_comment)
@classmethod
def remove_from_quarantine_manually(cls, id, user_id, res_comment):
        qRec = cls.find_by_id(id)
        if not qRec:
            raise RuntimeError(f'Quarantine record with id {id} not found')
        if qRec.resolved_at is not None:
            raise RuntimeError('Quarantine is already resolved')
        reason = QuarantineResolutionReason.find_by_type(QuarantineResolutionReasonType.MANUAL)
        if not reason:
            raise RuntimeError('Manual quarantine resolution type not found')
        qRec.resolved_at = datetime.now()  # the module imports the datetime class directly
        qRec.resolved_by_id = user_id
        qRec.resolution_reason_id = reason.id
        qRec.resolution_comment = res_comment
        qRec.db_update()
@classmethod
def remove_from_quarantine(cls, alert_type_id, device_id, device_session_id, data_collector_id, res_reason_id, res_comment):
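        # Resolve the matching open quarantine record, if one exists; otherwise do nothing.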
qrec = cls.find_open_by_type_dev_coll(alert_type_id, device_id, device_session_id, data_collector_id)
if qrec:
            qrec.resolved_at = datetime.now()  # datetime class, not the module
qrec.resolution_reason_id = res_reason_id
qrec.resolution_comment = res_comment
qrec.db_update()
# Returns the list of data collector ids a user can see: the ones assigned to that user.
# The optional 'collectors_filter_id_strings' param, when present, further narrows the
# visible collectors. Raises ValueError if the filter names a collector the user cannot see.
def get_user_collector_ids(user, collectors_filter_id_strings=None):
if config.ASSIGN_COLLECTOR_TO_USER_ENABLED:
admin_user = is_admin_user(user.id)
else:
# If the feature is not enabled then behave as if the user was an admin - all collectors are visible.
admin_user = True
if collectors_filter_id_strings and len(collectors_filter_id_strings) > 0:
# Filtering is on.
collectors_from_filter = list(map(lambda user_id: int(user_id), collectors_filter_id_strings))
if not admin_user:
# Non-admin user -> check that the user has access to all the collectors in the filter.
collectors_for_user = list(map(lambda c: c.id, user.collectors))
all_requested_collectors_accessible = set(collectors_from_filter).issubset(collectors_for_user)
if not all_requested_collectors_accessible:
raise ValueError('The user does not have access to all the data collectors in the filter.')
collectors = collectors_from_filter
else:
# No filtering.
if admin_user:
# Admin user -> list data from all the collectors in the organization.
collectors = []
else:
# Non-admin user -> list data from the collectors that are assigned to the user.
if user.collectors and len(user.collectors) > 0:
collectors = list(map(lambda u: u.id, user.collectors))
else:
collectors = [-1] # The user does not have access to any collector.
return collectors
# verify whether the specified user has the role 'User_Admin'
def is_admin_user(user_id):
    # Look the role up first; calling .id on a missing role would raise AttributeError.
    role = UserRole.find_by_role_name(RoleTypes.User_Admin.value)
    if not role:
        return None
    return UserToUserRole.find_by_user_id_and_user_role_id(user_id, role.id) is not None
# verify whether the specified user has the role 'System'
def is_system_user(user_id):
    role = UserRole.find_by_role_name(RoleTypes.System.value)
    if not role:
        return None
    return UserToUserRole.find_by_user_id_and_user_role_id(user_id, role.id) is not None
return False | iot_api/user_api/model.py | from iot_api import bcrypt
from iot_api.user_api.enums import RoleTypes
from iot_api.user_api.models.DataCollector import *
from iot_api.user_api import Error
from iot_api import config
from sqlalchemy import Table, Column, ForeignKey, func, desc, asc, cast, case, and_, or_, distinct, \
DateTime, String, Integer, BigInteger, SmallInteger, Float, Boolean
from sqlalchemy.types import JSON
from sqlalchemy.orm import relationship
import json
from datetime import datetime
LOG = iot_logging.getLogger(__name__)
class Organization(db.Model):
__tablename__ = "organization"
id = db.Column(db.BigInteger, primary_key=True, autoincrement=True)
name = db.Column(db.String(120), unique=True)
country = db.Column(db.String(120))
region = db.Column(db.String(120))
users = db.relationship("User", backref="organization", lazy=True)
def to_json(self):
return {
"name": self.name,
"country": self.country,
"region": self.region
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.commit()
def delete_from_db(self):
db.session.delete(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_name(cls, name):
return cls.query.filter_by(name=name).first()
@classmethod
def find_by_id(cls, organization_id, json):
if json:
return {"organizations": list(map(lambda organization: organization.to_json(), cls.query.filter_by(
id=organization_id).all()))}
else:
return cls.query.filter_by(id=organization_id).first()
@classmethod
def return_all(cls, json):
if json:
return {"organizations": list(map(lambda organization: organization.to_json(), cls.query.all()))}
else:
return cls.query.all()
user_to_data_collector_association_table = Table(
'user_to_data_collector',
db.Model.metadata,
Column('user_id', Integer, ForeignKey('iot_user.id')),
Column('data_collector_id', Integer, ForeignKey('data_collector.id'))
)
class User(db.Model):
__tablename__ = "iot_user"
id = db.Column(db.BigInteger, primary_key=True, autoincrement=True)
username = db.Column(db.String(32), index=True, unique=True, nullable=False)
full_name = db.Column(db.String(64), nullable=False)
email = db.Column(db.String(320), unique=True, nullable=False)
phone = db.Column(db.String(30), nullable=False)
password = db.Column(db.String(120), nullable=False)
user_roles = db.relationship("UserToUserRole", back_populates="user")
organization_id = db.Column(db.BigInteger, db.ForeignKey("organization.id"), nullable=False)
active = db.Column(db.Boolean, nullable=False, default=False)
deleted = db.Column(db.Boolean, nullable=False, default=False)
blocked = db.Column(db.Boolean, nullable=False, default=False)
collectors = relationship("DataCollector", secondary=user_to_data_collector_association_table, lazy="joined")
def to_json(self):
try:
organization_name = Organization.find_by_id(self.organization_id, False).name
user_roles = list(map(lambda x: x.user_role_id, self.user_roles)) # listing user roles for active user
if not user_roles: # listing user roles for not active user created before delay
account_activation_list = AccountActivation.find_last_tokens_by_user_id(self.id)
if account_activation_list:
account_activation = account_activation_list[0]
user_roles = list(account_activation.user_roles_id)
user_roles = list(filter(lambda x: x != ',', user_roles))
user_roles = [int(x) for x in user_roles]
return {
"id": self.id,
"username": self.username,
"full_name": self.full_name,
"phone": self.phone,
"email": self.email,
"user_roles": user_roles,
"organization_id": self.organization_id,
"active": self.active,
"organization_name": organization_name
}
except Exception as e:
LOG.error(e)
def to_short_info_json(self):
return {
"id": self.id,
"username": self.username,
"full_name": self.full_name,
}
@staticmethod
def generate_hash(password):
return bcrypt.generate_password_hash(password).decode('utf - 8')
@staticmethod
def verify_hash(password, hash):
return bcrypt.check_password_hash(hash, password)
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_username(cls, username):
try:
return cls.query.filter_by(username=username.lower()).first()
except Exception as e:
LOG.error(e)
@classmethod
def find_by_id(cls, user_id):
try:
return cls.query.filter_by(id=user_id).first()
except Exception as e:
LOG.error(e)
@classmethod
def find_all_user_by_organization_id(cls, organization_id):
try:
return cls.query.filter(cls.id == UserToUserRole.user_id)\
.filter(~cls.user_roles.any(UserToUserRole.user_role_id == 9))\
.filter_by(organization_id=organization_id).all()
except Exception as e:
LOG.error(e)
@classmethod
def get_count_by_organization_id(cls, organization_id):
try:
return cls.query.filter_by(organization_id=organization_id).count()
except Exception as e:
LOG.error(e)
@classmethod
def find_by_email(cls, email):
try:
return cls.query.filter_by(email=email.lower()).all()
#filter(cls.id == UserToUserRole.user_id). -> commented due to the delay in creating the table user_to_user_role after user is activated
except Exception as e:
LOG.error(e)
@classmethod
def find(cls, organization_id=None, page=None, size=None):
try:
q1 = cls.query.filter(cls.deleted == False).filter(~cls.user_roles.any(UserToUserRole.user_role_id == 9))\
.filter(cls.id == UserToUserRole.user_id)
q2 = cls.query.filter(cls.active == False,cls.deleted == False).filter(cls.id == AccountActivation.user_id)
if organization_id is not None:
q1 = q1.filter(cls.organization_id == organization_id)
q2 = q2.filter(cls.organization_id == organization_id)
query = q1.union(q2)
return query.paginate(page=page, per_page=size, error_out=config.ERROR_OUT,
max_per_page=config.MAX_PER_PAGE)
except Exception as e:
LOG.error(e)
@classmethod
def get_count_all(cls):
try:
return cls.query.count()
except Exception as e:
LOG.error(e)
class UserRole(db.Model):
__tablename__ = "user_role"
id = db.Column(db.BigInteger, primary_key=True)
role_name = db.Column(db.String(120), unique=True, nullable=False)
def to_json(self):
return {
'id': self.id,
'role_name': self.role_name
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.add(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_id(cls, id):
return cls.query.filter_by(id=id).first()
@classmethod
def find_by_role_name(cls, role_name):
return cls.query.filter_by(role_name=role_name).first()
@classmethod
def return_all(cls, json):
roles = cls.query.filter(cls.id != 9).all()
if json:
return {"user_roles": list(map(lambda user_role: user_role.to_json(), roles))}
else:
return roles
class UserToUserRole(db.Model):
__tablename__ = "user_to_user_role"
user_id = db.Column(db.BigInteger, db.ForeignKey(
"iot_user.id"), primary_key=True)
user_role_id = db.Column(db.BigInteger, db.ForeignKey(
"user_role.id"), primary_key=True)
user = db.relationship("User", back_populates="user_roles")
def to_json(self):
return {
"user_id": self.user_id,
"user_role_id": self.user_role_id,
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.add(self)
db.session.commit()
def delete_from_db(self):
db.session.delete(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_all_user_by_user_role_id(cls, user_role_id):
return cls.query.filter_by(user_role_id=user_role_id).all()
@classmethod
def find_all_user_role_by_user_id(cls, user_id):
return cls.query.filter_by(user_id=user_id).all()
@classmethod
def find_by_user_id_and_user_role_id(cls, user_id, user_role_id):
return cls.query.filter_by(user_id=user_id, user_role_id=user_role_id).first()
class AccountActivation(db.Model):
__tablename__ = "account_activation"
id = db.Column(db.BigInteger, primary_key=True)
user_id = db.Column(db.BigInteger, db.ForeignKey(
"iot_user.id"), nullable=False)
token = db.Column(db.String(500), nullable=False)
creation_date = db.Column(db.DateTime(timezone=True), nullable=False)
active = db.Column(db.Boolean, nullable=False, default=True)
# organization_id = db.Column(Integer, nullable=True)
user_roles_id = db.Column(String(40))
def to_json(self):
return {
'id': self.id,
'user_id': self.user_id,
'token': self.token,
'creation_date': self.creation_date,
'active': self.active
}
def save_to_db(self):
db.session.add(self)
db.session.commit()
def update_to_db(self):
db.session.commit()
def delete_from_db(self):
db.session.delete(self)
db.session.commit()
def rollback(self):
db.session.rollback()
@classmethod
def find_by_token(cls, token):
return cls.query.filter_by(token=token, active=True).first()
@classmethod
def find_active_tokens_by_user_id(cls, user_id):
return cls.query.filter_by(user_id=user_id, active=True).all()
@classmethod
def find_last_tokens_by_user_id(cls, user_id):
return cls.query.filter_by(user_id=user_id).order_by(desc(AccountActivation.creation_date))
class AlertAssetType(Enum):
DEVICE = 'DEVICE'
GATEWAY = 'GATEWAY'
BOTH = 'BOTH'
NONE = 'NONE'
LOOK_IN_ALERT_PARAMS = 'LOOK_IN_ALERT_PARAMS'
class AlertType(db.Model):
__tablename__ = 'alert_type'
id = Column(BigInteger, primary_key=True, autoincrement=True)
code = Column(String(20), nullable=False, unique=True)
name = Column(String(120), nullable=False)
message = Column(String(4096), nullable=True)
risk = Column(String(20), nullable=False)
description = Column(String(3000), nullable=False)
parameters = Column(String(4096), nullable=True)
technical_description = Column(String(3000), nullable=True)
recommended_action = Column(String(3000), nullable=True)
quarantine_timeout = Column(Integer, nullable=True, default=0)
for_asset_type = Column(SQLEnum(AlertAssetType))
def to_json(self):
return {
'id': self.id,
'code': self.code,
'name': self.name,
'message': self.message,
'risk': self.risk,
'description': self.description,
'technicalDescription': self.technical_description,
'recommendedAction': self.recommended_action,
'parameters': json.loads(self.parameters if self.parameters is not None else '{}')
}
@classmethod
def find_all(cls):
return cls.query.all()
@classmethod
def find_and_count_all(cls, organization_id, _from, to, resolved, risks, data_collectors, types):
try:
query = db.session.query(Alert.type.label('type'), func.count(1).label('count'))\
.join(DataCollector)\
.filter(DataCollector.organization_id == organization_id)\
.filter(Alert.show == True)
if _from:
query = query.filter(Alert.created_at >= _from)
if to:
query = query.filter(Alert.created_at <= to)
if resolved is not None:
if resolved:
query = query.filter(Alert.resolved_at != None)
else:
query = query.filter(Alert.resolved_at == None)
if risks and len(risks) > 0:
query = query.join(AlertType).filter(AlertType.risk.in_(risks))
if data_collectors and len(data_collectors) > 0:
query = query.filter(Alert.data_collector_id.in_(data_collectors))
if types and len(types)>0:
query = query.filter(Alert.type.in_(types))
query = query.group_by(Alert.type)
return query.all()
except Exception as e:
LOG.error(e)
@classmethod
def find_one(cls, code):
return cls.query.filter(cls.code == code).first()
class Alert(db.Model):
__tablename__ = 'alert'
id = Column(BigInteger, primary_key=True, autoincrement=True)
type = Column(String(20), ForeignKey("alert_type.code"), nullable=False)
created_at = Column(DateTime(timezone=True), nullable=False)
device_id = Column(BigInteger, ForeignKey("device.id"), nullable=True)
device_session_id = Column(BigInteger, ForeignKey("device_session.id"), nullable=True)
gateway_id = Column(BigInteger, ForeignKey("gateway.id"), nullable=True)
device_auth_id = Column(BigInteger, ForeignKey("device_auth_data.id"), nullable=True)
data_collector_id = Column(BigInteger, ForeignKey("data_collector.id"), nullable=False)
parameters = Column(String(4096), nullable=False)
resolved_at = Column(DateTime(timezone=True), nullable=True)
resolved_by_id = Column(BigInteger, ForeignKey("iot_user.id"), nullable=True)
resolution_comment = Column(String(1024), nullable=True)
show = Column(Boolean, nullable=False, default=True)
resolved_by = relationship("User", lazy="joined")
alert_type = relationship("AlertType", lazy="joined")
data_collector = relationship("DataCollector", lazy="joined")
device = relationship("Device", lazy="joined")
gateway = relationship("Gateway", lazy="joined")
def to_json(self):
parsed_user = self.resolved_by.to_short_info_json() if self.resolved_by else None
return {
'id': self.id,
'type': self.type,
'created_at': "{}".format(self.created_at) if self.created_at else None,
'device_id': self.device_id,
'data_collector_id': self.data_collector_id,
'data_collector_name': self.data_collector.name,
'device_session_id': self.device_session_id,
'gateway_id': self.gateway_id,
'device_auth_id': self.device_auth_id,
'parameters': json.loads(self.parameters if self.parameters is not None else '{}'),
'resolved_at': None if self.resolved_at is None else "{}".format(self.resolved_at),
'resolution_comment': self.resolution_comment,
'resolved_by_id': self.resolved_by_id,
'resolved_by': parsed_user,
'asset_importance': self.get_asset_importance()
}
def to_count_json(self):
return {
'id': self.id,
'type': self.type,
'created_at': "{}".format(self.created_at)
}
def get_asset_importance(self):
if self.device:
asset_importance = self.device.importance.value
elif self.gateway:
asset_importance = self.gateway.importance.value
else:
asset_importance = None
return asset_importance
@classmethod
def find_one(cls, id):
return cls.query.filter(cls.id == id).first()
@classmethod
def find(cls, organization_id, since, until, types, resolved, risks, data_collectors, order_by, page, size):
try:
query = db.session.query(Alert)\
.join(DataCollector)\
.filter(DataCollector.organization_id == organization_id)\
.filter(cls.show == True)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.filter(AlertType.code == cls.type).filter(AlertType.risk.in_(risks))
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
if order_by and 'ASC' in order_by:
query = query.order_by(asc(cls.created_at))
else:
query = query.order_by(desc(cls.created_at))
if page is not None and size:
query = query.limit(size).offset(page*size)
result = query.all()
# print(f"found records: {len(result)}")
return result
except Exception as e:
LOG.error(e)
@classmethod
def find_with(cls, organization_id, since, until, types, resolved, risks, order_by, page, size, device_id=None, gateway_id=None, asset_type=None):
AlertTypeExplicit = db.aliased(AlertType)
AlertTypeImplicit = db.aliased(AlertType)
if asset_type == 'device':
alert_asset_type = AlertAssetType.DEVICE
elif asset_type == 'gateway':
alert_asset_type = AlertAssetType.GATEWAY
elif asset_type is None:
alert_asset_type = AlertAssetType.BOTH
else:
raise Error.BadRequest(f"Alert.find_with called with asset_type = {asset_type}")
query = db.session.query(Alert)\
.join(DataCollector)\
.join(AlertTypeExplicit, AlertTypeExplicit.code == Alert.type)\
.join(AlertTypeImplicit, or_(
AlertTypeImplicit.code == AlertTypeExplicit.code,
AlertTypeImplicit.code == cast(Alert.parameters, JSON)['alert_solved_type'].as_string(),
))\
.filter(DataCollector.organization_id == organization_id)\
.filter(cls.show == True)
if device_id is not None:
query = query.filter(cls.device_id == device_id)
if gateway_id is not None:
query = query.filter(cls.gateway_id == gateway_id)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.filter(AlertTypeExplicit.risk.in_(risks))
# Keep only alerts whose type matches the requested alert_asset_type
if alert_asset_type == AlertAssetType.BOTH:
valid_types = [AlertAssetType.BOTH, AlertAssetType.DEVICE, AlertAssetType.GATEWAY]
elif alert_asset_type == AlertAssetType.DEVICE or alert_asset_type == AlertAssetType.GATEWAY:
valid_types = [AlertAssetType.BOTH, alert_asset_type]
else:
valid_types = [alert_asset_type]
query = query.filter(or_(
AlertTypeExplicit.for_asset_type.in_(valid_types),
and_(
AlertTypeExplicit.for_asset_type == AlertAssetType.LOOK_IN_ALERT_PARAMS,
AlertTypeImplicit.for_asset_type.in_(valid_types)
)
))
if order_by:
order_field = order_by[0]
order_direction = order_by[1]
if 'ASC' == order_direction:
query = query.order_by(asc(getattr(cls, order_field)))
else:
query = query.order_by(desc(getattr(cls, order_field)))
else:
query = query.order_by(desc(cls.created_at)) # newest first if no order_by parameter is specified
if page and size:
return query.paginate(page=page, per_page=size, error_out=config.ERROR_OUT, max_per_page=config.MAX_PER_PAGE)
return query.all()
@classmethod
def count(cls, organization_id, since, until, types, resolved, risks, data_collectors):
try:
query = db.session.query(func.count(1).label('count'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.filter(cls.type == AlertType.code).filter(AlertType.risk.in_(risks))
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
return query.scalar()
except Exception as e:
LOG.error(e)
@classmethod
def count_by_date(cls, organization_id, since, until, types, resolved, risks):
try:
query = db.session.query(func.date(cls.created_at).label('date'), func.count(1).label('count'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
if risks and len(risks) > 0:
query = query.join(AlertType).filter(AlertType.risk.in_(risks))
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
query = query.group_by(func.date(cls.created_at)).order_by(asc(func.date(cls.created_at)))
return query.all()
except Exception as e:
LOG.error(e)
@classmethod
def count_by_hour(cls, organization_id, since, until, types, resolved, risks):
try:
query = db.session\
.query(func.date_trunc('hour', cls.created_at).label('hour'), func.count(1).label('count'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if risks and len(risks) > 0:
query = query.join(AlertType).filter(AlertType.risk.in_(risks))
query = query\
.group_by(func.date_trunc('hour', cls.created_at))\
.order_by(asc(func.date_trunc('hour', cls.created_at)))
return query.all()
except Exception as e:
LOG.error(e)
def update(self):
db.session.commit()
@classmethod
def group_by_date_and_risk(cls, organization_id, since, until, types, resolved, data_collectors):
try:
query = db.session.query(func.date(cls.created_at).label('date'), AlertType.risk.label('risk'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
query = query.join(AlertType)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
return query.distinct().all()
except Exception as e:
LOG.error(e)
@classmethod
def group_by_hour_and_risk(cls, organization_id, since, until, types, resolved, data_collectors):
try:
query = db.session\
.query(func.date_trunc('hour', cls.created_at).label('hour'), AlertType.risk.label('risk'))\
.filter(cls.data_collector_id == DataCollector.id)\
.filter(DataCollector.organization_id == organization_id) \
.filter(cls.show == True)
query = query.join(AlertType)
if since:
query = query.filter(cls.created_at >= since)
if until:
query = query.filter(cls.created_at <= until)
if types and len(types) > 0:
query = query.filter(cls.type.in_(types))
if resolved is not None:
if resolved:
query = query.filter(cls.resolved_at != None)
else:
query = query.filter(cls.resolved_at == None)
if data_collectors and len(data_collectors) > 0:
query = query.filter(cls.data_collector_id.in_(data_collectors))
return query.distinct().all()
except Exception as e:
LOG.error(e)
class AssetImportance(Enum):
LOW = 'LOW'
MEDIUM = 'MEDIUM'
HIGH = 'HIGH'
class Gateway(db.Model):
__tablename__ = 'gateway'
id = Column(BigInteger, primary_key=True, autoincrement=True)
gw_hex_id = Column(String(100), nullable=True)
name = Column(String, nullable=True)
vendor = Column(String, nullable=True)
location_latitude = Column(Float, nullable=True)
location_longitude = Column(Float, nullable=True)
data_collector_id = Column(BigInteger, db.ForeignKey("data_collector.id"), nullable=False)
organization_id = Column(BigInteger, db.ForeignKey("organization.id"), nullable=False)
connected = Column(Boolean, nullable=False, default=True)
first_activity = Column(DateTime(timezone=True), nullable=True)
last_activity = Column(DateTime(timezone=True), nullable=False)
activity_freq = Column(Float, nullable=True)
npackets_up = Column(BigInteger, nullable=False, default=0)
npackets_down = Column(BigInteger, nullable=False, default=0)
importance = Column(SQLEnum(AssetImportance))
def to_json(self):
return {
'id': self.id,
'gw_hex_id': self.gw_hex_id,
'name': self.name,
'vendor': self.vendor,
'location': {
'latitude': self.location_latitude,
'longitude': self.location_longitude
},
'data_collector_id': self.data_collector_id,
'organization_id': self.organization_id,
'connected': self.connected,
'last_activity': "{}".format(self.last_activity),
'activity_freq': self.activity_freq,
'importance': self.importance.value,
'npackets_up': self.npackets_up,
'npackets_down': self.npackets_down
}
class Device(db.Model):
__tablename__ = 'device'
id = Column(BigInteger, primary_key=True, autoincrement=True)
dev_eui = Column(String(16), nullable=False)
name = Column(String, nullable=True)
vendor = Column(String, nullable=True)
app_name = Column(String, nullable=True)
join_eui = Column(String(16), nullable=True)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
data_collector_id = Column(BigInteger, db.ForeignKey("data_collector.id"), nullable=False)
importance = Column(SQLEnum(AssetImportance))
repeated_dev_nonce = Column(Boolean, nullable=True)
join_request_counter = Column(Integer, nullable=False, default=0)
join_accept_counter = Column(Integer, nullable=False, default=0)
has_joined = Column(Boolean, nullable=True, default=False)
join_inferred = Column(Boolean, nullable=True, default=False)
is_otaa = Column(Boolean, nullable=True)
last_packet_id = Column(BigInteger, ForeignKey("packet.id"), nullable=True)
first_up_timestamp = Column(db.DateTime(timezone=True), nullable=True)
last_up_timestamp = Column(DateTime(timezone=True), nullable=True)
pending_first_connection = Column(Boolean, nullable=False, default=True)
connected = Column(Boolean, nullable=False, default=True)
first_activity = Column(DateTime(timezone=True), nullable=True)
last_activity = Column(DateTime(timezone=True), nullable=True)
activity_freq = Column(Float, nullable=True)
activity_freq_variance = Column(Float, nullable=False, default=0)
npackets_up = Column(BigInteger, nullable=False, default=0)
npackets_down = Column(BigInteger, nullable=False, default=0)
npackets_lost = Column(Float, nullable=False, default=0)
max_rssi = Column(Float, nullable=True)
max_lsnr = Column(Float, nullable=True)
ngateways_connected_to = Column(BigInteger, nullable=False, default=0)
payload_size = Column(BigInteger, nullable=True)
last_packets_list = Column(String(4096), nullable=True, default='[]')
def to_json(self):
return {
'id': self.id,
'dev_eui': self.dev_eui,
'name': self.name,
'vendor': self.vendor,
'app_name': self.app_name,
'join_eui': self.join_eui,
'data_collector_id': self.data_collector_id,
'organization_id': self.organization_id,
'first_up_timestamp': "{}".format(self.first_up_timestamp),
'last_up_timestamp': "{}".format(self.last_up_timestamp),
'repeated_dev_nonce': self.repeated_dev_nonce,
'join_request_counter': self.join_request_counter,
'join_accept_counter': self.join_accept_counter,
'has_joined': self.has_joined,
'join_inferred': self.join_inferred,
'is_otaa': self.is_otaa,
'last_packet_id': self.last_packet_id,
'connected': self.connected,
'last_activity': "{}".format(self.last_activity),
'activity_freq': self.activity_freq,
'activity_freq_variance': self.activity_freq_variance,
'importance': self.importance.value,
'npackets_up': self.npackets_up,
'npackets_down': self.npackets_down,
'npackets_lost': self.npackets_lost,
'max_rssi': self.max_rssi,
'pending_first_connection': self.pending_first_connection
}
@classmethod
def find(cls, organization_id, since, until, page, size):
query = cls.query.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.last_up_timestamp >= since)
if until:
query = query.filter(cls.last_up_timestamp <= until)
query = query.order_by(desc(cls.last_up_timestamp))
if page is not None and size:
query = query.limit(size).offset(page*size)
return query.all()
@classmethod
def count_by_date(cls, organization_id, since, until):
query = db.session.query(func.date(cls.last_up_timestamp).label('date'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.last_up_timestamp >= since)
if until:
query = query.filter(cls.last_up_timestamp <= until)
query = query.group_by(func.date(cls.last_up_timestamp))
return query.all()
@classmethod
def count_by_hour(cls, organization_id, since, until):
query = db.session\
.query(func.date_trunc('hour', cls.last_up_timestamp).label('hour'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.last_up_timestamp >= since)
if until:
query = query.filter(cls.last_up_timestamp <= until)
query = query.group_by(func.date_trunc('hour', cls.last_up_timestamp))
return query.all()
class GatewayToDevice(db.Model):
__tablename__ = 'gateway_to_device'
gateway_id = Column(BigInteger, db.ForeignKey("gateway.id"), nullable=False, primary_key=True)
device_id = Column(BigInteger, db.ForeignKey("device.id"), nullable=False, primary_key=True)
class DeviceSession(db.Model):
__tablename__ = 'device_session'
id = Column(BigInteger, primary_key=True, autoincrement=True)
may_be_abp = Column(Boolean, nullable=True)
reset_counter = Column(Integer, nullable=False, default=0)
is_confirmed = Column(Boolean, nullable=True)
dev_addr = Column(String(8), nullable=False)
up_link_counter = Column(Integer, nullable=False, default=-1)
down_link_counter = Column(Integer, nullable=False, default=-1)
max_down_counter = Column(Integer, nullable=False, default=-1)
max_up_counter = Column(Integer, nullable=False, default=-1)
total_down_link_packets = Column(BigInteger, nullable=False, default=0)
total_up_link_packets = Column(BigInteger, nullable=False, default=0)
first_down_timestamp = Column(DateTime(timezone=True), nullable=True)
first_up_timestamp = Column(DateTime(timezone=True), nullable=True)
last_down_timestamp = Column(DateTime(timezone=True), nullable=True)
last_up_timestamp = Column(DateTime(timezone=True), nullable=True)
device_id = Column(BigInteger, ForeignKey("device.id"), nullable=True)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
data_collector_id = Column(BigInteger, db.ForeignKey("data_collector.id"), nullable=False)
device_auth_data_id = Column(BigInteger, ForeignKey("device_auth_data.id"), nullable=True)
last_packet_id = Column(BigInteger, ForeignKey("packet.id"), nullable=True)
last_activity = Column(DateTime(timezone=True), nullable=False)
connected = Column(Boolean, nullable=False, default=True)
class Packet(db.Model):
__tablename__ = 'packet'
id = Column(BigInteger, primary_key=True, autoincrement=True)
date = Column(DateTime(timezone=True), nullable=False)
topic = Column(String(256), nullable=True)
data_collector_id = Column(BigInteger, ForeignKey("data_collector.id"), nullable=False)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
gateway = Column(String(16), nullable=True)
tmst = Column(BigInteger, nullable=True)
chan = Column(SmallInteger, nullable=True)
rfch = Column(Integer, nullable=True)
seqn = Column(Integer, nullable=True)
opts = Column(String(20), nullable=True)
port = Column(Integer, nullable=True)
freq = Column(Float, nullable=True)
stat = Column(SmallInteger, nullable=True)
modu = Column(String(4), nullable=True)
datr = Column(String(50), nullable=True)
codr = Column(String(10), nullable=True)
lsnr = Column(Float, nullable=True)
rssi = Column(Integer, nullable=True)
size = Column(Integer, nullable=True)
data = Column(String(300), nullable=True)
m_type = Column(String(20), nullable=True)
major = Column(String(10), nullable=True)
mic = Column(String(8), nullable=True)
join_eui = Column(String(16), nullable=True)
dev_eui = Column(String(16), nullable=True)
dev_nonce = Column(Integer, nullable=True)
dev_addr = Column(String(8), nullable=True)
adr = Column(Boolean, nullable=True)
ack = Column(Boolean, nullable=True)
adr_ack_req = Column(Boolean, nullable=True)
f_pending = Column(Boolean, nullable=True)
class_b = Column(Boolean, nullable=True)
f_count = Column(Integer, nullable=True)
f_opts = Column(String(500), nullable=True)
f_port = Column(Integer, nullable=True)
error = Column(String(300), nullable=True)
latitude = Column(Float, nullable=True)
longitude = Column(Float, nullable=True)
altitude = Column(Float, nullable=True)
app_name = Column(String(100), nullable=True)
dev_name = Column(String(100), nullable=True)
def to_json(self):
return {
'id': self.id,
'date': self.date.strftime(config.DATE_FORMAT),
'topic': self.topic,
'data_collector_id': self.data_collector_id,
'organization_id': self.organization_id,
'gateway': self.gateway,
'tmst': self.tmst,
'chan': self.chan,
'rfch': self.rfch,
'seqn': self.seqn,
'opts': self.opts,
'port': self.port,
'freq': self.freq,
'stat': self.stat,
'modu': self.modu,
'datr': self.datr,
'codr': self.codr,
'lsnr': self.lsnr,
'rssi': self.rssi,
'size': self.size,
'data': self.data,
'm_type': self.m_type,
'major': self.major,
'mic': self.mic,
'join_eui': self.join_eui,
'dev_eui': self.dev_eui,
'dev_nonce': self.dev_nonce,
'dev_addr': self.dev_addr,
'adr': self.adr,
'ack': self.ack,
'adr_ack_req': self.adr_ack_req,
'f_pending': self.f_pending,
'class_b': self.class_b,
'f_count': self.f_count,
'f_opts': self.f_opts,
'f_port': self.f_port,
'error': self.error
}
@classmethod
def find(cls, organization_id, mtype, since, until, page, size):
query = cls.query.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.date >= since)
if until:
query = query.filter(cls.date <= until)
if mtype:
query = query.filter(cls.m_type == mtype)
query = query.order_by(desc(cls.date))
if page is not None and size:
query = query.limit(size).offset(page*size)
return query.all()
@classmethod
def count_by_date(cls, organization_id, mtype, since, until):
query = db.session.query(func.date(cls.date).label('date'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.date >= since)
if until:
query = query.filter(cls.date <= until)
if mtype:
query = query.filter(cls.m_type == mtype)
query = query.group_by(func.date(cls.date))
return query.all()
@classmethod
def count_by_hour(cls, organization_id, mtype, since, until):
query = db.session.query(func.date_trunc('hour', cls.date).label('hour'), func.count(1).label('count'))\
.filter(cls.organization_id == organization_id)
if since:
query = query.filter(cls.date >= since)
if until:
query = query.filter(cls.date <= until)
if mtype:
query = query.filter(cls.m_type == mtype)
query = query.group_by(func.date_trunc('hour', cls.date))
return query.all()
@classmethod
def find_max_by_organization_id(cls, organization_id, min_date):
query = db.session.query(cls.data_collector_id.label('data_collector_id'), func.max(cls.date).label('date'))
query = query.filter(cls.date > min_date, cls.organization_id == organization_id)
query = query.group_by(cls.data_collector_id)
return query.all()
class DeviceAuthData(db.Model):
__tablename__ = 'device_auth_data'
id = Column(BigInteger, primary_key=True, autoincrement=True)
join_request = Column(String(200), nullable=True)
join_accept = Column(String(200), nullable=True)
apps_key = Column(String(32), nullable=True)
nwks_key = Column(String(32), nullable=True)
data_collector_id = Column(BigInteger, ForeignKey("data_collector.id"), nullable=False)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
app_key_id = Column(BigInteger, ForeignKey("app_key.id"), nullable=False)
device_id = Column(BigInteger, ForeignKey("device.id"), nullable=True)
device_session_id = Column(BigInteger, ForeignKey("device_session.id"), nullable=True)
class QuarantineResolutionReasonType(Enum):
MANUAL = 'MANUAL'
AUTOMATIC = 'AUTOMATIC'
class QuarantineResolutionReason(db.Model):
__tablename__ = "quarantine_resolution_reason"
# region fields
id = Column(BigInteger, primary_key=True, autoincrement=True)
type = Column(SQLEnum(QuarantineResolutionReasonType))
name = Column(String(80), nullable=False)
description = Column(String(200), nullable=True)
# endregion
@classmethod
def find_by_id(cls, id):
return cls.query.filter(cls.id == id).first()
@classmethod
def find_by_type(cls, type):
return cls.query.filter(cls.type == type).first()
class Quarantine(db.Model):
__tablename__ = "quarantine"
#region fields
id = Column(BigInteger, primary_key=True, autoincrement=True)
organization_id = Column(BigInteger, ForeignKey("organization.id"), nullable=False)
# alert relationship
alert_id = Column(BigInteger, ForeignKey("alert.id"), nullable=False)
# since when is this device/alert in quarantine
since = Column(DateTime(timezone=True), nullable=False)
# last time the condition for quarantine was checked
last_checked = Column(DateTime(timezone=True), nullable=True)
# when was resolved, if applicable
resolved_at = Column(DateTime(timezone=True), nullable=True)
# who resolved the quarantine, if applicable
resolved_by_id = Column(BigInteger, ForeignKey("iot_user.id"), nullable=True)
# resolution reason relationship, if resolved. Null if not
resolution_reason_id = Column(BigInteger, ForeignKey("quarantine_resolution_reason.id"), nullable=True)
# resolution comment (optional)
resolution_comment = Column(String(1024), nullable=True)
# quarantine parameters (optional)
parameters = Column(String(4096), nullable=True)
# device relationship
device_id = Column(BigInteger, ForeignKey("device.id"))
alert = relationship("Alert", lazy="joined")
#endregion
def to_list_json(self):
data_collector = DataCollector.find_by_id(self.alert.data_collector_id)
return {
'id': self.id,
'organization_id': self.organization_id,
'alert': self.alert.to_json(),
'alert_type': self.alert.alert_type.to_json(),
'device_id': self.device_id,
'data_collector_id': data_collector.id,
'data_collector_name': data_collector.name,
'parameters': json.loads(self.parameters if self.parameters is not None else '{}'),
'since': f'{self.since}' if self.since else None,
'last_checked': f'{self.last_checked}' if self.last_checked else None,
'resolved_at': f'{self.resolved_at}' if self.resolved_at else None,
'resolved_by_id': self.resolved_by_id,
'resolution_reason_id': self.resolution_reason_id,
'resolution_comment': self.resolution_comment
}
def db_insert(self):
db.session.add(self)
db.session.commit()
def db_update(self):
db.session.commit()
def db_delete(self):
db.session.delete(self)
db.session.commit()
@classmethod
def find_by_id(cls, id):
return cls.query.filter(cls.id == id).first()
@classmethod
def get_list_query(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.query.filter(cls.organization_id == organization_id).filter(cls.resolved_at.is_(None))\
.filter(DataCollector.deleted_at.is_(None)).join(Alert).join(AlertType).join(DataCollector)
if since:
query = query.filter(cls.since >= since)
if until:
query = query.filter(cls.since <= until)
if alert_types and len(alert_types) > 0:
query = query.filter(AlertType.id.in_(alert_types))
if risks and len(risks) > 0:
query = query.filter(AlertType.risk.in_(risks))
if devices and len(devices) > 0:
query = query.filter(cls.device_id.in_(devices))
if data_collectors and len(data_collectors) > 0:
query = query.filter(Alert.data_collector_id.in_(data_collectors))
return query
@classmethod
def find(cls, organization_id, since, until, alert_types, devices, risks, data_collectors, order_by, page, size, gateway_id=None, asset_type=None):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
if gateway_id is not None:
query = query.filter(Alert.gateway_id == gateway_id)
# Keep only quarantine records whose alert type matches the requested asset_type
if asset_type is not None:
if asset_type == 'device':
alert_asset_type = AlertAssetType.DEVICE
elif asset_type == 'gateway':
alert_asset_type = AlertAssetType.GATEWAY
else:
raise Error.BadRequest(f"Quarantine.find called with asset_type = {asset_type}")
if alert_asset_type == AlertAssetType.BOTH:
valid_types = [AlertAssetType.BOTH, AlertAssetType.DEVICE, AlertAssetType.GATEWAY]
elif alert_asset_type == AlertAssetType.DEVICE or alert_asset_type == AlertAssetType.GATEWAY:
valid_types = [AlertAssetType.BOTH, alert_asset_type]
else:
valid_types = [alert_asset_type]
AlertTypeImplicit = db.aliased(AlertType)
query = query.join(AlertTypeImplicit, or_(
AlertTypeImplicit.code == AlertType.code,
AlertTypeImplicit.code == cast(Alert.parameters, JSON)['alert_solved_type'].as_string(),
))\
.filter(or_(
AlertType.for_asset_type.in_(valid_types),
and_(
AlertType.for_asset_type == AlertAssetType.LOOK_IN_ALERT_PARAMS,
AlertTypeImplicit.for_asset_type.in_(valid_types)
)
))
if order_by:
order_field = order_by[0]
order_direction = order_by[1]
if 'ASC' == order_direction:
query = query.order_by(asc(getattr(cls, order_field)))
else:
query = query.order_by(desc(getattr(cls, order_field)))
else:
query = query.order_by(desc(cls.since), Alert.device_id, Alert.data_collector_id)
if page and size:
return query.paginate(page=page, per_page=size, error_out=config.ERROR_OUT, max_per_page=config.MAX_PER_PAGE)
return query.all()
@classmethod
def count(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
# this generates a select count(*) from xxx where yyy instead of select count(*) from (select yyy from xxx)
# see https://gist.github.com/hest/8798884
count_q = query.statement.with_only_columns([func.count(func.distinct(Quarantine.id))])
return query.session.execute(count_q).scalar()
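# Illustration of the trick above (a sketch of the generated SQL, not code
# that runs here): a plain query.count() would emit
#     SELECT count(*) FROM (SELECT ... FROM quarantine JOIN ...) AS anon_1
# whereas with_only_columns rewrites the same statement in place to
#     SELECT count(DISTINCT quarantine.id) FROM quarantine JOIN ... WHERE ...
# so the database never materializes the inner subquery.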
@classmethod
def count_by_data_collector(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(Quarantine.id)).label('quarantine_count'), Alert.data_collector_id.label('data_collector_id'), DataCollector.name.label('data_collector_name'))
return count_query.group_by('data_collector_id','data_collector_name').all()
@classmethod
def count_by_risk(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(Quarantine.id)).label('quarantine_count'), AlertType.risk.label('alert_type_risk'))
return count_query.group_by('alert_type_risk').all()
@classmethod
def count_by_alert_type(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(Quarantine.id)).label('quarantine_count'), AlertType.id.label('alert_type_id'), AlertType.name.label('alert_type_name'))
return count_query.group_by('alert_type_id','alert_type_name').all()
@classmethod
def count_devices(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_q = query.statement.with_only_columns([func.count(func.distinct(case([(Alert.device_id != None, Alert.device_id)], else_=Alert.device_session_id)))])
return query.session.execute(count_q).scalar()
@classmethod
def count_devices_by_hour(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(case([(Alert.device_id != None, Alert.device_id)], else_=Alert.device_session_id))).label('device_count'),func.date_trunc('hour', cls.since).label('hour'))
return count_query.group_by('hour').all()
@classmethod
def count_devices_by_date(cls, organization_id, since, until, alert_types, devices, risks, data_collectors):
query = cls.get_list_query(organization_id, since, until, alert_types, devices, risks, data_collectors)
count_query = query.with_entities(func.count(func.distinct(case([(Alert.device_id != None, Alert.device_id)], else_=Alert.device_session_id))).label('device_count'),func.date(cls.since).label('date'))
return count_query.group_by('date').all()
@classmethod
def remove_from_quarantine_by_alert(cls, alert, res_reason_id, res_comment):
cls.remove_from_quarantine(alert.alert_type.id, alert.device_id, alert.device_session_id, alert.data_collector_id, res_reason_id, res_comment)
@classmethod
def remove_from_quarantine_manually(cls, id, user_id, res_comment):
qRec = cls.find_by_id(id)
if not qRec:
raise RuntimeError(f'Quarantine record with id {id} not found')
if qRec.resolved_at is not None:
raise RuntimeError('Quarantine is already resolved')
reason = QuarantineResolutionReason.find_by_type(QuarantineResolutionReasonType.MANUAL)
if not reason:
raise RuntimeError('Manual quarantine resolution type not found')
qRec.resolved_at = datetime.datetime.now()
qRec.resolved_by_id = user_id
qRec.resolution_reason_id = reason.id
qRec.resolution_comment = res_comment
qRec.db_update()
@classmethod
def remove_from_quarantine(cls, alert_type_id, device_id, device_session_id, data_collector_id, res_reason_id, res_comment):
qrec = cls.find_open_by_type_dev_coll(alert_type_id, device_id, device_session_id, data_collector_id)
if qrec:
qrec.resolved_at = datetime.datetime.now()
qrec.resolution_reason_id = res_reason_id
qrec.resolution_comment = res_comment
qrec.db_update()
# Gets the list of collector ids that a user can see - the ones that are assigned to that user.
# Takes a 'collectors_filter_id_strings' param that, when present, acts as a filter on top of the visible collectors.
# Raises a ValueError when a collector id in the filtering param is not visible by the user.
def get_user_collector_ids(user, collectors_filter_id_strings=None):
if config.ASSIGN_COLLECTOR_TO_USER_ENABLED:
admin_user = is_admin_user(user.id)
else:
# If the feature is not enabled then behave as if the user was an admin - all collectors are visible.
admin_user = True
if collectors_filter_id_strings and len(collectors_filter_id_strings) > 0:
# Filtering is on.
collectors_from_filter = list(map(int, collectors_filter_id_strings))
if not admin_user:
# Non-admin user -> check that the user has access to all the collectors in the filter.
collectors_for_user = list(map(lambda c: c.id, user.collectors))
all_requested_collectors_accessible = set(collectors_from_filter).issubset(collectors_for_user)
if not all_requested_collectors_accessible:
raise ValueError('The user does not have access to all the data collectors in the filter.')
collectors = collectors_from_filter
else:
# No filtering.
if admin_user:
# Admin user -> list data from all the collectors in the organization.
collectors = []
else:
# Non-admin user -> list data from the collectors that are assigned to the user.
if user.collectors and len(user.collectors) > 0:
collectors = list(map(lambda u: u.id, user.collectors))
else:
collectors = [-1] # The user does not have access to any collector.
return collectors
# Verify whether the given user id belongs to a user with the 'User_Admin' role.
def is_admin_user(user_id):
role = UserRole.find_by_role_name(RoleTypes.User_Admin.value)
if not role:  # guard first: calling .id on a missing role would raise
return None
if UserToUserRole.find_by_user_id_and_user_role_id(user_id, role.id):
return True
else:
return False
# Verify whether the given user id belongs to a user with the 'System' role.
def is_system_user(user_id):
role = UserRole.find_by_role_name(RoleTypes.System.value)
if not role:  # guard first: calling .id on a missing role would raise
return None
if UserToUserRole.find_by_user_id_and_user_role_id(user_id, role.id):
return True
else:
return False

# genetic_algorithm/nds.py
from math import factorial, inf
from random import randint
from genetic_algorithm.crossover import apply_crossover
from genetic_algorithm.mutation import apply_mutation
class NonDominatedSorter:
'''
Class for sorting the population by objective fitness scores
Sorts by using a non-dominated sorting approach
'''
def __init__(self, population, objectives, pop_size):
self.population = population
self.objectives = objectives
self.pop_size = pop_size
self.fronts = []
self.population_values = []
self.set_population_values()
# [0, []], where 0 is the dominated_by_counter and [] is the dominates_list
self.domination_list = [[0, []] for phoneme in population]
self.tournament_winner_indices = []
def set_population_values(self):
for phenotype in self.population:
values = []
for objective in self.objectives:
values.append(objective.get_total_fitness_value(phenotype))
self.population_values.append(values)
def set_fronts(self):
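# Pareto pass: phenotype i dominates j when i scores >= j on every objective
# and strictly > on at least one; domination_list[i][0] counts how many
# phenotypes dominate i, domination_list[i][1] lists the indices i dominates.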
values = self.population_values
len_values = len(values)
for index in range(len_values):
for check_index in range(len_values):
check = [False for x in range(len(values[index]))]
one_is_higher = False
for value_index in range(len(values[index])):
if values[index][value_index] >= values[check_index][value_index]:
check[value_index] = True
if values[index][value_index] > values[check_index][value_index]:
one_is_higher = True
if False not in check and one_is_higher:
self.domination_list[index][1].append(check_index)
self.domination_list[check_index][0] += 1
# print(self.population_values)
# print(self.domination_list)
while get_fronts_length(self.fronts) < len(self.population_values):
d_list = self.domination_list
front = []
for index in range(len(d_list)):
if not self.index_is_in_front(index) and d_list[index][0] == 0:
front.append(index)
self.fronts.append(front)
for index in front:
for dominated in d_list[index][1]:
d_list[dominated][0] -= 1
def get_new_population(self):
self.set_fronts()
'''
In case of NSGAIII
p = 4 # num ref points for each objective plane
get_reference_points(p, len(self.objectives))
'''
new_population = []
new_fronts = []
last_front = []
distances = []
for front in self.fronts:
distances.append(self.get_crowding_distance(front))
if len(new_population) + len(front) > self.pop_size:
last_front.extend(front)
break
else:
new_population.extend(front)
new_fronts.append(front)
distance_fix = []
new_last_front = []
while len(new_population) < self.pop_size:
max_distance = max(distances[-1])
add_index = distances[-1].index(max_distance)
new_population.append(last_front[add_index])
new_last_front.append(last_front[add_index])
distance_fix.append([add_index, distances[-1][add_index]])
distances[-1][add_index] = -inf
if len(new_last_front) > 0:
new_fronts.append(new_last_front)
for distance in distance_fix: # Sets distances to previous distances
distances[-1][distance[0]] = distance[1]
new_population = [self.population[x] for x in new_population]
# Tournament selection for crossover
for _ in range(len(new_population)):
crossover = []
for _ in range(2):
try:
front_1 = randint(0, len(new_fronts) - 1)
front_2 = randint(0, len(new_fronts) - 1)
except ValueError:
front_1, front_2 = 0, 0
if front_1 == front_2:
try:
index_1 = randint(0, len(new_fronts[front_1]) - 1)
index_2 = randint(0, len(new_fronts[front_1]) - 1)
except ValueError:
index_1, index_2 = 0, 0
if distances[front_1][index_1] > distances[front_1][index_2]:
crossover.append(new_fronts[front_1][index_1])
else:
crossover.append(new_fronts[front_1][index_2])
else:
index = min([front_1, front_2])
crossover.append(new_fronts[index][randint(0, len(new_fronts[index]) - 1)])
offspring = apply_crossover(self.population[crossover[0]], self.population[crossover[1]])
# Mutation and mutation probability
if randint(0, 100) <= 30: # Chance of offspring mutating
apply_mutation(offspring, True)
new_population.append(offspring)
# Adds indices from crossover (tournament selection winners) to tournament_winner_indices
self.tournament_winner_indices = []
for index in crossover:
if index not in self.tournament_winner_indices:
self.tournament_winner_indices.append(index)
self.population = new_population
return self.population, len(new_fronts)
def get_crowding_distance(self, front):
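# NSGA-II crowding distance: on each objective the boundary solutions get inf,
# and interior solutions accumulate the normalized gap between their two
# neighbours in objective-sorted order (a larger value means less crowded).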
distances = [0 for x in range(len(front))]
pop_values = self.population_values
for o_index in range(len(self.objectives)):
sorted_front = sorted(front, key=lambda x: pop_values[x][o_index])
sorted_dist_indices = [front.index(x) for x in sorted_front]
distances[sorted_dist_indices[0]] = inf
distances[sorted_dist_indices[-1]] = inf
for index in range(1, len(sorted_front) - 1):
if (pop_values[sorted_front[-1]][o_index] - pop_values[sorted_front[0]][o_index]) == 0:
continue
distances[sorted_dist_indices[index]] = (
distances[sorted_dist_indices[index]]
+ ((pop_values[sorted_front[index + 1]][o_index] - pop_values[sorted_front[index - 1]][o_index])
/ (pop_values[sorted_front[-1]][o_index] - pop_values[sorted_front[0]][o_index]))
)
return distances
def index_is_in_front(self, index):
for front in self.fronts:
if index in front:
return True
return False
'''
Needed only if upgrade from NSGAII to NSGAIII
def get_reference_points(p, num_objectives):
points = []
num_points = combs(num_objectives + p - 1, p)
decrement = 1 / p
print(points)
'''
def get_fronts_length(fronts):
length = 0
for front in fronts:
length += len(front)
return length
def combs(n, k):
try:
return factorial(n) // factorial(k) // factorial(n - k)
except ValueError:
return 0
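# --- Hedged usage sketch (illustrative; not part of the original nds.py) ---
# Exercises the non-dominated sorting step in isolation, assuming the
# genetic_algorithm package imports at the top of this module resolve.
# _StubObjective is a hypothetical stand-in for the repo's objective classes:
# anything exposing get_total_fitness_value(phenotype) works.
class _StubObjective:
    def __init__(self, index):
        self.index = index

    def get_total_fitness_value(self, phenotype):
        # Phenotypes in this sketch are plain tuples of precomputed scores.
        return phenotype[self.index]


if __name__ == '__main__':
    population = [(1, 5), (2, 4), (3, 3), (2, 2), (1, 1)]
    objectives = [_StubObjective(0), _StubObjective(1)]
    sorter = NonDominatedSorter(population, objectives, pop_size=4)
    sorter.set_fronts()
    # Indices grouped into Pareto fronts, best front first: [[0, 1, 2], [3], [4]]
    print(sorter.fronts)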

# recorder/utils.py
import StringIO
import time
import sys
from pydoc import pager
import termcolor as T
from subprocess import Popen, PIPE
try:
import dpkt
except ImportError:
dpkt = False
def collect(ntuple, sep='\n'):
lst = []
for field in ntuple._fields:
val = getattr(ntuple, field)
lst.append("%s=%s" % (field, val))
return sep.join(lst)
def indent(lst, w=1):
return map(lambda l: ('\t'*w) + l, lst)
def indent_str(s,w=1):
return indent(s.split("\n"), w)
def mac(addr):
return ':'.join(map(lambda octet: "%02x" % ord(octet),
addr))
def ipaddr(addr):
return '.'.join(map(lambda octet: "%d" % ord(octet),
addr))
def red(s):
return T.colored(s, "red", attrs=['bold'])
def yellow(s):
return T.colored(s, "yellow")
def green(s):
return T.colored(s, "green")
def format_packet(bytearr):
if dpkt:
return dpkt.ethernet.Ethernet(str(bytearr))
else:
return bytearr
def waitListening(server, port, attempts=10):
"Wait until server is listening on port"
if not 'telnet' in Popen('which telnet', shell=True, stdout=PIPE).communicate()[0]:
raise Exception('Could not find telnet')
cmd = ('sh -c "echo A | telnet -e A %s %s"' % (server, port))
print 'Waiting for', server, 'to listen on port', port
for i in range(attempts):
if 'Connected' not in Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()[0]:
print '.',
sys.stdout.flush()
time.sleep(.5)
else:
return
raise Exception('Could not connect to the MongoDB server at %s:%s' %
(server, port))
class MyDict(dict):
def __init__(self, hsh={}, should_trim=False, indent_level=0):
super(MyDict, self).__init__(hsh)
self.should_trim = should_trim
self.indent_level = indent_level
def __repr__(self):
def trim(s):
if len(s) > 100:
return s[0:100] + "..."
return s
rep = ["Properties:"]
for k, v in self.iteritems():
if type(v) == bytearray:
v = format_packet(v)
if self.should_trim:
rep.append("\t"*self.indent_level + "%s=%s" % (k, trim(v.__repr__())))
else:
rep.append("Key: %s" % k)
rep.append("\t"*self.indent_level + "%s" % v.__repr__())
return "\n".join(rep)
def __getattr__(self, name):
return self.get(name, None)
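# --- Hedged usage sketch (illustrative; not part of the original utils.py) ---
# MyDict exposes keys as attributes (missing ones resolve to None rather than
# raising) and pretty-prints its contents via __repr__, trimming long values
# when should_trim is set. The module itself is Python 2 (print statements,
# iteritems), so run this sketch under Python 2 as well.
if __name__ == '__main__':
    props = MyDict({'saddr': '10.0.0.1', 'dport': 27017}, should_trim=True)
    print(props.saddr)    # '10.0.0.1', resolved through __getattr__
    print(props.missing)  # None -- absent keys do not raise AttributeError
    print(props)          # "Properties:" listing with values trimmed to 100 chars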
metadict_army['Генеральный штаб'] = {
'Командование Генерального штаба':1,
'Главное оперативное управление':1,
'Главное разведывательное управление':1,
'Главное организационное управление':1,
'Военно-научное управление':1,
'Управление военных сообщений':1,
'Управление материально-технического обеспечения':1,
'Военно-топографическое управление':1,
'Главное управление связи':1,
'Центральный командный пункт Генерального штаба':1,
}
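# Apparent structure of metadict_army: each key names a unit and each value
# maps sub-unit names to multiplicities; sub-unit names that have their own
# metadict_army entry (like those above) are presumably expanded recursively,
# while the leaf entries further below give personnel headcounts.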
metadict_army['Главное оперативное управление'] = {
'Командование управления Генерального штаба':1,
'Оперативное управление направлений':1,
'Управление авиации и противовоздушной обороны':1,
'Самостоятельные отделы главного оперативного управления':1,
'Канцелярия Генерального штаба':1,
}
metadict_army['Оперативное управление направлений'] = {
'Южное направление':1,
'Северное направление':1,
'Юго-восточное направление':1,
'Юго-западное направление':1,
'Западно-восточное океанское направление':1,
'Направление внутренних военных округов':1,
'Отдел дислокации и оперативного учёта войск':1,
'Секретное отделение Генерального штаба':1,
}
metadict_army['Управление авиации и противовоздушной обороны'] = {
'Авиационный отдел':1,
'Отдел противовоздушной обороны':1,
'Секретное отделение Генерального штаба':1,
}
metadict_army['Самостоятельные отделы главного оперативного управления'] = {
'Отдел планирования оперативной подготовки':1,
'Отдел по извучению и подготовке театра военных действий':1,
'Морской отдел главного оперативного управления':1,
}
metadict_army['Главное разведывательное управление'] = {
'Командование управления Генерального штаба':1,
'Управление стратегической разведки':1,
'Управление информации сухопутных войск и военно-воздушных сил':1,
'Самостоятельные отделы главного разведывательного управления':1,
}
metadict_army['Управление стратегической разведки'] = {
'Зебриканский отдел':1,
'Восточно-океанский отдел':1,
}
metadict_army['Управление информации сухопутных войск и военно-воздушных сил'] = {
'Зебриканский отдел информации':1,
'Восточно-океанский отдел информации':1,
'Кристально-имперский отдел информации':1,
'Отдел информации военно-воздушных сил':1,
}
metadict_army['Самостоятельные отделы главного разведывательного управления'] = {
'Отдел военно-морской стратегической разведки':1,
'Специальный отдел ГРУ':1,
'Отдел военно-морской информации':1,
'Военно-технический отдел ГРУ':1,
'Отдел военной экономики и театра военных действий':1,
'Отдел радиоразведки и радиотехнической разведки':1,
'Отдел оперативно-агентурной разведки':1,
'Отдел кадров ГРУ':1,
'Отдел внешних сношений ГРУ':1,
'Финансовый отдел ГРУ':1,
'Шифровальный отдел ГРУ':1,
'Политический отдел ГРУ':1,
'Отдел оперативной техники':1,
'Особая группа ГРУ':1,
'Отделение обработки документов ГРУ':1,
'Отделение радиосвязи ГРУ':1,
'Учётно-архивное отделение ГРУ':1,
'Редакционно-издательское отделение':1,
'Административно-хозяйственное отделение ГРУ':1,
}
metadict_army['Главное организационное управление'] = {
'Командование управления Генерального штаба':1,
'Мобилизационное управление':1,
'Управление укомплектования и службы войск':1,
'Самостоятельные отделы главного организационного управления':1,
}
metadict_army['Организационное управление'] = {
'Организационно-плановый отдел':1,
'Отдел сухопутных войск':1,
'Отдел военно-воздушных сил':1,
'Отдел военно-морских сил':1,
'Отдел технических войск и специальных частей':1,
'Отдел органов управления':1,
'Отдел военно-учебных заведений':1,
'Отдел тыловых частей и учреждений':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Мобилизационное управление'] = {
'Мобилизационно-плановый отдел':1,
'Отдел укомплектования личным составом запаса':1,
'Отдел комплектования мехтранспортом и дорожно-строительной техникой':1,
'Отдел учета военнообязанных запаса и бронирования':1,
'Мобилизационно-инспекторский отдел':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Управление укомплектования и службы войск'] = {
'Отдел укомплектования':1,
'Отдел службы войск':1,
'Отдел по выездам за границу':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Самостоятельные отделы главного организационного управления'] = {
'Отдел учёта численности войск':1,
'Отдел по персональному учёту потерь сержантов и солдат':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Военно-научное управление'] = {
'Командование управления Генерального штаба':1,
'Отдел по исследованию вопросов военного искусства':1,
'Военно-исторический отдел':1,
'Уставной отдел военно-научного управления':1,
'Отдел по планированию военно-научной работы':1,
'Редакционно-издательское отделение':1,
'Военно-научная библиотека Генерального штаба':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Управление военных сообщений'] = {
'Командование управления Генерального штаба':1,
'Отдел планирования железнодорожных перевозок':1,
'Отдел воздушных перевозок':1,
'Отдел эшелонных перевозок':1,
'Отдел грузовых перевозок':1,
'Отдел водных перевозок':1,
'Мобилизационно-технический отдел':1,
'Отдел оперативных и мобилизационных перевозок':1,
'Отдел боевой подготовки управления военных сообщений':1,
'Военно-научный отдел управления военных сообщений':1,
'Инспекция управления военных сообщений':1,
'Отделение кадров управления военных сообщений':1,
'Финансовое отделение управления военных сообщений':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Управление материально-технического обеспечения'] = {
'Командование управления Генерального штаба':1,
'Мобилизационно-плановый отдел вооружения и материально-технического обеспечения':1,
'Отдел мобилизационного планирования артиллерийского вооружения':1,
'Отдел мобилизационного планирования бронетанкового вооружения':1,
'Отдел мобилизационного планирования военной техники':1,
'Отдел мобилизационного планирования авиатехники':1,
'Отдел мобилизационного планирования ГСМ':1,
'Отдел мобилизационного планирования интендантского и медицинского имущества':1,
'Отдел мобилизационного планирования техники морского флота':1,
'Отдел опытно-конструкторских и научно-исследовательских работ':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Военно-топографическое управление'] = {
'Командование управления Генерального штаба':1,
'Организационно-плановый отдел военно-топографического управления':1,
'Отдел топогеодезический и картографический':1,
'Отдел обеспечения войск топографическими картами и специально-техническим вооружением':1,
'Отдел кадров и топографической подготовки войск':1,
'Финансовое отделение военно-топографического управления':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Главное управление связи'] = {
'Командование управления Генерального штаба':1,
'Организационно-плановый отдел':1,
'Отдел обеспечения шифросвязью по направлениям':1,
'Инспекторский отдел главного управления связи':1,
'Специальный отдел главного управления связи':1,
'Отдел по эксплуатации оборудования связи':1,
'Отдел кадров, боевой и специальной подготовки главного управления связи':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Самостоятельные отделы Генерального штаба'] = {
'Отдел связи Генерального штаба':1,
'Узел связи Генерального штаба':1,
'Отдел военой цензуры Генерального штаба':1,
'Политический отдел Генерального штаба':1,
'Отдел кадров Генерального штаба':1,
'Архивная служба Генерального штаба':1,
'Библиотека Генерального штаба':1,
'Административно-хозяйственный отдел Генерального штаба':1,
}
metadict_army['Авиационный отдел'] = {
'Военнослужащий Генерального штаба':23,
}
metadict_army['Административно-хозяйственная часть Генерального штаба'] = {
'Военнослужащий Генерального штаба':2,
'Служащий Генерального штаба':2,
}
metadict_army['Административно-хозяйственное отделение ГРУ'] = {
'Военнослужащий Генерального штаба':19,
'Служащий Генерального штаба':16,
}
metadict_army['Военно-исторический отдел'] = {
'Военнослужащий Генерального штаба':41,
'Служащий Генерального штаба':1,
}
metadict_army['Военно-научная библиотека Генерального штаба'] = {
'Военнослужащий Генерального штаба':2,
'Служащий Генерального штаба':12,
}
metadict_army['Военно-научный отдел управления военных сообщений'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Военно-технический отдел ГРУ'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Восточно-океанский отдел'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Восточно-океанский отдел информации'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Западно-восточное океанское направление'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Зебриканский отдел'] = {
'Военнослужащий Генерального штаба':25,
}
metadict_army['Зебриканский отдел информации'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Инспекторский отдел главного управления связи'] = {
'Военнослужащий Генерального штаба':30,
}
metadict_army['Инспекция управления военных сообщений'] = {
'Военнослужащий Генерального штаба':3,
}
metadict_army['Канцелярия Генерального штаба'] = {
'Военнослужащий Генерального штаба':20,
'Служащий Генерального штаба':3,
}
metadict_army['Командование Генерального штаба'] = {
'Начальник Генерального штаба':1,
'Заместитель начальника Генерального штаба':2,
}
metadict_army['Командование управления Генерального штаба'] = {
'Начальник управления Генерального штаба':1,
'Заместитель начальника управления Генерального штаба':2,
}
metadict_army['Кристально-имперский отдел информации'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Мобилизационно-инспекторский отдел'] = {
'Военнослужащий Генерального штаба':16,
}
metadict_army['Мобилизационно-плановый отдел'] = {
'Военнослужащий Генерального штаба':27,
}
metadict_army['Мобилизационно-плановый отдел вооружения и материально-технического обеспечения'] = {
'Военнослужащий Генерального штаба':43,
}
metadict_army['Мобилизационно-технический отдел'] = {
'Военнослужащий Генерального штаба':32,
}
metadict_army['Морской отдел главного оперативного управления'] = {
'Военнослужащий Генерального штаба':12,
}
metadict_army['Направление внутренних военных округов'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Организационно-плановый отдел'] = {
'Военнослужащий Генерального штаба':30,
}
metadict_army['Организационно-плановый отдел военно-топографического управления'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел информации военно-воздушных сил'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел боевой подготовки управления военных сообщений'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Особая группа ГРУ'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел внешних сношений ГРУ'] = {
'Военнослужащий Генерального штаба':10,
'Служащий Генерального штаба':3,
}
metadict_army['Отдел водных перевозок'] = {
'Военнослужащий Генерального штаба':17,
}
metadict_army['Отдел военно-морской информации'] = {
'Военнослужащий Генерального штаба':33,
'Служащий Генерального штаба':3,
}
metadict_army['Отдел военно-морской стратегической разведки'] = {
'Военнослужащий Генерального штаба':24,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел военной экономики и театра военных действий'] = {
'Военнослужащий Генерального штаба':40,
}
metadict_army['Отдел воздушных перевозок'] = {
'Военнослужащий Генерального штаба':30,
}
metadict_army['Отдел грузовых перевозок'] = {
'Военнослужащий Генерального штаба':25,
}
metadict_army['Отдел дислокации и оперативного учёта войск'] = {
'Военнослужащий Генерального штаба':10,
}
metadict_army['Отдел кадров ГРУ'] = {
'Военнослужащий Генерального штаба':40,
'Служащий Генерального штаба':22,
}
metadict_army['Отдел кадров и топографической подготовки войск'] = {
'Военнослужащий Генерального штаба':14,
'Служащий Генерального штаба':2,
}
metadict_army['Отделение кадров управления военных сообщений'] = {
'Военнослужащий Генерального штаба':3,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел кадров, боевой и специальной подготовки главного управления связи'] = {
'Военнослужащий Генерального штаба':16,
}
metadict_army['Отдел комплектования мехтранспортом и дорожно-строительной техникой'] = {
'Военнослужащий Генерального штаба':9,
}
metadict_army['Отдел мобилизационного планирования ГСМ'] = {
'Военнослужащий Генерального штаба':10,
}
metadict_army['Отдел мобилизационного планирования авиатехники'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Отдел мобилизационного планирования артиллерийского вооружения'] = {
'Военнослужащий Генерального штаба':19,
}
metadict_army['Отдел мобилизационного планирования бронетанкового вооружения'] = {
'Военнослужащий Генерального штаба':13,
}
metadict_army['Отдел мобилизационного планирования военной техники'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел мобилизационного планирования интендантского и медицинского имущества'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел мобилизационного планирования техники морского флота'] = {
'Военнослужащий Генерального штаба':12,
}
metadict_army['Отдел обеспечения войск топографическими картами и специально-техническим вооружением'] = {
'Военнослужащий Генерального штаба':20,
'Служащий Генерального штаба':13,
}
metadict_army['Отдел обеспечения шифросвязью по направлениям'] = {
'Военнослужащий Генерального штаба':130,
}
metadict_army['Отдел оперативно-агентурной разведки'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':2,
}
metadict_army['Отдел оперативной техники'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':5,
}
metadict_army['Отдел оперативных и мобилизационных перевозок'] = {
'Военнослужащий Генерального штаба':20,
'Служащий Генерального штаба':2,
}
metadict_army['Отдел опытно-конструкторских и научно-исследовательских работ'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Отдел планирования железнодорожных перевозок'] = {
'Военнослужащий Генерального штаба':19,
}
metadict_army['Отдел планирования оперативной подготовки'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел по выездам за границу'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':8,
}
metadict_army['Отдел по извучению и подготовке театра военных действий'] = {
'Военнослужащий Генерального штаба':28,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел по исследованию вопросов военного искусства'] = {
'Военнослужащий Генерального штаба':34,
}
metadict_army['Отдел по персональному учёту потерь сержантов и солдат'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':52,
}
metadict_army['Отдел по планированию военно-научной работы'] = {
'Военнослужащий Генерального штаба':6,
}
metadict_army['Отдел по эксплуатации оборудования связи'] = {
'Военнослужащий Генерального штаба':9,
}
metadict_army['Отдел противовоздушной обороны'] = {
'Военнослужащий Генерального штаба':17,
}
metadict_army['Отдел радиоразведки и радиотехнической разведки'] = {
'Военнослужащий Генерального штаба':29,
}
metadict_army['Отдел топогеодезический и картографический'] = {
'Военнослужащий Генерального штаба':22,
'Служащий Генерального штаба':2,
}
metadict_army['Отдел укомплектования'] = {
'Военнослужащий Генерального штаба':29,
}
metadict_army['Отдел укомплектования личным составом запаса'] = {
'Военнослужащий Генерального штаба':27,
}
metadict_army['Отдел учета военнообязанных запаса и бронирования'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел службы войск'] = {
'Военнослужащий Генерального штаба':29,
}
metadict_army['Отдел учёта численности войск'] = {
'Военнослужащий Генерального штаба':54,
'Служащий Генерального штаба':5,
}
metadict_army['Отдел эшелонных перевозок'] = {
'Военнослужащий Генерального штаба':17,
}
metadict_army['Отделение обработки документов ГРУ'] = {
'Военнослужащий Генерального штаба':32,
'Служащий Генерального штаба':45,
}
metadict_army['Отделение радиосвязи ГРУ'] = {
'Военнослужащий Генерального штаба':7,
'Служащий Генерального штаба':2,
}
metadict_army['Учётно-архивное отделение ГРУ'] = {
'Военнослужащий Генерального штаба':15,
'Служащий Генерального штаба':6,
}
metadict_army['Политический отдел ГРУ'] = {
'Военнослужащий Генерального штаба':15,
'Служащий Генерального штаба':5,
}
metadict_army['Редакционно-издательское отделение'] = {
'Военнослужащий Генерального штаба':24,
'Служащий Генерального штаба':13,
}
metadict_army['Северное направление'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Секретное отделение Генерального штаба'] = {
'Военнослужащий Генерального штаба':10,
'Служащий Генерального штаба':12,
}
metadict_army['Специальный отдел ГРУ'] = {
'Военнослужащий Генерального штаба':48,
'Служащий Генерального штаба':4,
}
metadict_army['Специальный отдел главного управления связи'] = {
'Военнослужащий Генерального штаба':13,
}
metadict_army['Уставной отдел военно-научного управления'] = {
'Военнослужащий Генерального штаба':31,
}
metadict_army['Финансовое отделение военно-топографического управления'] = {
'Военнослужащий Генерального штаба':4,
'Служащий Генерального штаба':2,
}
metadict_army['Финансовое отделение управления военных сообщений'] = {
'Военнослужащий Генерального штаба':10,
'Служащий Генерального штаба':3,
}
metadict_army['Финансовый отдел ГРУ'] = {
'Военнослужащий Генерального штаба':20,
'Служащий Генерального штаба':3,
}
metadict_army['Шифровальный отдел ГРУ'] = {
'Военнослужащий Генерального штаба':77,
'Служащий Генерального штаба':10,
}
metadict_army['Юго-восточное направление'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Юго-западное направление'] = {
'Военнослужащий Генерального штаба':25,
}
metadict_army['Южное направление'] = {
'Военнослужащий Генерального штаба':20,
}
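#----
# The tables in this module share one convention: every key of metadict_army
# names a unit, and its value maps component names to counts; a component that
# is itself a key of metadict_army is a sub-unit, anything else is a leaf post
# or item.  Below is a minimal sketch, assuming that convention, of flattening
# a unit into a roster; expand_unit is illustrative, not part of the original
# module.
from collections import Counter

def expand_unit(name, metadict=None, multiplier=1):
    # Recursively flatten a unit into Counter({leaf name: total count}).
    # Entries listed with a count of 0 are treated as absent by default;
    # cyclic unit definitions are not guarded against in this sketch.
    if metadict is None:
        metadict = metadict_army
    roster = Counter()
    for part, count in metadict.get(name, {}).items():
        total = count * multiplier
        if total <= 0:
            continue
        if part in metadict and part != name:
            roster.update(expand_unit(part, metadict, total))
        else:
            roster[part] += total
    return roster
# Usage sketch: expand_unit('Управление полка') totals the regiment's command
# posts down through its staff, chancellery and rear services.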
#----
# Commands of the combat arms and service branches:
metadict_army['Управление ракетных войск'] = {
'Главный штаб РВСН':1,
'Центральный узел связи':1,
'Центральный командный пункт РВСН':1,
'Запасной центральный командный пункт РВСН':1,
'Центр автоматической системы управления РВСН':1,
'Центральный научно-исследовательский институт РВСН':1,
'Управление эксплуатации ракетного вооружения':1,
'Главное управление ракетного вооружения':1,
'Инженерная служба РВСН':1,
'Управление связи':1,
'Академия РВСН':1,
'Тыл РВСН':1,
}
metadict_army['Обеспечение ракетных войск'] = {
'Арсенал РВСН':3,
}
#----
# Command of army groups, fronts, and military districts:
metadict_army['Управление группы армий'] = {
'Командование группы армий':1,
'Штаб группы армий':1,
'Канцелярия группы армий':1,
}
metadict_army['Штаб группы армий'] = {
'Управление ПВО группы армий':1,
'Управление вооружения группы армий':1,
'Управление войск связи группы армий':1,
'Разведывательное управление группы армий':1,
'Управление инженерных войск группы армий':1,
'Управление войск химической защиты группы армий':1,
'Управление медицинской службы группы армий':1,
}
#----
# Command of armies and fleets:
metadict_army['Управление Тандерхеда'] = {
'Командование Тандерхеда':1,
'Штурманская Тандерхеда':1,
'Штаб Тандерхеда':1,
'Тыл Тандерхеда':1,
'Канцелярия Тандерхеда':1,
}
metadict_army['Штурманская Тандерхеда'] = {
'Штурман Тандерхеда':1,
'Заместитель командира боевой части':1,
'Навигационная группа':1,
}
metadict_army['Управление армии'] = {
'Командование армии':1,
'Штаб армии':1,
'Тыл армии':1,
}
metadict_army['Штаб армии'] = {
'Начальник штаба армии':0,
'Оперативный отдел штаба армии':1,
'Отдел авиации армии':1,
'Отдел инженерных войск армии':1,
'Отдел ракетных войск и артиллерии армии':1,
'Отдел противовоздушной обороны армии':1,
'Канцелярия армии':1,
}
metadict_army['Оперативный отдел штаба армии'] = {
'Заместитель начальника штаба армии':0,
'Отдел связи армии':1,
'Отдел химической защиты армии':1,
'Отдел военной полиции армии':1,
'Разведывательный отдел армии':1,
'Топогеодезическая служба армии':1,
'Метеорологическая служба армии':1,
}
metadict_army['Канцелярия армии'] = {
'Начальник канцелярии армии':1,
'Отдел кадров армии':1,
'Шифровальный отдел армии':1,
'Административно-хозяйственный отдел армии':1,
'Отдел воспитательной работы армии':1,
'Отдел боевой подготовки армии':1,
'Военно-юридическая служба армии':1,
}
metadict_army['Тыл армии'] = {
'Начальник тыла армии':0,
'Заместитель начальника тыла армии':0,
'Квартирно-эксплуатационная служба армии':1,
'Продовольственная служба армии':1,
'Медицинская служба армии':1,
'Служба химической защиты армии':1,
'Финансовая служба армии':1,
'Вещевая служба армии':1,
'Служба ГСМ армии':1,
'Техническая часть армии':1,
'Офицерский клуб армии':1,
}
metadict_army['Техническая часть армии'] = {
'Начальник технической части армии':1,
'Бронетанковая служба армии':1,
'Автомобильная служба армии':1,
'Авиационная служба армии':1,
'Служба ракетно-артиллерийского вооружения армии':1,
}
metadict_army['Авиационная служба армии'] = {
'Начальник авиационной службы армии':0,
}
metadict_army['Автомобильная служба армии'] = {
'Начальник автомобильной службы армии':0,
}
metadict_army['Административно-хозяйственный отдел армии'] = {
'Начальник административно-хозяйственного отдела армии':1,
'Военнослужащий штаба армии':4,
'Служащий штаба армии':36,
}
metadict_army['Бронетанковая служба армии'] = {
'Начальник бронетанковой службы армии':0,
}
metadict_army['Вещевая служба армии'] = {
'Начальник вещевой службы армии':0,
}
metadict_army['Квартирно-эксплуатационная служба армии'] = {
'Начальник квартирно-эксплуатационной службы армии':1,
'Военнослужащий штаба армии':2,
'Служащий штаба армии':5,
}
metadict_army['Командование армии'] = {
'Командующий армией':1,
'Заместитель командующего армией':2,
}
metadict_army['Медицинская служба армии'] = {
'Начальник медицинской службы армии':0,
}
metadict_army['Метеорологическая служба армии'] = {
'Военнослужащий штаба армии':3,
}
metadict_army['Отдел кадров армии'] = {
'Начальник отдела кадров армии':1,
'Военнослужащий штаба армии':6,
'Служащий штаба армии':30,
}
metadict_army['Отдел химической защиты армии'] = {
'Начальник химической защиты армии':0,
}
metadict_army['Отдел авиации армии'] = {
'Начальник авиации армии':0,
}
metadict_army['Отдел боевой подготовки армии'] = {
'Начальник боевой подготовки армии':0,
}
metadict_army['Военно-юридическая служба армии'] = {
'Начальник военно-юридической службы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':12,
}
metadict_army['Отдел воспитательной работы армии'] = {
'Начальник воспитательной работы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':3,
}
metadict_army['Отдел инженерных войск армии'] = {
'Начальник инженерных войск армии':0,
}
metadict_army['Отдел разведки армии'] = {
'Начальник разведки армии':0,
}
metadict_army['Отдел военной полиции армии'] = {
'Начальник военной полиции армии':0,
}
metadict_army['Отдел противовоздушной обороны армии'] = {
'Начальник противовоздушной обороны армии':0,
}
metadict_army['Отдел ракетных войск и артиллерии армии'] = {
'Начальник ракетных войск и артиллерии армии':0,
}
metadict_army['Отдел связи армии'] = {
'Начальник связи армии':0,
}
metadict_army['Офицерский клуб армии'] = {
'Служащий штаба армии':3,
}
metadict_army['Продовольственная служба армии'] = {
'Начальник продовольственной службы армии':0,
}
metadict_army['Разведывательный отдел армии'] = {
'Начальник разведки армии':0,
}
metadict_army['Служба ГСМ армии'] = {
'Начальник службы ГСМ армии':0,
}
metadict_army['Служба химической защиты армии'] = {
'Начальник химической защиты армии':0,
}
metadict_army['Служба ракетно-артиллерийского вооружения армии'] = {
'Начальник ракетно-артиллерийской службы армии':0,
}
metadict_army['Топогеодезическая служба армии'] = {
'Начальник топогеодезической службы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':3,
}
metadict_army['Финансовая служба армии'] = {
'Начальник финансовой службы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':6,
}
metadict_army['Шифровальный отдел армии'] = {
'Начальник секретной части армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':12,
}
#----
# Command of divisions and flotillas:
metadict_army['Управление дивизии'] = {
'Командование дивизии':1,
'Штаб дивизии':1,
'Тыл дивизии':1,
}
metadict_army['Командование дивизии'] = {
'Командир дивизии':1,
'Заместитель командира дивизии по обучению':1,
'Заместитель командира дивизии по вооружению':1,
}
metadict_army['Штаб дивизии'] = {
'Начальник штаба дивизии':1,
'Оперативный отдел штаба дивизии':1,
'Начальник авиации дивизии':0,
'Начальник артиллерии дивизии':0,
'Начальник химической защиты дивизии':0,
'Начальник инженерной службы дивизии':0,
'Начальник медицинской службы дивизии':0,
'Начальник противовоздушной обороны дивизии':0,
'Канцелярия дивизии':1,
}
metadict_army['Оперативный отдел штаба дивизии'] = {
'Заместитель начальника штаба дивизии':1,
'Начальник связи дивизии':0,
'Начальник разведки дивизии':0,
'Начальник военной полиции дивизии':0,
'Начальник метеорологической службы дивизии':1,
'Начальник геодезической службы дивизии':1,
}
metadict_army['Канцелярия дивизии'] = {
'Начальник канцелярии дивизии':1,
'Группа делопроизводства дивизии':1,
'Группа шифровальщиков дивизии':1,
'Отдел боевой подготовки дивизии':1,
'Отдел воспитательной работы дивизии':1,
'Военно-юридическая служба дивизии':1,
}
metadict_army['Группа делопроизводства дивизии'] = {
'Начальник отдела кадров дивизии':1,
'Служащий штаба дивизии':35,
}
metadict_army['Группа шифровальщиков дивизии'] = {
'Начальник секретной части дивизии':1,
'Шифровальщик':9,
}
metadict_army['Тыл дивизии'] = {
'Начальник тыла дивизии':1,
'Заместитель начальника тыла дивизии':1,
'Квартирно-эксплуатационная служба дивизии':1,
'Продовольственная служба дивизии':1,
'Медицинская служба дивизии':1,
'Финансовая служба дивизии':1,
'Вещевая служба дивизии':1,
'Служба ГСМ дивизии':1,
'Техническая часть дивизии':1,
'Офицерский клуб дивизии':1,
}
metadict_army['Техническая часть дивизии'] = {
'Начальник технической части дивизии':1,
'Начальник склада боеприпасов дивизии':0,
'Начальник автомобильной службы дивизии':0,
'Начальник бронетанковой службы дивизии':0,
'Начальник авиационной службы дивизии':0,
}
metadict_army['Вещевая служба дивизии'] = {
'Начальник вещевой службы дивизии':0,
}
metadict_army['Квартирно-эксплуатационная служба дивизии'] = {
'Начальник квартирно-эксплуатационной службы дивизии':0,
}
metadict_army['Медицинская служба дивизии'] = {
'Начальник медицинской службы дивизии':0,
}
metadict_army['Отдел боевой подготовки дивизии'] = {
'Начальник боевой подготовки дивизии':0,
}
metadict_army['Военно-юридическая служба дивизии'] = {
'Начальник военно-юридической службы дивизии':0,
}
metadict_army['Отдел воспитательной работы дивизии'] = {
'Начальник воспитательной работы дивизии':0,
}
metadict_army['Офицерский клуб дивизии'] = {
'Служащий штаба дивизии':3,
}
metadict_army['Продовольственная служба дивизии'] = {
'Начальник продовольственной службы дивизии':0,
}
metadict_army['Служба ГСМ дивизии'] = {
'Начальник службы ГСМ дивизии':0,
}
metadict_army['Финансовая служба дивизии'] = {
'Начальник финансовой службы дивизии':1,
'Служащий штаба дивизии':7,
}
#----
# Command of regiments:
metadict_army['Управление полка'] = {
'Командование полка':1,
'Штаб полка':1,
'Тыл полка':1,
}
metadict_army['Командование полка'] = {
'Командир полка':1,
'Заместитель командира полка по обучению':1,
'Заместитель командира полка по вооружению':1,
}
metadict_army['Штаб полка'] = {
'Начальник штаба полка':0,
'Оперативный отдел штаба полка':1,
'Начальник авиации полка':0,
'Начальник артиллерии полка':0,
'Начальник инженерной службы полка':0,
'Начальник противовоздушной обороны полка':0,
'Канцелярия полка':1,
}
metadict_army['Оперативный отдел штаба полка'] = {
'Заместитель начальника штаба полка':0,
'Начальник связи полка':0,
'Начальник разведки полка':0,
'Начальник военной полиции полка':0,
'Начальник химической защиты полка':0,
}
metadict_army['Канцелярия полка'] = {
'Начальник канцелярии полка':1,
'Группа делопроизводства полка':1,
'Группа шифровальщиков полка':1,
}
metadict_army['Группа делопроизводства полка'] = {
'Начальник отдела кадров полка':1,
'Служащий штаба полка':8,
}
metadict_army['Группа шифровальщиков полка'] = {
'Начальник секретной части полка':1,
'Шифровальщик':2,
}
metadict_army['Тыл полка'] = {
'Начальник тыла полка':0,
'Техническая часть полка':1,
'Начальник службы ГСМ полка':0,
'Начальник вещевой службы полка':0,
'Начальник продовольственной службы полка':0,
'Начальник квартирно-эксплуатационной части полка':1,
'Начальник медицинской службы полка':0,
'Начальник финансовой службы полка':1,
}
metadict_army['Техническая часть полка'] = {
'Начальник технической части полка':1,
'Начальник склада боеприпасов полка':0,
'Начальник автомобильной службы полка':0,
'Начальник бронетанковой службы полка':0,
'Начальник авиационной службы полка':0,
}
#----
# Command of battalions, artillery battalions, and squadrons:
metadict_army['Управление ремонтного батальона'] = {
'Командование ремонтного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление разведывательного батальона'] = {
'Командование разведывательного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление понтонно-мостового батальона'] = {
'Командование понтонно-мостового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление переправочно-десантного батальона'] = {
'Командование переправочно-десантного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление пегасо-десантного батальона'] = {
'Командование пегасо-десантного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление мотострелкового батальона'] = {
'Командование мотострелкового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление механизированного батальона'] = {
'Командование механизированного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление медико-санитарного батальона'] = {
'Командование медико-санитарного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление инженерного батальона'] = {
'Командование инженерного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление инженерно-сапёрного батальона'] = {
'Командование инженерно-сапёрного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление инженерно-дорожного батальона'] = {
'Командование инженерно-дорожного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление трубопроводного батальона'] = {
'Командование трубопроводного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление десантно-штурмового батальона'] = {
'Командование десантно-штурмового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление штурмового батальона'] = {
'Командование штурмового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона снабжения'] = {
'Командование батальона снабжения':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона связи и охраны'] = {
'Командование батальона связи и охраны':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона связи'] = {
'Командование батальона связи':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона глубинной разведки'] = {
'Командование батальона глубинной разведки':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона химических войск'] = {
'Командование батальона химических войск':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона военной полиции'] = {
'Командование батальона военной полиции':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона ведомственной охраны'] = {
'Командование батальона ведомственной охраны':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона боевого обеспечения'] = {
'Командование батальона боевого обеспечения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона аэродромно-технического обеспечения'] = {
'Командование батальона аэродромно-технического обеспечения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона химической разведки'] = {
'Командование батальона химической разведки':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона РТР'] = {
'Командование батальона РТР':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона РЭБ'] = {
'Командование батальона РЭБ':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона РЭР'] = {
'Командование батальона РЭР':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона специального назначения'] = {
'Командование батальона специального назначения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона стальных рейнджеров'] = {
'Командование батальона стальных рейнджеров':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона аэрозольного противодействия'] = {
'Командование батальона аэрозольного противодействия':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона обеспечения'] = {
'Командование батальона обеспечения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона обслуживания'] = {
'Командование батальона обслуживания':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона'] = {
'Командование батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление дивизиона 120-мм миномётов (аэромобильное)'] = {
'Командование дивизиона 120-мм миномётов (аэромобильное)':1,
'Штаб дивизиона (аэромобильный)':1,
}
metadict_army['Управление дивизиона 120-мм гаубиц'] = {
'Командование дивизиона 120-мм гаубиц':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 120-мм САУ'] = {
'Командование дивизиона 120-мм САУ':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 150-мм САУ'] = {
'Командование дивизиона 150-мм САУ':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 150-мм гаубиц'] = {
'Командование дивизиона 150-мм гаубиц':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 120-мм РСЗО'] = {
'Командование дивизиона 120-мм РСЗО':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 220-мм РСЗО'] = {
'Командование дивизиона 220-мм РСЗО':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК ближнего действия'] = {
'Командование дивизиона ЗРК ближнего действия':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК малой дальности'] = {
'Командование дивизиона ЗРК малой дальности':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК средней дальности'] = {
'Командование дивизиона ЗРК средней дальности':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК большой дальности'] = {
'Командование дивизиона ЗРК большой дальности':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона тактических ракет'] = {
'Командование дивизиона тактических ракет':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона оперативно-тактических ракет'] = {
'Командование дивизиона оперативно-тактических ракет':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона артиллерийской разведки'] = {
'Командование дивизиона артиллерийской разведки':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона зенитной разведки'] = {
'Командование дивизиона зенитной разведки':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона (аэромобильное)'] = {
'Командование дивизиона (аэромобильное)':1,
'Штаб дивизиона (аэромобильный)':1,
}
metadict_army['Управление танкового батальона'] = {
'Командование танкового батальона':1,
'Штаб батальона':1,
}
metadict_army['Командование батальона'] = {
'Командир батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона РТР'] = {
'Командир батальона РТР':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона химической разведки'] = {
'Командир батальона химической разведки':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона РЭБ'] = {
'Командир батальона РЭБ':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона РЭР'] = {
'Командир батальона РЭР':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона аэродромно-технического обеспечения'] = {
'Командир батальона аэродромно-технического обеспечения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона боевого обеспечения'] = {
'Командир батальона боевого обеспечения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона ведомственной охраны'] = {
'Командир батальона ведомственной охраны':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона военной полиции'] = {
'Командир батальона военной полиции':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона химических войск'] = {
'Командир батальона химических войск':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона глубинной разведки'] = {
'Командир батальона глубинной разведки':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона связи'] = {
'Командир батальона связи':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона связи и охраны'] = {
'Командир батальона связи и охраны':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона снабжения'] = {
'Командир батальона снабжения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона обеспечения'] = {
'Командир батальона обеспечения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона обслуживания'] = {
'Командир батальона обслуживания':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование десантно-штурмового батальона'] = {
'Командир десантно-штурмового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование механизированного батальона'] = {
'Командир механизированного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование мотострелкового батальона'] = {
'Командир мотострелкового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование пегасо-десантного батальона'] = {
'Командир пегасо-десантного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование понтонно-мостового батальона'] = {
'Командир понтонно-мостового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование разведывательного батальона'] = {
'Командир разведывательного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование ремонтного батальона'] = {
'Командир ремонтного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование штурмового батальона'] = {
'Командир штурмового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование инженерно-дорожного батальона'] = {
'Командир инженерно-дорожного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование трубопроводного батальона'] = {
'Командир трубопроводного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование инженерно-сапёрного батальона'] = {
'Командир инженерно-сапёрного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование инженерного батальона'] = {
'Командир инженерного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование медико-санитарного батальона'] = {
'Командир медико-санитарного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона специального назначения'] = {
'Командир батальона специального назначения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона стальных рейнджеров'] = {
'Командир батальона стальных рейнджеров':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона аэрозольного противодействия'] = {
'Командир батальона аэрозольного противодействия':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование танкового батальона'] = {
'Командир танкового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
'Экипаж командирского танка':1,
}
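#----
# The battalion command tables above are near-identical: one named commander
# plus the same two deputies.  A minimal sketch of generating such tables
# instead of writing them by hand; make_battalion_command is hypothetical, and
# the genitive unit name must be passed in explicitly because Russian
# declension cannot be derived from the unit key automatically.
def make_battalion_command(genitive_name):
    return {
        'Командир ' + genitive_name: 1,
        'Заместитель командира батальона по обучению': 1,
        'Заместитель командира батальона по вооружению': 1,
    }
# e.g. make_battalion_command('батальона связи') reproduces the table above;
# the tank battalion entry stays hand-written, since it also attaches a
# command tank crew.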
metadict_army['Штаб батальона'] = {
'Начальник штаба батальона':1,
'Заместитель начальника штаба батальона':1,
'Начальник связи батальона':0,
'Начальник разведки батальона':0,
'Инструктор по химической защите':1,
'Делопроизводитель':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Штаб тылового батальона'] = {
'Начальник штаба батальона':1,
'Заместитель начальника штаба батальона':1,
'Начальник связи батальона':0,
'Инструктор по химической защите':1,
'Делопроизводитель':1,
}
metadict_army['Командование дивизиона'] = {
'Командир дивизиона':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм гаубиц'] = {
'Командир дивизиона 120-мм гаубиц':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм САУ'] = {
'Командир дивизиона 120-мм САУ':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 150-мм САУ'] = {
'Командир дивизиона 150-мм САУ':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 150-мм гаубиц'] = {
'Командир дивизиона 150-мм гаубиц':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм РСЗО'] = {
'Командир дивизиона 120-мм РСЗО':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 220-мм РСЗО'] = {
'Командир дивизиона 220-мм РСЗО':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК ближнего действия'] = {
'Командир дивизиона ЗРК ближнего действия':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК малой дальности'] = {
'Командир дивизиона ЗРК малой дальности':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК средней дальности'] = {
'Командир дивизиона ЗРК средней дальности':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК большой дальности'] = {
'Командир дивизиона ЗРК большой дальности':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона тактических ракет'] = {
'Командир дивизиона тактических ракет':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона оперативно-тактических ракет'] = {
'Командир дивизиона оперативно-тактических ракет':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона зенитной разведки'] = {
'Командир дивизиона зенитной разведки':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона артиллерийской разведки'] = {
'Командир дивизиона артиллерийской разведки':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм миномётов (аэромобильное)'] = {
'Командир дивизиона 120-мм миномётов (пегас)':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Штаб дивизиона'] = {
'Начальник штаба дивизиона':1,
'Делопроизводитель':1,
'Заместитель начальника штаба дивизиона':1,
'Инструктор по химической защите':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Управление тылового батальона'] = {
'Командир тылового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
'Инструктор по химической защите':1,
'Делопроизводитель':1,
}
metadict_army['Командование дивизиона (аэромобильное)'] = {
'Командир дивизиона (пегас)':1,
'Заместитель командира дивизиона по обучению (пегас)':1,
'Заместитель командира дивизиона по вооружению (пегас)':1,
}
metadict_army['Штаб дивизиона (аэромобильный)'] = {
'Начальник штаба дивизиона (пегас)':1,
'Делопроизводитель':1,
'Заместитель начальника штаба дивизиона (пегас)':1,
'Инструктор по химической защите':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Управление дивизиона шахтных МБР'] = {
'Командир дивизиона шахтных МБР':1,
'Заместитель командира дивизиона по вооружению':1,
'Инструктор по химической защите':1,
}
metadict_army['Управление эскадрильи бомбонесущих планёров'] = {
'Командир эскадрильи бомбонесущих планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи противотанковых планёров'] = {
'Командир эскадрильи противотанковых планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи противовоздушных планёров'] = {
'Командир эскадрильи противовоздушных планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи ракетных планёров'] = {
'Командир эскадрильи ракетных планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи транспортных планёров'] = {
'Командир эскадрильи транспортных планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи тяжёлых планёров'] = {
'Командир эскадрильи тяжёлых планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи истребителей-бомбардировщиков'] = {
'Командир эскадрильи истребителей-бомбардировщиков (пегас)':1,
'Штаб эскадрильи авиации':1,
}
metadict_army['Управление эскадрильи транспортной авиации'] = {
'Командир эскадрильи транспортной авиации (пегас)':1,
'Штаб эскадрильи авиации':1,
}
metadict_army['Штаб эскадрильи пегасов'] = {
'Заместитель командира эскадрильи (пегас)':1,
'Делопроизводитель':1,
'Инструктор по химической защите':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Штаб эскадрильи авиации'] = {
'Заместитель командира эскадрильи (пегас)':1,
'Делопроизводитель':1,
'Инструктор по химической защите':1,
}
#----
# Command of companies and batteries:
metadict_army['Управление тыловой роты'] = {
'Командир тыловой роты':1,
'Штаб роты':1,
}
metadict_army['Управление сапёрной роты'] = {
'Командир сапёрной роты':1,
'Штаб роты':1,
}
metadict_army['Управление сапёрной роты (аэромобильное)'] = {
'Командир сапёрной роты (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты тяжёлого оружия (аэромобильное)'] = {
'Командир роты тяжёлого оружия (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты транспортных планёров'] = {
'Командир роты транспортных планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты связи'] = {
'Командир роты связи':1,
'Штаб роты':1,
}
metadict_army['Управление роты разведывательных БПЛА'] = {
'Командир роты разведывательных БПЛА':1,
'Штаб роты':1,
}
metadict_army['Управление роты радиолокационной разведки'] = {
'Командир роты радиолокационной разведки':1,
'Штаб роты':1,
}
metadict_army['Управление роты химических войск'] = {
'Командир роты химических войск':1,
'Штаб роты':1,
}
metadict_army['Управление роты военной полиции'] = {
'Командир роты военной полиции':1,
'Штаб роты':1,
}
metadict_army['Управление роты ведомственной охраны'] = {
'Командир роты ведомственной охраны':1,
'Штаб роты':1,
}
metadict_army['Управление роты боевых роботов'] = {
'Командир роты боевых роботов':1,
'Штаб роты':1,
}
metadict_army['Управление роты планёров-бомбардировщиков'] = {
'Командир роты планёров-бомбардировщиков (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты противовоздушных планёров'] = {
'Командир роты противовоздушных планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты противотанковых планёров'] = {
'Командир роты противотанковых планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты ракетных планёров'] = {
'Командир роты ракетных планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление звена транспортной авиации'] = {
'Командир звена транспортной авиации':1,
}
metadict_army['Управление звена истребителей-бомбардировщиков'] = {
'Командир звена истребителей-бомбардировщиков':1,
}
metadict_army['Управление роты РЭР'] = {
'Командир роты РЭР':1,
'Штаб роты':1,
}
metadict_army['Управление роты РЭБ'] = {
'Командир роты РЭБ':1,
'Штаб роты':1,
}
metadict_army['Управление роты химической разведки (аэромобильное)'] = {
'Командир роты химической разведки (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты химической разведки'] = {
'Командир роты химической разведки':1,
'Штаб роты':1,
}
metadict_army['Управление роты РТР и РЭР'] = {
'Командир роты РТР и РЭР':1,
'Штаб роты':1,
}
metadict_army['Управление роты РТР'] = {
'Командир роты РТР':1,
'Штаб роты':1,
}
metadict_army['Управление ремонтной роты'] = {
'Командир ремонтной роты':1,
'Штаб роты':1,
}
metadict_army['Управление разведывательной роты (бронетехника)'] = {
'Командир разведывательной роты (бронетехника)':1,
'Штаб роты':1,
'Экипаж командирского БМП':1,
}
metadict_army['Управление понтонно-мостовой роты'] = {
'Командир понтонно-мостовой роты':1,
'Штаб роты':1,
}
metadict_army['Управление переправочно-десантной роты'] = {
'Командир переправочно-десантной роты':1,
'Штаб роты':1,
}
metadict_army['Управление медицинской роты'] = {
'Командир медицинской роты':1,
'Штаб роты':1,
}
metadict_army['Управление медицинской роты (аэромобильное)'] = {
'Командир медицинской роты (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление инженерной роты'] = {
'Командир инженерной роты':1,
'Штаб роты':1,
}
metadict_army['Управление инженерно-сапёрной роты'] = {
'Командир инженерно-сапёрной роты':1,
'Штаб роты':1,
}
metadict_army['Управление роты инструкторов'] = {
'Командир роты инструкторов':1,
'Штаб роты':1,
}
metadict_army['Управление роты стальных рейнджеров'] = {
'Командир роты стальных рейнджеров':1,
'Штаб роты специального назначения':1,
'Экипаж командирского БТР':1,
}
metadict_army['Управление снайперской роты'] = {
'Командир роты снайперов':1,
'Штаб роты специального назначения':1,
'Экипаж командирского БТР':1,
}
metadict_army['Управление штурмовой роты'] = {
'Командир штурмовой роты':1,
'Штаб роты специального назначения':1,
'Экипаж командирского БМП':1,
}
metadict_army['Управление штурмовой роты (без БМП)'] = {
'Командир штурмовой роты':1,
'Штаб роты специального назначения':1,
}
metadict_army['Управление пегасо-десантной роты'] = {
'Командир пегасо-десантной роты':1,
'Штаб роты':1,
}
metadict_army['Управление десантно-штурмовой роты'] = {
'Командир десантно-штурмовой роты':1,
'Штаб мотострелковой роты':1,
'Экипаж командирского БМД':1,
}
metadict_army['Управление мотострелковой роты'] = {
'Командир мотострелковой роты':1,
'Штаб мотострелковой роты':1,
'Экипаж командирского БМП':1,
}
metadict_army['Управление механизированной роты'] = {
'Командир механизированной роты':1,
'Штаб мотострелковой роты':1,
'Экипаж командирского БТР':1,
}
metadict_army['Управление танковой роты'] = {
'Командир танковой роты':1,
'Штаб танковой роты':1,
'Экипаж командирского танка':1,
}
metadict_army['Управление разведывательной роты (аэромобильное)'] = {
'Командир разведывательной роты (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление диверсионно-разведывательной роты'] = {
'Командир роты специального назначения (пегас)':1,
'Штаб роты специального назначения':1,
}
metadict_army['Управление роты посыльных'] = {
'Командир роты посыльных (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты обеспечения'] = {
'Командир роты обеспечения':1,
'Штаб роты':1,
}
metadict_army['Управление роты обслуживания'] = {
'Командир роты обслуживания':1,
'Штаб роты':1,
}
metadict_army['Управление роты снабжения'] = {
'Командир роты снабжения':1,
'Штаб роты':1,
}
metadict_army['Управление транспортной роты'] = {
'Командир транспортной роты':1,
'Штаб роты':1,
}
metadict_army['Управление батареи ЗРК ближнего действия'] = {
'Командир батареи ЗРК ближнего действия':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи ЗРК малой дальности'] = {
'Командир батареи ЗРК малой дальности':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи ЗРК средней дальности'] = {
'Командир батареи ЗРК средней дальности':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи ЗРК большой дальности'] = {
'Командир батареи ЗРК большой дальности':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи 120-мм РСЗО'] = {
'Командир батареи 120-мм РСЗО':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи 220-мм РСЗО'] = {
'Командир батареи 220-мм РСЗО':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи батальонных САУ'] = {
'Командир батареи батальонных САУ':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи полковых САУ'] = {
'Командир батареи полковых САУ':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи полковых гаубиц'] = {
'Командир батареи полковых гаубиц':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи дивизионных САУ'] = {
'Командир батареи дивизионных САУ':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи дивизионных гаубиц'] = {
'Командир батареи дивизионных гаубиц':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи артиллерийской разведки'] = {
'Командир батареи артиллерийской разведки':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи гаубиц'] = {
'Командир батареи гаубиц':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи тактических ракет'] = {
'Командир батареи тактических ракет':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи оперативно-тактических ракет'] = {
'Командир батареи оперативно-тактических ракет':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи связи'] = {
'Командир батареи связи':1,
'Штаб батареи':1,
}
metadict_army['Управление миномётной батареи'] = {
'Командир миномётной батареи':1,
'Штаб батареи':1,
}
metadict_army['Управление миномётной батареи (аэромобильное)'] = {
'Командир миномётной батареи (пегас)':1,
'Штаб батареи (аэромобильный)':1,
}
metadict_army['Управление противотанковой батареи (аэромобильное)'] = {
'Командир противотанковой батареи (пегас)':1,
'Штаб батареи (аэромобильный)':1,
}
metadict_army['Управление батареи (аэромобильное)'] = {
'Командир крылатой батареи (пегас)':1,
'Штаб батареи (аэромобильный)':1,
}
metadict_army['Управление роты мобильной АЭС'] = {
'Командир роты мобильной АЭС':1,
'Штаб роты':1,
}
metadict_army['Управление армейских складов'] = {
'Управляющий армейских складов':1,
'Заместитель управляющего армейских складов':1,
'Делопроизводитель':6,
}
metadict_army['Управление складского терминала'] = {
'Управляющий складского терминала':1,
'Делопроизводитель':1,
}
metadict_army['Штаб роты'] = {
'Заместитель командира роты':1,
'Управляющий хозяйством роты':1,
'Инструктор по боевой подготовке':1,
'Инструктор по вооружению':1,
'Санитар-инструктор':1,
'Имущество роты':1,
}
metadict_army['Штаб роты (аэромобильный)'] = {
'Заместитель командира роты (пегас)':1,
'Управляющий хозяйством роты (пегас)':1,
'Инструктор по боевой подготовке (пегас)':1,
'Инструктор по вооружению (пегас)':1,
'Санитар-инструктор':1,
'Станция ближней разведки':1,
'Лазерный прибор разведки':1,
'Лёгкий планёр (перевозка снаряжения)':5,
'Имущество роты (аэромобильной)':1,
}
metadict_army['Штаб мотострелковой роты'] = {
'Заместитель командира роты':1,
'Управляющий хозяйством роты':1,
'Инструктор по боевой подготовке':1,
'Инструктор экипажей бронемашин':1,
'Инструктор по вооружению':1,
'Санитар-инструктор':1,
'Имущество роты (мотострелковой)':1,
}
metadict_army['Штаб роты специального назначения'] = {
'Заместитель командира роты специального назначения':1,
'Управляющий хозяйством роты':1,
'Инструктор по боевой подготовке':1,
'Инструктор по вооружению':1,
'Полевой хирург (единорог)':1,
'Имущество роты (специального назначения)':1,
}
metadict_army['Штаб роты специального назначения (аэромобильный)'] = {
'Заместитель командира роты специального назначения (пегас)':1,
'Связист диверсионно-разведывательной группы (пегас)':1,
'Управляющий хозяйством роты (пегас)':1,
'Инструктор по боевой подготовке (пегас)':1,
'Инструктор по вооружению (пегас)':1,
'Полевой хирург (единорог)':1,
'КВ-радиостанция переносная':1,
'Станция ближней разведки':1,
'Лазерный прибор разведки':1,
'Лёгкий планёр (перевозка снаряжения)':6,
'Имущество роты (аэромобильной СпН)':1,
}
metadict_army['Штаб танковой роты'] = {
'Заместитель командира роты':1,
'Инструктор экипажей бронемашин':1,
'Санитар-инструктор':1,
'Имущество роты (танковой)':1,
}
metadict_army['Штаб батареи'] = {
'Заместитель командира батареи':1,
'Управляющий хозяйством батареи':1,
'Инструктор по боевой подготовке':1,
'Инструктор по вооружению':1,
'Санитар-инструктор':1,
'Имущество батареи':1,
}
metadict_army['Штаб батареи (аэромобильный)'] = {
'Заместитель командира роты (пегас)':1,
'Управляющий хозяйством роты (пегас)':1,
'Инструктор по боевой подготовке (пегас)':1,
'Инструктор по вооружению (пегас)':1,
'Санитар-инструктор':1,
'Имущество батареи (аэромобильной)':1,
}
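#----
# With this many hand-written tables, a misspelled cross-reference is the main
# hazard: a component name that matches no unit key silently becomes a leaf.
# A minimal checking aid, assuming leaf posts are exactly the names never
# defined as units; leaf_names is illustrative, not part of the module.
def leaf_names(metadict=None):
    # Component names that never appear as unit keys; scanning this sorted
    # list makes one-off typos in post names easy to spot.
    if metadict is None:
        metadict = metadict_army
    refs = {part for table in metadict.values() for part in table}
    return sorted(refs - set(metadict))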
#----
# Command of platoons:
metadict_army['Управление взвода РТР'] = {
'Командир взвода РТР':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода РТР (аэромобильное)'] = {
'Командир взвода РТР (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода РЭБ'] = {
'Командир взвода РЭБ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода РЭБ (аэромобильное)'] = {
'Командир взвода РЭБ (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода РЭР'] = {
'Командир взвода РЭР':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода РЭР (аэромобильное)'] = {
'Командир взвода РЭР (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода боевых роботов'] = {
'Командир взвода боевых роботов':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода ведомственной охраны'] = {
'Командир взвода ведомственной охраны':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода военной полиции'] = {
'Командир взвода военной полиции':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода военной полиции (аэромобильное)'] = {
'Командир взвода военной полиции (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода звукометрической разведки'] = {
'Командир взвода звукометрической разведки':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода инженерной разведки'] = {
'Командир взвода инженерной разведки':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода механизированных мостов'] = {
'Командир взвода механизированных мостов':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода мобильного энергоблока'] = {
'Командир взвода мобильного энергоблока':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода обеспечения'] = {
'Командир взвода обеспечения':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода обеспечения (аэромобильное)'] = {
'Командир взвода обеспечения (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода обслуживания'] = {
'Командир взвода обслуживания':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода планёров-бомбардировщиков (аэромобильное)'] = {
'Командир взвода планёров-бомбардировщиков (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода противовоздушных планёров (аэромобильное)'] = {
'Командир взвода противовоздушных планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода противотанковых планёров (аэромобильное)'] = {
'Командир взвода противотанковых планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода разведывательных БПЛА'] = {
'Командир взвода разведывательных БПЛА':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода ракетных планёров (аэромобильное)'] = {
'Командир взвода ракетных планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода роботов РЭБ'] = {
'Командир взвода роботов РЭБ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода связи'] = {
'Командир взвода связи':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода связи (аэромобильное)'] = {
'Командир взвода связи (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода снабжения'] = {
'Командир взвода снабжения':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода стальных рейнджеров'] = {
'Командир взвода стальных рейнджеров':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода транспортных планёров (аэромобильное)'] = {
'Командир взвода транспортных планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода тяжёлого оружия'] = {
'Командир взвода тяжёлого оружия':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода химической защиты'] = {
'Командир взвода химической защиты':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода химической разведки'] = {
'Командир взвода химической разведки':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода химической разведки (аэромобильное)'] = {
'Командир взвода химической разведки (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление гранатомётного взвода (аэромобильное)'] = {
'Командир гранатомётного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление десантно-штурмового взвода'] = {
'Командир десантно-штурмового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление диверсионно-разведывательного взвода (аэромобильное)'] = {
'Командир диверсионно-разведывательного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление инженерного взвода'] = {
'Командир инженерного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление инженерного взвода (аэромобильное)'] = {
'Командир инженерного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
}
metadict_army['Управление медицинского взвода'] = {
'Командир медицинского взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление медицинского взвода (аэромобильное)'] = {
'Командир медицинского взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление метеорологического взвода'] = {
'Командир метеорологического взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление механизированного взвода'] = {
'Командир механизированного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление миномётного взвода'] = {
'Командир миномётного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление миномётного взвода (аэромобильное)'] = {
'Командир миномётного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление миномётного взвода (бронетехника)'] = {
'Командир миномётного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление мотострелкового взвода'] = {
'Командир мотострелкового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 120-мм РСЗО'] = {
'Командир огневого взвода 120-мм РСЗО':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 220-мм РСЗО'] = {
'Командир огневого взвода 220-мм РСЗО':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 120-мм САУ'] = {
'Командир огневого взвода 120-мм САУ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 120-мм гаубиц'] = {
'Командир огневого взвода 120-мм гаубиц':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 150-мм САУ'] = {
'Командир огневого взвода 150-мм САУ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 150-мм гаубиц'] = {
'Командир огневого взвода 150-мм гаубиц':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огнемётного взвода'] = {
'Командир огнемётного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление пегасо-десантного взвода'] = {
'Командир пегасо-десантного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление переправочно-десантного взвода'] = {
'Командир переправочно-десантного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление понтонно-мостового взвода'] = {
'Командир понтонно-мостового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление противотанкового взвода (аэромобильное)'] = {
'Командир противотанкового взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление пулемётного взвода (аэромобильное)'] = {
'Командир пулемётного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление разведывательного взвода (аэромобильное)'] = {
'Командир разведывательного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление разведывательного взвода (бронетехника)'] = {
'Командир разведывательного взвода (бронетехника)':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление ремонтного взвода'] = {
'Командир ремонтного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление сапёрного взвода'] = {
'Командир сапёрного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление сапёрного взвода (аэромобильное)'] = {
'Командир сапёрного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление снайперского взвода (аэромобильное)'] = {
'Командир снайперского взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление танкового взвода'] = {
'Командир танкового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление топогеодезического взвода (аэромобильное)'] = {
'Командир топогеодезического взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление транспортного взвода'] = {
'Командир транспортного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Генеральный штаб'] = {
'Командование Генерального штаба':1,
'Главное оперативное управление':1,
'Главное разведывательное управление':1,
'Главное организационное управление':1,
'Военно-научное управление':1,
'Управление военных сообщений':1,
'Управление материально-технического обеспечения':1,
'Военно-топографическое управление':1,
'Главное управление связи':1,
'Центральный командный пункт Генерального штаба':1,
}
metadict_army['Главное оперативное управление'] = {
'Командование управления Генерального штаба':1,
'Оперативное управление направлений':1,
'Управление авиации и противовоздушной обороны':1,
'Самостоятельные отделы главного оперативного управления':1,
'Канцелярия Генерального штаба':1,
}
metadict_army['Оперативное управление направлений'] = {
'Южное направление':1,
'Северное направление':1,
'Юго-восточное направление':1,
'Юго-западное направление':1,
'Западно-восточное океанское направление':1,
'Направление внутренних военных округов':1,
'Отдел дислокации и оперативного учёта войск':1,
'Секретное отделение Генерального штаба':1,
}
metadict_army['Управление авиации и противовоздушной обороны'] = {
'Авиационный отдел':1,
'Отдел противовоздушной обороны':1,
'Секретное отделение Генерального штаба':1,
}
metadict_army['Самостоятельные отделы главного оперативного управления'] = {
'Отдел планирования оперативной подготовки':1,
'Отдел по изучению и подготовке театра военных действий':1,
'Морской отдел главного оперативного управления':1,
}
metadict_army['Главное разведывательное управление'] = {
'Командование управления Генерального штаба':1,
'Управление стратегической разведки':1,
'Управление информации сухопутных войск и военно-воздушных сил':1,
'Самостоятельные отделы главного разведывательного управления':1,
}
metadict_army['Управление стратегической разведки'] = {
'Зебриканский отдел':1,
'Восточно-океанский отдел':1,
}
metadict_army['Управление информации сухопутных войск и военно-воздушных сил'] = {
'Зебриканский отдел информации':1,
'Восточно-океанский отдел информации':1,
'Кристально-имперский отдел информации':1,
'Отдел информации военно-воздушных сил':1,
}
metadict_army['Самостоятельные отделы главного разведывательного управления'] = {
'Отдел военно-морской стратегической разведки':1,
'Специальный отдел ГРУ':1,
'Отдел военно-морской информации':1,
'Военно-технический отдел ГРУ':1,
'Отдел военной экономики и театра военных действий':1,
'Отдел радиоразведки и радиотехнической разведки':1,
'Отдел оперативно-агентурной разведки':1,
'Отдел кадров ГРУ':1,
'Отдел внешних сношений ГРУ':1,
'Финансовый отдел ГРУ':1,
'Шифровальный отдел ГРУ':1,
'Политический отдел ГРУ':1,
'Отдел оперативной техники':1,
'Особая группа ГРУ':1,
'Отделение обработки документов ГРУ':1,
'Отделение радиосвязи ГРУ':1,
'Учётно-архивное отделение ГРУ':1,
'Редакционно-издательское отделение':1,
'Административно-хозяйственное отделение ГРУ':1,
}
metadict_army['Главное организационное управление'] = {
'Командование управления Генерального штаба':1,
'Мобилизационное управление':1,
'Управление укомплектования и службы войск':1,
'Самостоятельные отделы главного организационного управления':1,
}
metadict_army['Организационное управление'] = {
'Организационно-плановый отдел':1,
'Отдел сухопутных войск':1,
'Отдел военно-воздушных сил':1,
'Отдел военно-морских сил':1,
'Отдел технических войск и специальных частей':1,
'Отдел органов управления':1,
'Отдел военно-учебных заведений':1,
'Отдел тыловых частей и учреждений':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Мобилизационное управление'] = {
'Мобилизационно-плановый отдел':1,
'Отдел укомплектования личным составом запаса':1,
'Отдел комплектования мехтранспортом и дорожно-строительной техникой':1,
'Отдел учета военнообязанных запаса и бронирования':1,
'Мобилизационно-инспекторский отдел':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Управление укомплектования и службы войск'] = {
'Отдел укомплектования':1,
'Отдел службы войск':1,
'Отдел по выездам за границу':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Самостоятельные отделы главного организационного управления'] = {
'Отдел учёта численности войск':1,
'Отдел по персональному учёту потерь сержантов и солдат':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Военно-научное управление'] = {
'Командование управления Генерального штаба':1,
'Отдел по исследованию вопросов военного искусства':1,
'Военно-исторический отдел':1,
'Уставной отдел военно-научного управления':1,
'Отдел по планированию военно-научной работы':1,
'Редакционно-издательское отделение':1,
'Военно-научная библиотека Генерального штаба':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Управление военных сообщений'] = {
'Командование управления Генерального штаба':1,
'Отдел планирования железнодорожных перевозок':1,
'Отдел воздушных перевозок':1,
'Отдел эшелонных перевозок':1,
'Отдел грузовых перевозок':1,
'Отдел водных перевозок':1,
'Мобилизационно-технический отдел':1,
'Отдел оперативных и мобилизационных перевозок':1,
'Отдел боевой подготовки управления военных сообщений':1,
'Военно-научный отдел управления военных сообщений':1,
'Инспекция управления военных сообщений':1,
'Отделение кадров управления военных сообщений':1,
'Финансовое отделение управления военных сообщений':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Управление материально-технического обеспечения'] = {
'Командование управления Генерального штаба':1,
'Мобилизационно-плановый отдел вооружения и материально-технического обеспечения':1,
'Отдел мобилизационного планирования артиллерийского вооружения':1,
'Отдел мобилизационного планирования бронетанкового вооружения':1,
'Отдел мобилизационного планирования военной техники':1,
'Отдел мобилизационного планирования авиатехники':1,
'Отдел мобилизационного планирования ГСМ':1,
'Отдел мобилизационного планирования интендантского и медицинского имущества':1,
'Отдел мобилизационного планирования техники морского флота':1,
'Отдел опытно-конструкторских и научно-исследовательских работ':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Военно-топографическое управление'] = {
'Командование управления Генерального штаба':1,
'Организационно-плановый отдел военно-топографического управления':1,
'Отдел топогеодезический и картографический':1,
'Отдел обеспечения войск топографическими картами и специально-техническим вооружением':1,
'Отдел кадров и топографической подготовки войск':1,
'Финансовое отделение военно-топографического управления':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Главное управление связи'] = {
'Командование управления Генерального штаба':1,
'Организационно-плановый отдел':1,
'Отдел обеспечения шифросвязью по направлениям':1,
'Инспекторский отдел главного управления связи':1,
'Специальный отдел главного управления связи':1,
'Отдел по эксплуатации оборудования связи':1,
'Отдел кадров, боевой и специальной подготовки главного управления связи':1,
'Секретное отделение Генерального штаба':1,
'Административно-хозяйственная часть Генерального штаба':1,
}
metadict_army['Самостоятельные отделы Генерального штаба'] = {
'Отдел связи Генерального штаба':1,
'Узел связи Генерального штаба':1,
'Отдел военной цензуры Генерального штаба':1,
'Политический отдел Генерального штаба':1,
'Отдел кадров Генерального штаба':1,
'Архивная служба Генерального штаба':1,
'Библиотека Генерального штаба':1,
'Административно-хозяйственный отдел Генерального штаба':1,
}
metadict_army['Авиационный отдел'] = {
'Военнослужащий Генерального штаба':23,
}
metadict_army['Административно-хозяйственная часть Генерального штаба'] = {
'Военнослужащий Генерального штаба':2,
'Служащий Генерального штаба':2,
}
metadict_army['Административно-хозяйственное отделение ГРУ'] = {
'Военнослужащий Генерального штаба':19,
'Служащий Генерального штаба':16,
}
metadict_army['Военно-исторический отдел'] = {
'Военнослужащий Генерального штаба':41,
'Служащий Генерального штаба':1,
}
metadict_army['Военно-научная библиотека Генерального штаба'] = {
'Военнослужащий Генерального штаба':2,
'Служащий Генерального штаба':12,
}
metadict_army['Военно-научный отдел управления военных сообщений'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Военно-технический отдел ГРУ'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Восточно-океанский отдел'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Восточно-океанский отдел информации'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Западно-восточное океанское направление'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Зебриканский отдел'] = {
'Военнослужащий Генерального штаба':25,
}
metadict_army['Зебриканский отдел информации'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Инспекторский отдел главного управления связи'] = {
'Военнослужащий Генерального штаба':30,
}
metadict_army['Инспекция управления военных сообщений'] = {
'Военнослужащий Генерального штаба':3,
}
metadict_army['Канцелярия Генерального штаба'] = {
'Военнослужащий Генерального штаба':20,
# The source repeated the key above with value 3; a civilian-staff entry is assumed here.
'Служащий Генерального штаба':3,
}
metadict_army['Командование Генерального штаба'] = {
'Начальник Генерального штаба':1,
'Заместитель начальника Генерального штаба':2,
}
metadict_army['Командование управления Генерального штаба'] = {
'Начальник управления Генерального штаба':1,
'Заместитель начальника управления Генерального штаба':2,
}
metadict_army['Кристально-имперский отдел информации'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Мобилизационно-инспекторский отдел'] = {
'Военнослужащий Генерального штаба':16,
}
metadict_army['Мобилизационно-плановый отдел'] = {
'Военнослужащий Генерального штаба':27,
}
metadict_army['Мобилизационно-плановый отдел вооружения и материально-технического обеспечения'] = {
'Военнослужащий Генерального штаба':43,
}
metadict_army['Мобилизационно-технический отдел'] = {
'Военнослужащий Генерального штаба':32,
}
metadict_army['Морской отдел главного оперативного управления'] = {
'Военнослужащий Генерального штаба':12,
}
metadict_army['Направление внутренних военных округов'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Организационно-плановый отдел'] = {
'Военнослужащий Генерального штаба':30,
}
metadict_army['Организационно-плановый отдел военно-топографического управления'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел информации военно-воздушных сил'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел боевой подготовки управления военных сообщений'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Особая группа ГРУ'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел внешних сношений ГРУ'] = {
'Военнослужащий Генерального штаба':10,
'Служащий Генерального штаба':3,
}
metadict_army['Отдел водных перевозок'] = {
'Военнослужащий Генерального штаба':17,
}
metadict_army['Отдел военно-морской информации'] = {
'Военнослужащий Генерального штаба':33,
'Служащий Генерального штаба':3,
}
metadict_army['Отдел военно-морской стратегической разведки'] = {
'Военнослужащий Генерального штаба':24,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел военной экономики и театра военных действий'] = {
'Военнослужащий Генерального штаба':40,
}
metadict_army['Отдел воздушных перевозок'] = {
'Военнослужащий Генерального штаба':30,
}
metadict_army['Отдел грузовых перевозок'] = {
'Военнослужащий Генерального штаба':25,
}
metadict_army['Отдел дислокации и оперативного учёта войск'] = {
'Военнослужащий Генерального штаба':10,
}
metadict_army['Отдел кадров ГРУ'] = {
'Военнослужащий Генерального штаба':40,
'Служащий Генерального штаба':22,
}
metadict_army['Отдел кадров и топографической подготовки войск'] = {
'Военнослужащий Генерального штаба':14,
'Служащий Генерального штаба':2,
}
metadict_army['Отделение кадров управления военных сообщений'] = {
'Военнослужащий Генерального штаба':3,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел кадров, боевой и специальной подготовки главного управления связи'] = {
'Военнослужащий Генерального штаба':16,
}
metadict_army['Отдел комплектования мехтранспортом и дорожно-строительной техникой'] = {
'Военнослужащий Генерального штаба':9,
}
metadict_army['Отдел мобилизационного планирования ГСМ'] = {
'Военнослужащий Генерального штаба':10,
}
metadict_army['Отдел мобилизационного планирования авиатехники'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Отдел мобилизационного планирования артиллерийского вооружения'] = {
'Военнослужащий Генерального штаба':19,
}
metadict_army['Отдел мобилизационного планирования бронетанкового вооружения'] = {
'Военнослужащий Генерального штаба':13,
}
metadict_army['Отдел мобилизационного планирования военной техники'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел мобилизационного планирования интендантского и медицинского имущества'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел мобилизационного планирования техники морского флота'] = {
'Военнослужащий Генерального штаба':12,
}
metadict_army['Отдел обеспечения войск топографическими картами и специально-техническим вооружением'] = {
'Военнослужащий Генерального штаба':20,
'Служащий Генерального штаба':13,
}
metadict_army['Отдел обеспечения шифросвязью по направлениям'] = {
'Военнослужащий Генерального штаба':130,
}
metadict_army['Отдел оперативно-агентурной разведки'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':2,
}
metadict_army['Отдел оперативной техники'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':5,
}
metadict_army['Отдел оперативных и мобилизационных перевозок'] = {
'Военнослужащий Генерального штаба':20,
'Служащий Генерального штаба':2,
}
metadict_army['Отдел опытно-конструкторских и научно-исследовательских работ'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Отдел планирования железнодорожных перевозок'] = {
'Военнослужащий Генерального штаба':19,
}
metadict_army['Отдел планирования оперативной подготовки'] = {
'Военнослужащий Генерального штаба':18,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел по выездам за границу'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':8,
}
metadict_army['Отдел по извучению и подготовке театра военных действий'] = {
'Военнослужащий Генерального штаба':28,
'Служащий Генерального штаба':1,
}
metadict_army['Отдел по исследованию вопросов военного искусства'] = {
'Военнослужащий Генерального штаба':34,
}
metadict_army['Отдел по персональному учёту потерь сержантов и солдат'] = {
'Военнослужащий Генерального штаба':17,
'Служащий Генерального штаба':52,
}
metadict_army['Отдел по планированию военно-научной работы'] = {
'Военнослужащий Генерального штаба':6,
}
metadict_army['Отдел по эксплуатации оборудования связи'] = {
'Военнослужащий Генерального штаба':9,
}
metadict_army['Отдел противовоздушной обороны'] = {
'Военнослужащий Генерального штаба':17,
}
metadict_army['Отдел радиоразведки и радиотехнической разведки'] = {
'Военнослужащий Генерального штаба':31,
'Служащий Генерального штаба':7,
}
metadict_army['Отдел топогеодезический и картографический'] = {
'Военнослужащий Генерального штаба':22,
'Служащий Генерального штаба':2,
}
metadict_army['Отдел укомплектования'] = {
'Военнослужащий Генерального штаба':29,
}
metadict_army['Отдел укомплектования личным составом запаса'] = {
'Военнослужащий Генерального штаба':27,
}
metadict_army['Отдел учета военнообязанных запаса и бронирования'] = {
'Военнослужащий Генерального штаба':14,
}
metadict_army['Отдел службы войск'] = {
'Военнослужащий Генерального штаба':29,
}
metadict_army['Отдел учёта численности войск'] = {
'Военнослужащий Генерального штаба':54,
'Служащий Генерального штаба':5,
}
metadict_army['Отдел эшелонных перевозок'] = {
'Военнослужащий Генерального штаба':17,
}
metadict_army['Отделение обработки документов ГРУ'] = {
'Военнослужащий Генерального штаба':32,
'Служащий Генерального штаба':45,
}
metadict_army['Отделение радиосвязи ГРУ'] = {
'Военнослужащий Генерального штаба':7,
'Служащий Генерального штаба':2,
}
metadict_army['Учётно-архивное отделение ГРУ'] = {
'Военнослужащий Генерального штаба':15,
'Служащий Генерального штаба':6,
}
metadict_army['Политический отдел ГРУ'] = {
'Военнослужащий Генерального штаба':15,
'Служащий Генерального штаба':5,
}
metadict_army['Редакционно-издательское отделение'] = {
'Военнослужащий Генерального штаба':24,
'Служащий Генерального штаба':13,
}
metadict_army['Северное направление'] = {
'Военнослужащий Генерального штаба':15,
}
metadict_army['Секретное отделение Генерального штаба'] = {
'Военнослужащий Генерального штаба':10,
'Служащий Генерального штаба':12,
}
metadict_army['Специальный отдел ГРУ'] = {
'Военнослужащий Генерального штаба':48,
'Служащий Генерального штаба':4,
}
metadict_army['Специальный отдел главного управления связи'] = {
'Военнослужащий Генерального штаба':13,
}
metadict_army['Уставной отдел военно-научного управления'] = {
'Военнослужащий Генерального штаба':31,
}
metadict_army['Финансовое отделение военно-топографического управления'] = {
'Военнослужащий Генерального штаба':4,
'Служащий Генерального штаба':2,
}
metadict_army['Финансовое отделение управления военных сообщений'] = {
'Военнослужащий Генерального штаба':10,
'Служащий Генерального штаба':3,
}
metadict_army['Финансовый отдел ГРУ'] = {
'Военнослужащий Генерального штаба':20,
'Служащий Генерального штаба':3,
}
metadict_army['Шифровальный отдел ГРУ'] = {
'Военнослужащий Генерального штаба':77,
'Служащий Генерального штаба':10,
}
metadict_army['Юго-восточное направление'] = {
'Военнослужащий Генерального штаба':20,
}
metadict_army['Юго-западное направление'] = {
'Военнослужащий Генерального штаба':25,
}
metadict_army['Южное направление'] = {
'Военнослужащий Генерального штаба':20,
}
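# Illustrative sketch, not part of the original data: metadict_army maps a
# unit name to its composition, where a value that is itself a key of
# metadict_army is a sub-unit and anything else is a leaf position. Assuming
# the hierarchy is acyclic (which holds for the entries above), a recursive
# walk can tally the total staffing per position:
from collections import Counter

def expand_unit(name, metadict, multiplier=1):
    """Recursively tally leaf positions for `name` and all its sub-units."""
    totals = Counter()
    for member, count in metadict.get(name, {}).items():
        if member in metadict:
            # Sub-unit: expand it, scaling by how many copies are attached.
            totals.update(expand_unit(member, metadict, multiplier * count))
        else:
            # Leaf position (no definition of its own).
            totals[member] += multiplier * count
    return totals

# Kept commented out so importing this data module has no side effects:
# print(sum(expand_unit('Генеральный штаб', metadict_army).values()))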
#----
# Headquarters of the arms and branches of service:
metadict_army['Управление ракетных войск'] = {
'Главный штаб РВСН':1,
'Центральный узел связи':1,
'Центральный командный пункт РВСН':1,
'Запасной центральный командный пункт РВСН':1,
'Центр автоматической системы управления РВСН':1,
'Центральный научно-исследовательский институт РВСН':1,
'Управление эксплуатации ракетного вооружения':1,
'Главное управление ракетного вооружения':1,
'Инженерная служба РВСН':1,
'Управление связи':1,
'Академия РВСН':1,
'Тыл РВСН':1,
}
metadict_army['Обеспечение ракетных войск'] = {
'Арсенал РВСН':3,
}
#----
# Headquarters of army groups, fronts, and military districts:
metadict_army['Управление группы армий'] = {
'Командование группы армий':1,
'Штаб группы армий':1,
'Канцелярия группы армий':1,
}
metadict_army['Штаб группы армий'] = {
'Управление ПВО группы армий':1,
'Управление вооружения группы армий':1,
'Управление войск связи группы армий':1,
'Разведывательное управление группы армий':1,
'Управление инженерных войск группы армий':1,
'Управление войск химической защиты группы армий':1,
'Управление медицинской службы группы армий':1,
}
#----
# Headquarters of armies and fleets:
metadict_army['Управление Тандерхеда'] = {
'Командование Тандерхеда':1,
'Штурманская Тандерхеда':1,
'Штаб Тандерхеда':1,
'Тыл Тандерхеда':1,
'Канцелярия Тандерхеда':1,
}
metadict_army['Штурманская Тандерхеда'] = {
# 'Штурман Тандерхеда' is an assumption: the source carried an anonymized '<NAME>' placeholder here.
'Штурман Тандерхеда':1,
'Заместитель командира боевой части':1,
'Навигационная группа':1,
}
metadict_army['Управление армии'] = {
'Командование армии':1,
'Штаб армии':1,
'Тыл армии':1,
}
metadict_army['Штаб армии'] = {
'Начальник штаба армии':0,
'Оперативный отдел штаба армии':1,
'Отдел авиации армии':1,
'Отдел инженерных войск армии':1,
'Отдел ракетных войск и артиллерии армии':1,
'Отдел противовоздушной обороны армии':1,
'Канцелярия армии':1,
}
metadict_army['Оперативный отдел штаба армии'] = {
'Заместитель начальника штаба армии':0,
'Отдел связи армии':1,
'Отдел химической защиты армии':1,
'Отдел военной полиции армии':1,
'Разведывательный отдел армии':1,
'Топогеодезическая служба армии':1,
'Метеорологическая служба армии':1,
}
metadict_army['Канцелярия армии'] = {
'Начальник канцелярии армии':1,
'Отдел кадров армии':1,
'Шифровальный отдел армии':1,
'Административно-хозяйственный отдел армии':1,
'Отдел воспитательной работы армии':1,
'Отдел боевой подготовки армии':1,
'Военно-юридическая служба армии':1,
}
metadict_army['Тыл армии'] = {
'Начальник тыла армии':0,
'Заместитель начальника тыла армии':0,
'Квартирно-эксплуатационная служба армии':1,
'Продовольственная служба армии':1,
'Медицинская служба армии':1,
'Служба химической защиты армии':1,
'Финансовая служба армии':1,
'Вещевая служба армии':1,
'Служба ГСМ армии':1,
'Техническая часть армии':1,
'Офицерский клуб армии':1,
}
metadict_army['Техническая часть армии'] = {
'Начальник технической части армии':1,
'Бронетанковая служба армии':1,
'Автомобильная служба армии':1,
'Авиационная служба армии':1,
'Служба ракетно-артиллерийского вооружения армии':1,
}
metadict_army['Авиационная служба армии'] = {
'Начальник авиационной службы армии':0,
}
metadict_army['Автомобильная служба армии'] = {
'Начальник автомобильной службы армии':0,
}
metadict_army['Административно-хозяйственный отдел армии'] = {
'Начальник административно-хозяйственного отдела армии':1,
'Военнослужащий штаба армии':4,
'Служащий штаба армии':36,
}
metadict_army['Бронетанковая служба армии'] = {
'Начальник бронетанковой службы армии':0,
}
metadict_army['Вещевая служба армии'] = {
'Начальник вещевой службы армии':0,
}
metadict_army['Квартирно-эксплуатационная служба армии'] = {
'Начальник квартирно-эксплуатационной службы армии':1,
'Военнослужащий штаба армии':2,
'Служащий штаба армии':5,
}
metadict_army['Командование армии'] = {
'Командующий армией':1,
'Заместитель командующего армией':2,
}
metadict_army['Медицинская служба армии'] = {
'Начальник медицинской службы армии':0,
}
metadict_army['Метеорологическая служба армии'] = {
'Военнослужащий штаба армии':3,
}
metadict_army['Отдел кадров армии'] = {
'Начальник отдела кадров армии':1,
'Военнослужащий штаба армии':6,
'Служащий штаба армии':30,
}
metadict_army['Отдел химической защиты армии'] = {
'Начальник химической защиты армии':0,
}
metadict_army['Отдел авиации армии'] = {
'Начальник авиации армии':0,
}
metadict_army['Отдел боевой подготовки армии'] = {
'Начальник боевой подготовки армии':0,
}
metadict_army['Военно-юридическая служба армии'] = {
'Начальник военно-юридической службы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':12,
}
metadict_army['Отдел воспитательной работы армии'] = {
'Начальник воспитательной работы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':3,
}
metadict_army['Отдел инженерных войск армии'] = {
'Начальник инженерных войск армии':0,
}
metadict_army['Отдел разведки армии'] = {
'Начальник разведки армии':0,
}
metadict_army['Отдел военной полиции армии'] = {
'Начальник военной полиции армии':0,
}
metadict_army['Отдел противовоздушной обороны армии'] = {
'Начальник противовоздушной обороны армии':0,
}
metadict_army['Отдел ракетных войск и артиллерии армии'] = {
'Начальник ракетных войск и артиллерии армии':0,
}
metadict_army['Отдел связи армии'] = {
'Начальник связи армии':0,
}
metadict_army['Офицерский клуб армии'] = {
'Служащий штаба армии':3,
}
metadict_army['Продовольственная служба армии'] = {
'Начальник продовольственной службы армии':0,
}
metadict_army['Разведывательный отдел армии'] = {
'Начальник разведки армии':0,
}
metadict_army['Служба ГСМ армии'] = {
'Начальник службы ГСМ армии':0,
}
metadict_army['Служба химической защиты армии'] = {
'Начальник химической защиты армии':0,
}
metadict_army['Служба ракетно-артиллерийского вооружения армии'] = {
'Начальник ракетно-артиллерийской службы армии':0,
}
metadict_army['Топогеодезическая служба армии'] = {
'Начальник топогеодезической службы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':3,
}
metadict_army['Финансовая служба армии'] = {
'Начальник финансовой службы армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':6,
}
metadict_army['Шифровальный отдел армии'] = {
'Начальник секретной части армии':1,
'Военнослужащий штаба армии':3,
'Служащий штаба армии':12,
}
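# Illustrative consistency check, not part of the original data. Leaf roles
# ('Командир полка' and the like) are deliberately absent as keys, so missing
# definitions are not errors here; what is suspicious is a unit that is
# defined but never referenced by any other unit. Apart from intended roots
# such as 'Генеральный штаб', such entries are usually renamed or orphaned.
def unreferenced_units(metadict):
    """Return defined unit names that no other unit lists as a member."""
    referenced = {member for members in metadict.values() for member in members}
    return sorted(name for name in metadict if name not in referenced)

# Kept commented out so importing this data module has no side effects:
# for name in unreferenced_units(metadict_army):
#     print('defined but never referenced:', name)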
#----
# Headquarters of divisions and flotillas:
metadict_army['Управление дивизии'] = {
'Командование дивизии':1,
'Штаб дивизии':1,
'Тыл дивизии':1,
}
metadict_army['Командование дивизии'] = {
'Командир дивизии':1,
'Заместитель командира дивизии по обучению':1,
'Заместитель командира дивизии по вооружению':1,
}
metadict_army['Штаб дивизии'] = {
'Начальник штаба дивизии':1,
'Оперативный отдел штаба дивизии':1,
'Начальник авиации дивизии':0,
'Начальник артиллерии дивизии':0,
'Начальник химической защиты дивизии':0,
'Начальник инженерной службы дивизии':0,
'Начальник медицинской службы дивизии':0,
'Начальник противовоздушной обороны дивизии':0,
'Канцелярия дивизии':1,
}
metadict_army['Оперативный отдел штаба дивизии'] = {
'Заместитель начальника штаба дивизии':1,
'Начальник связи дивизии':0,
'Начальник разведки дивизии':0,
'Начальник военной полиции дивизии':0,
'Начальник метеорологической службы дивизии':1,
'Начальник геодезической службы дивизии':1,
}
metadict_army['Канцелярия дивизии'] = {
'Начальник канцелярии дивизии':1,
'Группа делопроизводства дивизии':1,
'Группа шифровальщиков дивизии':1,
'Отдел боевой подготовки дивизии':1,
'Отдел воспитательной работы дивизии':1,
'Военно-юридическая служба дивизии':1,
}
metadict_army['Группа делопроизводства дивизии'] = {
'Начальник отдела кадров дивизии':1,
'Служащий штаба дивизии':35,
}
metadict_army['Группа шифровальщиков дивизии'] = {
'Начальник секретной части дивизии':1,
'Шифровальщик':9,
}
metadict_army['Тыл дивизии'] = {
'Начальник тыла дивизии':1,
'Заместитель начальника тыла дивизии':1,
'Квартирно-эксплуатационная служба дивизии':1,
'Продовольственная служба дивизии':1,
'Медицинская служба дивизии':1,
'Финансовая служба дивизии':1,
'Вещевая служба дивизии':1,
'Служба ГСМ дивизии':1,
'Техническая часть дивизии':1,
'Офицерский клуб дивизии':1,
}
metadict_army['Техническая часть дивизии'] = {
'Начальник технической части дивизии':1,
'Начальник склада боеприпасов дивизии':0,
'Начальник автомобильной службы дивизии':0,
'Начальник бронетанковой службы дивизии':0,
'Начальник авиационной службы дивизии':0,
}
metadict_army['Вещевая служба дивизии'] = {
'Начальник вещевой службы дивизии':0,
}
metadict_army['Квартирно-эксплуатационная служба дивизии'] = {
'Начальник квартирно-эксплуатационной службы дивизии':0,
}
metadict_army['Медицинская служба дивизии'] = {
'Начальник медицинской службы дивизии':0,
}
metadict_army['Отдел боевой подготовки дивизии'] = {
'Начальник боевой подготовки дивизии':0,
}
metadict_army['Военно-юридическая служба дивизии'] = {
'Начальник военно-юридической службы дивизии':0,
}
metadict_army['Отдел воспитательной работы дивизии'] = {
'Начальник воспитательной работы дивизии':0,
}
metadict_army['Офицерский клуб дивизии'] = {
'Служащий штаба дивизии':3,
}
metadict_army['Продовольственная служба дивизии'] = {
'Начальник продовольственной службы дивизии':0,
}
metadict_army['Служба ГСМ дивизии'] = {
'Начальник службы ГСМ дивизии':0,
}
metadict_army['Финансовая служба дивизии'] = {
'Начальник финансовой службы дивизии':1,
'Служащий штаба дивизии':7,
}
#----
# Headquarters of regiments:
metadict_army['Управление полка'] = {
'Командование полка':1,
'Штаб полка':1,
'Тыл полка':1,
}
metadict_army['Командование полка'] = {
'Командир полка':1,
'Заместитель командира полка по обучению':1,
'Заместитель командира полка по вооружению':1,
}
metadict_army['Штаб полка'] = {
'Начальник штаба полка':0,
'Оперативный отдел штаба полка':1,
'Начальник авиации полка':0,
'Начальник артиллерии полка':0,
'Начальник инженерной службы полка':0,
'Начальник противовоздушной обороны полка':0,
'Канцелярия полка':1,
}
metadict_army['Оперативный отдел штаба полка'] = {
'Заместитель начальника штаба полка':0,
'Начальник связи полка':0,
'Начальник разведки полка':0,
'Начальник военной полиции':0,
'Начальник химической защиты полка':0,
}
metadict_army['Канцелярия полка'] = {
'Начальник канцелярии полка':1,
'Группа делопроизводства полка':1,
'Группа шифровальщиков полка':1,
}
metadict_army['Группа делопроизводства полка'] = {
'Начальник отдела кадров полка':1,
'Служащий штаба полка':8,
}
metadict_army['Группа шифровальщиков полка'] = {
'Начальник секретной части полка':1,
'Шифровальщик':2,
}
metadict_army['Тыл полка'] = {
'Начальник тыла полка':0,
'Техническая часть полка':1,
'Начальник службы ГСМ полка':0,
'Начальник вещевой службы полка':0,
'Начальник продовольственной службы полка':0,
'Начальник квартирно-эксплуатационной части полка':1,
'Начальник медицинской службы полка':0,
'Начальник финансовой службы полка':1,
}
metadict_army['Техническая часть полка'] = {
'Начальник технической части полка':1,
'Начальник склада боеприпасов полка':0,
'Начальник автомобильной службы полка':0,
'Начальник бронетанковой службы полка':0,
'Начальник авиационной службы полка':0,
}
#----
# Headquarters of battalions, artillery battalions, and air squadrons:
metadict_army['Управление ремонтного батальона'] = {
'Командование ремонтного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление разведывательного батальона'] = {
'Командование разведывательного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление понтонно-мостового батальона'] = {
'Командование понтонно-мостового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление переправочно-десантного батальона'] = {
'Командование переправочно-десантного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление пегасо-десантного батальона'] = {
'Командование пегасо-десантного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление мотострелкового батальона'] = {
'Командование мотострелкового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление механизированного батальона'] = {
'Командование механизированного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление медико-санитарного батальона'] = {
'Командование медико-санитарного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление инженерного батальона'] = {
'Командование инженерного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление инженерно-сапёрного батальона'] = {
'Командование инженерно-сапёрного батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление инженерно-дорожного батальона'] = {
'Командование инженерно-дорожного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление трубопроводного батальона'] = {
'Командование трубопроводного батальона':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление десантно-штурмового батальона'] = {
'Командование десантно-штурмового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление штурмового батальона'] = {
'Командование штурмового батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона снабжения'] = {
'Командование батальона снабжения':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона связи и охраны'] = {
'Командование батальона связи и охраны':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона связи'] = {
'Командование батальона связи':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона глубинной разведки'] = {
'Командование батальона глубинной разведки':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона химических войск'] = {
'Командование батальона химических войск':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона военной полиции'] = {
'Командование батальона военной полиции':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона ведомственной охраны'] = {
'Командование батальона ведомственной охраны':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона боевого обеспечения'] = {
'Командование батальона боевого обеспечения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона аэродромно-технического обеспечения'] = {
'Командование батальона аэродромно-технического обеспечения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона химической разведки'] = {
'Командование батальона химической разведки':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона РТР'] = {
'Командование батальона РТР':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона РЭБ'] = {
'Командование батальона РЭБ':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона РЭР'] = {
'Командование батальона РЭР':1,
'Штаб тылового батальона':1,
}
metadict_army['Управление батальона специального назначения'] = {
'Командование батальона специального назначения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона стальных рейнджеров'] = {
'Командование батальона стальных рейнджеров':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона аэрозольного противодействия'] = {
'Командование батальона аэрозольного противодействия':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона обеспечения'] = {
'Командование батальона обеспечения':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона обслуживания'] = {
'Командование батальона обслуживания':1,
'Штаб батальона':1,
}
metadict_army['Управление батальона'] = {
'Командование батальона':1,
'Штаб батальона':1,
}
metadict_army['Управление дивизиона 120-мм миномётов (аэромобильное)'] = {
'Командование дивизиона 120-мм миномётов (аэромобильное)':1,
'Штаб дивизиона (аэромобильный)':1,
}
metadict_army['Управление дивизиона 120-мм гаубиц'] = {
'Командование дивизиона 120-мм гаубиц':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 120-мм САУ'] = {
'Командование дивизиона 120-мм САУ':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 150-мм САУ'] = {
'Командование дивизиона 150-мм САУ':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 150-мм гаубиц'] = {
'Командование дивизиона 150-мм гаубиц':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 120-мм РСЗО'] = {
'Командование дивизиона 120-мм РСЗО':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона 220-мм РСЗО'] = {
'Командование дивизиона 220-мм РСЗО':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК ближнего действия'] = {
'Командование дивизиона ЗРК ближнего действия':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК малой дальности'] = {
'Командование дивизиона ЗРК малой дальности':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК средней дальности'] = {
'Командование дивизиона ЗРК средней дальности':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона ЗРК большой дальности'] = {
'Командование дивизиона ЗРК большой дальности':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона тактических ракет'] = {
'Командование дивизиона тактических ракет':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона оперативно-тактических ракет'] = {
'Командование дивизиона оперативно-тактических ракет':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона артиллерийской разведки'] = {
'Командование дивизиона артиллерийской разведки':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона зенитной разведки'] = {
'Командование дивизиона зенитной разведки':1,
'Штаб дивизиона':1,
}
metadict_army['Управление дивизиона (аэромобильное)'] = {
'Командование дивизиона (аэромобильное)':1,
'Штаб дивизиона (аэромобильный)':1,
}
metadict_army['Управление танкового батальона'] = {
'Командование танкового батальона':1,
'Штаб батальона':1,
}
metadict_army['Командование батальона'] = {
'Командир батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона РТР'] = {
'Командир батальона РТР':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона химической разведки'] = {
'Командир батальона химической разведки':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона РЭБ'] = {
'Командир батальона РЭБ':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона РЭР'] = {
'Командир батальона РЭР':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона аэродромно-технического обеспечения'] = {
'Командир батальона аэродромно-технического обеспечения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона боевого обеспечения'] = {
'Командир батальона боевого обеспечения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона ведомственной охраны'] = {
'Командир батальона ведомственной охраны':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона военной полиции'] = {
'Командир батальона военной полиции':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона химических войск'] = {
'Командир батальона химических войск':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона глубинной разведки'] = {
'Командир батальона глубинной разведки':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона связи'] = {
'Командир батальона связи':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона связи и охраны'] = {
'Командир батальона связи и охраны':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона снабжения'] = {
'Командир батальона снабжения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона обеспечения'] = {
'Командир батальона обеспечения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона обслуживания'] = {
'Командир батальона обслуживания':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование десантно-штурмового батальона'] = {
'Командир десантно-штурмового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование механизированного батальона'] = {
'Командир механизированного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование мотострелкового батальона'] = {
'Командир мотострелкового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование пегасо-десантного батальона'] = {
'Командир пегасо-десантного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование понтонно-мостового батальона'] = {
'Командир понтонно-мостового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование разведывательного батальона'] = {
'Командир разведывательного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование ремонтного батальона'] = {
'Командир ремонтного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование штурмового батальона'] = {
'Командир штурмового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование инженерно-дорожного батальона'] = {
'Командир инженерно-дорожного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование трубопроводного батальона'] = {
'Командир трубопроводного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование инженерно-сапёрного батальона'] = {
'Командир инженерно-сапёрного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование инженерного батальона'] = {
'Командир инженерного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование медико-санитарного батальона'] = {
'Командир медико-санитарного батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона специального назначения'] = {
'Командир батальона специального назначения':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона стальных рейнджеров'] = {
'Командир батальона стальных рейнджеров':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование батальона аэрозольного противодействия'] = {
'Командир батальона аэрозольного противодействия':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
}
metadict_army['Командование танкового батальона'] = {
'Командир танкового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
'Экипаж командирского танка':1,
}
metadict_army['Штаб батальона'] = {
'Начальник штаба батальона':1,
'Заместитель начальника штаба батальона':1,
'Начальник связи батальона':0,
'Начальник разведки батальона':0,
'Инструктор по химической защите':1,
'Делопроизводитель':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Штаб тылового батальона'] = {
'Начальник штаба батальона':1,
'Заместитель начальника штаба батальона':1,
'Начальник связи батальона':0,
'Инструктор по химической защите':1,
'Делопроизводитель':1,
}
metadict_army['Командование дивизиона'] = {
'Командир дивизиона':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм гаубиц'] = {
'Командир дивизиона 120-мм гаубиц':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм САУ'] = {
'Командир дивизиона 120-мм САУ':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 150-мм САУ'] = {
'Командир дивизиона 150-мм САУ':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 150-мм гаубиц'] = {
'Командир дивизиона 150-мм гаубиц':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм РСЗО'] = {
'Командир дивизиона 120-мм РСЗО':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 220-мм РСЗО'] = {
'Командир дивизиона 220-мм РСЗО':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК ближнего действия'] = {
'Командир дивизиона ЗРК ближнего действия':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК малой дальности'] = {
'Командир дивизиона ЗРК малой дальности':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК средней дальности'] = {
'Командир дивизиона ЗРК средней дальности':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона ЗРК большой дальности'] = {
'Командир дивизиона ЗРК большой дальности':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона тактических ракет'] = {
'Командир дивизиона тактических ракет':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона оперативно-тактических ракет'] = {
'Командир дивизиона оперативно-тактических ракет':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона зенитной разведки'] = {
'Командир дивизиона зенитной разведки':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона артиллерийской разведки'] = {
'Командир дивизиона артиллерийской разведки':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Командование дивизиона 120-мм миномётов (аэромобильное)'] = {
'Командир дивизиона 120-мм миномётов (пегас)':1,
'Заместитель командира дивизиона по обучению':1,
'Заместитель командира дивизиона по вооружению':1,
}
metadict_army['Штаб дивизиона'] = {
'Начальник штаба дивизиона':1,
'Делопроизводитель':1,
'Заместитель начальника штаба дивизиона':1,
'Инструктор по химической защите':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Управление тылового батальона'] = {
'Командир тылового батальона':1,
'Заместитель командира батальона по обучению':1,
'Заместитель командира батальона по вооружению':1,
'Инструктор по химической защите':1,
'Делопроизводитель':1,
}
metadict_army['Командование дивизиона (аэромобильное)'] = {
'Командир дивизиона (пегас)':1,
'Заместитель командира дивизиона по обучению (пегас)':1,
'Заместитель командира дивизиона по вооружению (пегас)':1,
}
metadict_army['Штаб дивизиона (аэромобильный)'] = {
'Начальник штаба дивизиона (пегас)':1,
'Делопроизводитель':1,
'Заместитель начальника штаба дивизиона (пегас)':1,
'Инструктор по химической защите':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Управление дивизиона шахтных МБР'] = {
'Командир дивизиона шахтных МБР':1,
'Заместитель командира дивизиона по вооружению':1,
'Инструктор по химической защите':1,
}
metadict_army['Управление эскадрильи бомбонесущих планёров'] = {
'Командир эскадрильи бомбонесущих планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи противотанковых планёров'] = {
'Командир эскадрильи противотанковых планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи противовоздушных планёров'] = {
'Командир эскадрильи противовоздушных планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи ракетных планёров'] = {
'Командир эскадрильи ракетных планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи транспортных планёров'] = {
'Командир эскадрильи транспортных планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи тяжёлых планёров'] = {
'Командир эскадрильи тяжёлых планёров (пегас)':1,
'Штаб эскадрильи пегасов':1,
}
metadict_army['Управление эскадрильи истребителей-бомбардировщиков'] = {
'Командир эскадрильи истребителей-бомбардировщиков (пегас)':1,
'Штаб эскадрильи авиации':1,
}
metadict_army['Управление эскадрильи транспортной авиации'] = {
'Командир эскадрильи транспортной авиации (пегас)':1,
'Штаб эскадрильи авиации':1,
}
metadict_army['Штаб эскадрильи пегасов'] = {
'Заместитель командира эскадрильи (пегас)':1,
'Делопроизводитель':1,
'Инструктор по химической защите':1,
'Полевой хирург (единорог)':1,
}
metadict_army['Штаб эскадрильи авиации'] = {
'Заместитель командира эскадрильи (пегас)':1,
'Делопроизводитель':1,
'Инструктор по химической защите':1,
}
#----
# Headquarters of companies and batteries:
metadict_army['Управление тыловой роты'] = {
'Командир тыловой роты':1,
'Штаб роты':1,
}
metadict_army['Управление сапёрной роты'] = {
'Командир сапёрной роты':1,
'Штаб роты':1,
}
metadict_army['Управление сапёрной роты (аэромобильное)'] = {
'Командир сапёрной роты (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты тяжёлого оружия (аэромобильное)'] = {
'Командир роты тяжёлого оружия (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты транспортных планёров'] = {
'Командир роты транспортных планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты связи'] = {
'Командир роты связи':1,
'Штаб роты':1,
}
metadict_army['Управление роты разведывательных БПЛА'] = {
'Командир роты разведывательных БПЛА':1,
'Штаб роты':1,
}
metadict_army['Управление роты радиолокационной разведки'] = {
'Командир роты радиолокационной разведки':1,
'Штаб роты':1,
}
metadict_army['Управление роты химических войск'] = {
'Командир роты химических войск':1,
'Штаб роты':1,
}
metadict_army['Управление роты военной полиции'] = {
'Командир роты военной полиции':1,
'Штаб роты':1,
}
metadict_army['Управление роты ведомственной охраны'] = {
'Командир роты ведомственной охраны':1,
'Штаб роты':1,
}
metadict_army['Управление роты боевых роботов'] = {
'Командир роты боевых роботов':1,
'Штаб роты':1,
}
metadict_army['Управление роты планёров-бомбардировщиков'] = {
'Командир роты планёров-бомбардировщиков (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты противовоздушных планёров'] = {
'Командир роты противовоздушных планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты противотанковых планёров'] = {
'Командир роты противотанковых планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты ракетных планёров'] = {
'Командир роты ракетных планёров (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление звена транспортной авиации'] = {
'Командир звена транспортной авиации':1,
}
metadict_army['Управление звена истребителей-бомбардировщиков'] = {
'Командир звена истребителей-бомбардировщиков':1,
}
metadict_army['Управление роты РЭР'] = {
'Командир роты РЭР':1,
'Штаб роты':1,
}
metadict_army['Управление роты РЭБ'] = {
'Командир роты РЭБ':1,
'Штаб роты':1,
}
metadict_army['Управление роты химической разведки (аэромобильное)'] = {
'Командир роты химической разведки (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты химической разведки'] = {
'Командир роты химической разведки':1,
'Штаб роты':1,
}
metadict_army['Управление роты РТР и РЭР'] = {
'Командир роты РТР и РЭР':1,
'Штаб роты':1,
}
metadict_army['Управление роты РТР'] = {
'Командир роты РТР':1,
'Штаб роты':1,
}
metadict_army['Управление ремонтной роты'] = {
'Командир ремонтной роты':1,
'Штаб роты':1,
}
metadict_army['Управление разведывательной роты (бронетехника)'] = {
'Командир разведывательной роты (бронетехника)':1,
'Штаб роты':1,
'Экипаж командирского БМП':1,
}
metadict_army['Управление понтонно-мостовой роты'] = {
'Командир понтонно-мостовой роты':1,
'Штаб роты':1,
}
metadict_army['Управление переправочно-десантной роты'] = {
'Командир переправочно-десантной роты':1,
'Штаб роты':1,
}
metadict_army['Управление медицинской роты'] = {
'Командир медицинской роты':1,
'Штаб роты':1,
}
metadict_army['Управление медицинской роты (аэромобильное)'] = {
'Командир медицинской роты (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление инженерной роты'] = {
'Командир инженерной роты':1,
'Штаб роты':1,
}
metadict_army['Управление инженерно-сапёрной роты'] = {
'Командир инженерно-сапёрной роты':1,
'Штаб роты':1,
}
metadict_army['Управление роты инструкторов'] = {
'Командир роты инструкторов':1,
'Штаб роты':1,
}
metadict_army['Управление роты стальных рейнджеров'] = {
'Командир роты стальных рейнджеров':1,
'Штаб роты специального назначения':1,
'Экипаж командирского БТР':1,
}
metadict_army['Управление снайперской роты'] = {
'Командир роты снайперов':1,
'Штаб роты специального назначения':1,
'Экипаж командирского БТР':1,
}
metadict_army['Управление штурмовой роты'] = {
'Командир штурмовой роты':1,
'Штаб роты специального назначения':1,
'Экипаж командирского БМП':1,
}
metadict_army['Управление штурмовой роты (без БМП)'] = {
'Командир штурмовой роты':1,
'Штаб роты специального назначения':1,
}
metadict_army['Управление пегасо-десантной роты'] = {
'Командир пегасо-десантной роты':1,
'Штаб роты':1,
}
metadict_army['Управление десантно-штурмовой роты'] = {
'Командир десантно-штурмовой роты':1,
'Штаб мотострелковой роты':1,
'Экипаж командирского БМД':1,
}
metadict_army['Управление мотострелковой роты'] = {
'Командир мотострелковой роты':1,
'Штаб мотострелковой роты':1,
'Экипаж командирского БМП':1,
}
metadict_army['Управление механизированной роты'] = {
'Командир механизированной роты':1,
'Штаб мотострелковой роты':1,
'Экипаж командирского БТР':1,
}
metadict_army['Управление танковой роты'] = {
'Командир танковой роты':1,
'Штаб танковой роты':1,
'Экипаж командирского танка':1,
}
metadict_army['Управление разведывательной роты (аэромобильное)'] = {
'Командир разведывательной роты (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление диверсионно-разведывательной роты'] = {
'Командир роты специального назначения (пегас)':1,
'Штаб роты специального назначения':1,
}
metadict_army['Управление роты посыльных'] = {
'Командир роты посыльных (пегас)':1,
'Штаб роты (аэромобильный)':1,
}
metadict_army['Управление роты обеспечения'] = {
'Командир роты обеспечения':1,
'Штаб роты':1,
}
metadict_army['Управление роты обслуживания'] = {
'Командир роты обслуживания':1,
'Штаб роты':1,
}
metadict_army['Управление роты снабжения'] = {
'Командир роты снабжения':1,
'Штаб роты':1,
}
metadict_army['Управление транспортной роты'] = {
'Командир транспортной роты':1,
'Штаб роты':1,
}
metadict_army['Управление батареи ЗРК ближнего действия'] = {
'Командир батареи ЗРК малой дальности':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи ЗРК малой дальности'] = {
'Командир батареи ЗРК малой дальности':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи ЗРК средней дальности'] = {
'Командир батареи ЗРК средней дальности':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи ЗРК большой дальности'] = {
'Командир батареи ЗРК большой дальности':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи 120-мм РСЗО'] = {
'Командир батареи 120-мм РСЗО':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи 220-мм РСЗО'] = {
'Командир батареи 220-мм РСЗО':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи батальонных САУ'] = {
'Командир батареи батальонных САУ':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи полковых САУ'] = {
'Командир батареи полковых САУ':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи полковых гаубиц'] = {
'Командир батареи полковых гаубиц':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи дивизионных САУ'] = {
'Командир батареи дивизионных САУ':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи дивизионных гаубиц'] = {
'Командир батареи дивизионных гаубиц':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи артиллерийской разведки'] = {
'Командир батареи артиллерийской разведки':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи гаубиц'] = {
'Командир батареи гаубиц':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи тактических ракет'] = {
'Командир батареи тактических ракет':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи оперативно-тактических ракет'] = {
'Командир батареи оперативно-тактических ракет':1,
'Штаб батареи':1,
}
metadict_army['Управление батареи связи'] = {
'Командир батареи связи':1,
'Штаб батареи':1,
}
metadict_army['Управление миномётной батареи'] = {
'Командир миномётной батареи':1,
'Штаб батареи':1,
}
metadict_army['Управление миномётной батареи (аэромобильное)'] = {
'Командир миномётной батареи (пегас)':1,
'Штаб батареи (аэромобильный)':1,
}
metadict_army['Управление противотанковой батареи (аэромобильное)'] = {
'Командир противотанковой батареи (пегас)':1,
'Штаб батареи (аэромобильный)':1,
}
metadict_army['Управление батареи (аэромобильное)'] = {
'Командир крылатой батареи (пегас)':1,
'Штаб батареи (аэромобильный)':1,
}
metadict_army['Управление роты мобильной АЭС'] = {
'Командир роты мобильной АЭС':1,
'Штаб роты':1,
}
metadict_army['Управление армейских складов'] = {
'Управляющий армейских складов':1,
'Заместитель управляющего армейских складов':1,
'Делопроизводитель':6,
}
metadict_army['Управление складского терминала'] = {
'Управляющий складского терминала':1,
'Делопроизводитель':1,
}
metadict_army['Штаб роты'] = {
'Заместитель командира роты':1,
'Управляющий хозяйством роты':1,
'Инструктор по боевой подготовке':1,
'Инструктор по вооружению':1,
'Санитар-инструктор':1,
'Имущество роты':1,
}
metadict_army['Штаб роты (аэромобильный)'] = {
'Заместитель командира роты (пегас)':1,
'Управляющий хозяйством роты (пегас)':1,
'Инструктор по боевой подготовке (пегас)':1,
'Инструктор по вооружению (пегас)':1,
'Санитар-инструктор':1,
'Станция ближней разведки':1,
'Лазерный прибор разведки':1,
'Лёгкий планёр (перевозка снаряжения)':5,
'Имущество роты (аэромобильной)':1,
}
metadict_army['Штаб мотострелковой роты'] = {
'Заместитель командира роты':1,
'Управляющий хозяйством роты':1,
'Инструктор по боевой подготовке':1,
'Инструктор экипажей бронемашин':1,
'Инструктор по вооружению':1,
'Санитар-инструктор':1,
'Имущество роты (мотострелковой)':1,
}
metadict_army['Штаб роты специального назначения'] = {
'Заместитель командира роты специального назначения':1,
'Управляющий хозяйством роты':1,
'Инструктор по боевой подготовке':1,
'Инструктор по вооружению':1,
'Полевой хирург (единорог)':1,
'Имущество роты (специального назначения)':1,
}
metadict_army['Штаб роты специального назначения (аэромобильный)'] = {
'Заместитель командира роты специального назначения (пегас)':1,
'Связист диверсионно-разведывательной группы (пегас)':1,
'Управляющий хозяйством роты (пегас)':1,
'Инструктор по боевой подготовке (пегас)':1,
'Инструктор по вооружению (пегас)':1,
'Полевой хирург (единорог)':1,
'КВ-радиостанция переносная':1,
'Станция ближней разведки':1,
'Лазерный прибор разведки':1,
'Лёгкий планёр (перевозка снаряжения)':6,
'Имущество роты (аэромобильной СпН)':1,
}
metadict_army['Штаб танковой роты'] = {
'Заместитель командира роты':1,
'Инструктор экипажей бронемашин':1,
'Санитар-инструктор':1,
'Имущество роты (танковой)':1,
}
metadict_army['Штаб батареи'] = {
'Заместитель командира батареи':1,
'Управляющий хозяйством батареи':1,
'Инструктор по боевой подготовке':1,
'Инструктор по вооружению':1,
'Санитар-инструктор':1,
'Имущество батареи':1,
}
metadict_army['Штаб батареи (аэромобильный)'] = {
'Заместитель командира роты (пегас)':1,
'Управляющий хозяйством роты (пегас)':1,
'Инструктор по боевой подготовке (пегас)':1,
'Инструктор по вооружению (пегас)':1,
'Санитар-инструктор':1,
'Имущество батареи (аэромобильной)':1,
}
#----
# Platoon command sections:
metadict_army['Управление взвода РТР'] = {
'Командир взвода РТР':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода РТР (аэромобильное)'] = {
'Командир взвода РТР (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода РЭБ'] = {
'Командир взвода РЭБ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода РЭБ (аэромобильное)'] = {
'Командир взвода РЭБ (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода РЭР'] = {
'Командир взвода РЭР':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода РЭР (аэромобильное)'] = {
'Командир взвода РЭР (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода боевых роботов'] = {
'Командир взвода боевых роботов':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода ведомственной охраны'] = {
'Командир взвода ведомственной охраны':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода военной полиции'] = {
'Командир взвода военной полиции':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода военной полиции (аэромобильное)'] = {
'Командир взвода военной полиции (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода звукометрической разведки'] = {
'Командир взвода звукометрической разведки':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода инженерной разведки'] = {
'Командир взвода инженерной разведки':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода механизированных мостов'] = {
'Командир взвода механизированных мостов':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода мобильного энергоблока'] = {
'Командир взвода мобильного энергоблока':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода обеспечения'] = {
'Командир взвода обеспечения':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода обеспечения (аэромобильное)'] = {
'Командир взвода обеспечения (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода обслуживания'] = {
'Командир взвода обслуживания':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода планёров-бомбардировщиков (аэромобильное)'] = {
'Командир взвода планёров-бомбардировщиков (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода противовоздушных планёров (аэромобильное)'] = {
'Командир взвода противовоздушных планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода противотанковых планёров (аэромобильное)'] = {
'Командир взвода противотанковых планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода разведывательных БПЛА'] = {
'Командир взвода разведывательных БПЛА':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода ракетных планёров (аэромобильное)'] = {
'Командир взвода ракетных планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода роботов РЭБ'] = {
'Командир взвода роботов РЭБ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода связи'] = {
'Командир взвода связи':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода связи (аэромобильное)'] = {
'Командир взвода связи (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода снабжения'] = {
'Командир взвода снабжения':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода стальных рейнджеров'] = {
'Командир взвода стальных рейнджеров':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода транспортных планёров (аэромобильное)'] = {
'Командир взвода транспортных планёров (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление взвода тяжёлого оружия'] = {
'Командир взвода тяжёлого оружия':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода химической защиты'] = {
'Командир взвода химической защиты':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода химической разведки'] = {
'Командир взвода химической разведки':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление взвода химической разведки (аэромобильное)'] = {
'Командир взвода химической разведки (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление гранатомётного взвода (аэромобильное)'] = {
'Командир гранатомётного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление десантно-штурмового взвода'] = {
'Командир десантно-штурмового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление диверсионно-разведывательного взвода (аэромобильное)'] = {
'Командир диверсионно-разведывательного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление инженерного взвода'] = {
'Командир инженерного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление инженерного взвода (аэромобильное)'] = {
'Командир инженерного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
}
metadict_army['Управление медицинского взвода'] = {
'Командир медицинского взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление медицинского взвода (аэромобильное)'] = {
'Командир медицинского взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление метеорологического взвода'] = {
'Командир метеорологического взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление механизированного взвода'] = {
'Командир механизированного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление миномётного взвода'] = {
'Командир миномётного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление миномётного взвода (аэромобильное)'] = {
'Командир миномётного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление миномётного взвода (бронетехника)'] = {
'Командир миномётного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление мотострелкового взвода'] = {
'Командир мотострелкового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 120-мм РСЗО'] = {
'Командир огневого взвода 120-мм РСЗО':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 220-мм РСЗО'] = {
'Командир огневого взвода 220-мм РСЗО':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 120-мм САУ'] = {
'Командир огневого взвода 120-мм САУ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 120-мм гаубиц'] = {
'Командир огневого взвода 120-мм гаубиц':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 150-мм САУ'] = {
'Командир огневого взвода 150-мм САУ':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огневого взвода 150-мм гаубиц'] = {
'Командир огневого взвода 150-мм гаубиц':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление огнемётного взвода'] = {
'Командир огнемётного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление пегасо-десантного взвода'] = {
'Командир пегасо-десантного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление переправочно-десантного взвода'] = {
'Командир переправочно-десантного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление понтонно-мостового взвода'] = {
'Командир понтонно-мостового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление противотанкового взвода (аэромобильное)'] = {
'Командир противотанкового взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление пулемётного взвода (аэромобильное)'] = {
'Командир пулемётного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление разведывательного взвода (аэромобильное)'] = {
'Командир разведывательного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление разведывательного взвода (бронетехника)'] = {
'Командир разведывательного взвода (бронетехника)':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление ремонтного взвода'] = {
'Командир ремонтного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление сапёрного взвода'] = {
'Командир сапёрного взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление сапёрного взвода (аэромобильное)'] = {
'Командир сапёрного взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление снайперского взвода (аэромобильное)'] = {
'Командир снайперского взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление танкового взвода'] = {
'Командир танкового взвода':1,
'Заместитель командира взвода':1,
}
metadict_army['Управление топогеодезического взвода (аэромобильное)'] = {
'Командир топогеодезического взвода (пегас)':1,
'Заместитель командира взвода (пегас)':1,
'Лёгкий планёр (перевозка снаряжения)':2,
}
metadict_army['Управление транспортного взвода'] = {
'Командир транспортного взвода':1,
'Заместитель командира взвода':1,
} | 0.121699 | 0.296692 |
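The metadict above is a plain nested mapping: each key names a unit, its value maps member names to counts, and member names that are themselves keys act as references to sub-units. A minimal sketch of how such a table could be flattened into a roster (the expand function and its leaf test are assumptions for illustration, not part of the source):

def expand_unit(metadict, unit, multiplier=1, roster=None):
    """Recursively flatten a unit definition into {leaf_name: count}."""
    if roster is None:
        roster = {}
    for name, count in metadict.get(unit, {}).items():
        if name in metadict:
            # the member is itself a unit: expand it
            expand_unit(metadict, name, multiplier * count, roster)
        else:
            # leaf entry: a post or a piece of equipment
            roster[name] = roster.get(name, 0) + multiplier * count
    return roster

# e.g. expand_unit(metadict_army, 'Управление танковой роты') resolves
# 'Штаб танковой роты' recursively and counts the remaining posts.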
import numpy as np
from sympy import *
# Main object
class Robot:
def __init__(self, name = ''):
"""Object constructor
Args:
name (str, optional): robot's name (if any)
"""
# Robot's name
self.name = name
# Inherited
class Serial(Robot):
"""Serial Robot
Args:
Robot (obj): inheritance
"""
def __init__(self, jointsPositions : np.array, jointsVelocities : np.array, jointsAccelerations : np.array, linksLengths : list, COMs : list, mass : list, inertia : list, xi = [], xid = [], name = ''):
"""Object constructor
Args:
jointsPositions (np.array): joints set in radians
jointsVelocities (np.array): velocities set in radians/second
jointsAccelerations (np.array): accelerations set in radians/second^2
linksLengths (list): length of each robot's link in meters
COMs (list): relative position of each center of mass
mass (list): mass of each rigid body in the system
inertia (list): tensor of inertia of each rigid body in the system
xi (np.array, optional): axes of actuation of each joint. Defaults to [].
xid (np.array, optional): derivative of the axes of actuation of each joint. Defaults to [].
name (str, optional): name of the robot. Defaults to ''.
"""
# Robot initializer
Robot.__init__(self, name = name)
# Kinematic Parameters
self.jointsPositions = jointsPositions
self.linksLengths = linksLengths
self.COMs = COMs
# Dynamic Parameters
self.jointsVelocities = jointsVelocities
self.jointsAccelerations = jointsAccelerations
self.mass = mass
self.inertia = inertia
# Symbolic Joints: q(t), q'(t) and q''(t)
self.qSymbolic = Matrix([[f"q{i + 1}",] for i in range(self.jointsPositions.shape[0])])
self.qdSymbolic = Matrix([[f"qd{i + 1}",] for i in range(self.jointsPositions.shape[0])])
self.qddSymbolic = Matrix([[f"qdd{i + 1}",] for i in range(self.jointsPositions.shape[0])])
# Symbolic Geometrical Properties
self.symbolicLinks = Matrix([f"L{i + 1}" for i in range(len(self.linksLengths))])
self.symbolicCOMs = Matrix([f"Lcom{i + 1}" for i in range(len(self.COMs))])
# Symbolic Physical Properties
self.symbolicMass = Matrix([[f"m{i + 1}",] for i in range(len(self.mass))])
self.symbolicInertia = [Matrix([[f"+Ixx{i + 1}", f"-Ixy{i + 1}", f"-Ixz{i + 1}"],
[f"-Ixy{i + 1}", f"+Iyy{i + 1}", f"-Iyz{i + 1}"],
[f"-Ixz{i + 1}", f"-Iyz{i + 1}", f"+Izz{i + 1}"]]) for i in range(len(self.mass))]
# Set Denavit - Hartenberg Parameters Matrix (numerical and symbolical)
self.denavitHartenberg()
self.denavitHartenberg(symbolic = True)
# Set Denavit - Hartenberg Parameters Matrix to Centers of Mass (numerical and symbolical)
self.denavitHartenbergCOM()
self.denavitHartenbergCOM(symbolic = True)
# Actuation axes
self.xi = xi
self.xid = xid
def denavitHartenberg(self, symbolic = False):
"""Denavit - Hartenberg parameters for i - th reference frame, ordered as follows:
theta: rotation on «z» axis
d: translation on «z» axis
a: translation on «x» axis
alpha: rotation on «x» axis
"""
if symbolic:
"""
# Two-link planar robot
self.symbolicDHParameters = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], 0, self.symbolicLinks[0], 0],
[self.qSymbolic[1, 0], 0, self.symbolicLinks[1], 0]])
"""
"""
# Three-link spatial robot
self.symbolicDHParameters = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicLinks[0], 0.0000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], self.symbolicLinks[1], 0.0000000000000000000, 0.0000000],
[0.000000000000000000, 0.0000000000000000000, self.symbolicLinks[2], 0.0000000],
[self.qSymbolic[2, 0], self.symbolicLinks[3], 0.0000000000000000000, 0.0000000],
[(np.pi / 2) + 0.0000, 0.0000000000000000000, 0.0000000000000000000, np.pi / 2],
[0.000000000000000000, self.symbolicLinks[4], 0.0000000000000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.symbolicDHParameters = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicLinks[0], 0.0000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], 0.0000000000000000000, self.symbolicLinks[1], 0.0000000],
[self.qSymbolic[2, 0], 0.0000000000000000000, 0.0000000000000000000, np.pi / 2],
[self.qSymbolic[3, 0], self.symbolicLinks[2], 0.0000000000000000000, 0.0000000]])
else:
"""
# Two-link planar robot
self.dhParameters = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], 0, self.linksLengths[0], 0],
[self.jointsPositions[1, 0], 0, self.linksLengths[1], 0]])
"""
"""
# Three-link spatial robot
self.dhParameters = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.linksLengths[0], 0.000000000000000000, np.pi / 2],
[self.jointsPositions[1, 0], self.linksLengths[1], 0.000000000000000000, 0.0000000],
[0.000000000000000000000000, 0.000000000000000000, self.linksLengths[2], 0.0000000],
[self.jointsPositions[2, 0], self.linksLengths[3], 0.000000000000000000, 0.0000000],
[(np.pi / 2) + 0.0000000000, 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[0.000000000000000000000000, self.linksLengths[4], 0.000000000000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.dhParameters = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.linksLengths[0], 0.000000000000000000, np.pi / 2],
[self.jointsPositions[1, 0], 0.000000000000000000, self.linksLengths[1], 0.0000000],
[self.jointsPositions[2, 0], 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[self.jointsPositions[3, 0], self.linksLengths[2], 0.000000000000000000, 0.0000000]])
def denavitHartenbergCOM(self, symbolic = False):
"""Denavit - Hartenberg parameters for j - th rigid body, ordered as follows:
theta: rotation on «z» axis
d: translation on «z» axis
a: translation on «x» axis
alpha: rotation on «x» axis
"""
if symbolic:
"""
# Two-link planar robot
self.symbolicDHParametersCOM = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], 0, self.symbolicCOMs[0], 0],
[self.qSymbolic[1, 0], 0, self.symbolicCOMs[1], 0]])
"""
"""
# Three-link spatial robot
self.symbolicDHParametersCOM = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicCOMs[0], 0.000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], self.symbolicCOMs[1], 0.000000000000000000, 0.0000000],
[0.000000000000000000, 0.000000000000000000, self.symbolicCOMs[2], 0.0000000],
[self.qSymbolic[2, 0], self.symbolicCOMs[3], 0.000000000000000000, 0.0000000],
[(np.pi / 2) + 0.0000, 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[0.000000000000000000, self.symbolicCOMs[4], 0.000000000000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.symbolicDHParametersCOM = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicCOMs[0], 0.000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], 0.000000000000000000, self.symbolicCOMs[1], 0.0000000],
[self.qSymbolic[2, 0], 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[self.qSymbolic[3, 0], self.symbolicCOMs[2], 0.000000000000000000, 0.0000000]])
else:
"""
# Two-link planar robot
self.dhParametersCOM = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], 0, self.COMs[0], 0],
[self.jointsPositions[1, 0], 0, self.COMs[1], 0]])
"""
"""
# Three-link spatial robot
self.dhParametersCOM = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.COMs[0], 0.0000000000, np.pi / 2],
[self.jointsPositions[1, 0], self.COMs[1], 0.0000000000, 0.0000000],
[0.000000000000000000000000, 0.0000000000, self.COMs[2], 0.0000000],
[self.jointsPositions[2, 0], self.COMs[3], 0.0000000000, 0.0000000],
[(np.pi / 2) + 0.0000000000, 0.0000000000, 0.0000000000, np.pi / 2],
[0.000000000000000000000000, self.COMs[4], 0.0000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.dhParametersCOM = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.COMs[0], 0.0000000000, np.pi / 2],
[self.jointsPositions[1, 0], 0.0000000000, self.COMs[1], 0.0000000],
[self.jointsPositions[2, 0], 0.0000000000, 0.0000000000, np.pi / 2],
[self.jointsPositions[3, 0], self.COMs[2], 0.0000000000, 0.0000000]])
def whereIsTheJoint(self, joint : int):
"""This method allows to know in which reference frame is attached any joint based on symbolic Denavit - Hartenberg Parameters Matrix, so this have to be set before calling this method
Args:
joint (int): number of joint we want to look for
Returns:
row, colum (int, int): Row and column of Denavit - Hartenberg Parameters Matrix where joint is stored
"""
# Check which frame has the i-th joint attached by iterating through all the rows of the symbolic Denavit - Hartenberg matrix
for row in range(self.dhParameters.shape[0]):
# Get the current row from the symbolic Denavit - Hartenberg parameters
frame = self.symbolicDHParameters[4 * row : 4 * (row + 1)]
# If joint qi is in current reference frame
if Symbol('q' + str(joint)) in frame:
break
# Return the row and the joint's position within it
return row, frame.index(Symbol('q' + str(joint)))
def whereIsTheCOM(self, COM : int):
"""This method allows to know in which reference frame is attached any Center of Mass based on symbolic Denavit - Hartenberg Parameters Matrix, so this have to be set before calling this method
Args:
COM (int): number of Center of Mass we want to look for
Returns:
row, colum (int, int): Row and column of Denavit - Hartenberg Parameters Matrix where center of mass is stored
"""
# Check which frame has the i-th center of mass attached by iterating through all the rows of the symbolic Denavit - Hartenberg matrix
for row in range(self.dhParameters.shape[0]):
# Get the current row from the symbolic Denavit - Hartenberg parameters
frame = self.symbolicDHParametersCOM[4 * row : 4 * (row + 1)]
# If center of mass Lcomi is in the current reference frame
if Symbol('Lcom' + str(COM)) in frame:
break
# Return the row and the position of the center of mass within it
return row, frame.index(Symbol('Lcom' + str(COM)))
if __name__ == '__main__':
"""
THIS SECTION IS FOR TESTING PURPOSES ONLY
"""
print("Z") | lib/Robot.py | import numpy as np
from sympy import *
# Main object
class Robot:
def __init__(self, name = ''):
"""Object constructor
Args:
name (str, optional): robot's name (if any)
"""
# Robot's name
self.name = name
# Inherited
class Serial(Robot):
"""Serial Robot
Args:
Robot (obj): inheritance
"""
def __init__(self, jointsPositions : np.array, jointsVelocities : np.array, jointsAccelerations : np.array, linksLengths : list, COMs : list, mass : list, inertia : list, xi = [], xid = [], name = ''):
"""Object constructor
Args:
jointsPositions (np.array): joints set in radians
jointsVelocities (np.array): velocities set in radians/second
jointsAccelerations (np.array): accelerations set in radians/second^2
linksLengths (list): length of each robot's link in meters
COMs (list): relative position of each center of mass
mass (list): mass of each rigid body in the system
inertia (list): tensor of inertia of each rigid body in the system
xi (np.array, optional): axes of actuation of each joint. Defaults to [].
xid (np.array, optional): derivative of the axes of actuation of each joint. Defaults to [].
name (str, optional): name of the robot. Defaults to ''.
"""
# Robot initializer
Robot.__init__(self, name = name)
# Kinematic Parameters
self.jointsPositions = jointsPositions
self.linksLengths = linksLengths
self.COMs = COMs
# Dynamic Parameters
self.jointsVelocities = jointsVelocities
self.jointsAccelerations = jointsAccelerations
self.mass = mass
self.inertia = inertia
# Symbolic Joints: q(t), q'(t) and q''(t)
self.qSymbolic = Matrix([[f"q{i + 1}",] for i in range(self.jointsPositions.shape[0])])
self.qdSymbolic = Matrix([[f"qd{i + 1}",] for i in range(self.jointsPositions.shape[0])])
self.qddSymbolic = Matrix([[f"qdd{i + 1}",] for i in range(self.jointsPositions.shape[0])])
# Symbolic Geometrical Properties
self.symbolicLinks = Matrix([f"L{i + 1}" for i in range(len(self.linksLengths))])
self.symbolicCOMs = Matrix([f"Lcom{i + 1}" for i in range(len(self.COMs))])
# Symbolic Physical Properties
self.symbolicMass = Matrix([[f"m{i + 1}",] for i in range(len(self.mass))])
self.symbolicInertia = [Matrix([[f"+Ixx{i + 1}", f"-Ixy{i + 1}", f"-Ixz{i + 1}"],
[f"-Ixy{i + 1}", f"+Iyy{i + 1}", f"-Iyz{i + 1}"],
[f"-Ixz{i + 1}", f"-Iyz{i + 1}", f"+Izz{i + 1}"]]) for i in range(len(self.mass))]
# Set Denavit - Hartenberg Parameters Matrix (numerical and symbolical)
self.denavitHartenberg()
self.denavitHartenberg(symbolic = True)
# Set Denavit - Hartenberg Parameters Matrix to Centers of Mass (numerical and symbolical)
self.denavitHartenbergCOM()
self.denavitHartenbergCOM(symbolic = True)
# Actuation axes
self.xi = xi
self.xid = xid
def denavitHartenberg(self, symbolic = False):
"""Denavit - Hartenberg parameters for i - th reference frame, ordered as follows:
theta: rotation on «z» axis
d: translation on «z» axis
a: translation on «x» axis
alpha: rotation on «x» axis
"""
if symbolic:
"""
# Two-link planar robot
self.symbolicDHParameters = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], 0, self.symbolicLinks[0], 0],
[self.qSymbolic[1, 0], 0, self.symbolicLinks[1], 0]])
"""
"""
# Three-link spatial robot
self.symbolicDHParameters = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicLinks[0], 0.0000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], self.symbolicLinks[1], 0.0000000000000000000, 0.0000000],
[0.000000000000000000, 0.0000000000000000000, self.symbolicLinks[2], 0.0000000],
[self.qSymbolic[2, 0], self.symbolicLinks[3], 0.0000000000000000000, 0.0000000],
[(np.pi / 2) + 0.0000, 0.0000000000000000000, 0.0000000000000000000, np.pi / 2],
[0.000000000000000000, self.symbolicLinks[4], 0.0000000000000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.symbolicDHParameters = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicLinks[0], 0.0000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], 0.0000000000000000000, self.symbolicLinks[1], 0.0000000],
[self.qSymbolic[2, 0], 0.0000000000000000000, 0.0000000000000000000, np.pi / 2],
[self.qSymbolic[3, 0], self.symbolicLinks[2], 0.0000000000000000000, 0.0000000]])
else:
"""
# Two-link planar robot
self.dhParameters = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], 0, self.linksLengths[0], 0],
[self.jointsPositions[1, 0], 0, self.linksLengths[1], 0]])
"""
"""
# Three-link spatial robot
self.dhParameters = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.linksLengths[0], 0.000000000000000000, np.pi / 2],
[self.jointsPositions[1, 0], self.linksLengths[1], 0.000000000000000000, 0.0000000],
[0.000000000000000000000000, 0.000000000000000000, self.linksLengths[2], 0.0000000],
[self.jointsPositions[2, 0], self.linksLengths[3], 0.000000000000000000, 0.0000000],
[(np.pi / 2) + 0.0000000000, 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[0.000000000000000000000000, self.linksLengths[4], 0.000000000000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.dhParameters = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.linksLengths[0], 0.000000000000000000, np.pi / 2],
[self.jointsPositions[1, 0], 0.000000000000000000, self.linksLengths[1], 0.0000000],
[self.jointsPositions[2, 0], 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[self.jointsPositions[3, 0], self.linksLengths[2], 0.000000000000000000, 0.0000000]])
def denavitHartenbergCOM(self, symbolic = False):
"""Denavit - Hartenberg parameters for j - th rigid body, ordered as follows:
theta: rotation on «z» axis
d: translation on «z» axis
a: translation on «x» axis
alpha: rotation on «x» axis
"""
if symbolic:
"""
# Two-link planar robot
self.symbolicDHParametersCOM = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], 0, self.symbolicCOMs[0], 0],
[self.qSymbolic[1, 0], 0, self.symbolicCOMs[1], 0]])
"""
"""
# Three-link spatial robot
self.symbolicDHParametersCOM = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicCOMs[0], 0.000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], self.symbolicCOMs[1], 0.000000000000000000, 0.0000000],
[0.000000000000000000, 0.000000000000000000, self.symbolicCOMs[2], 0.0000000],
[self.qSymbolic[2, 0], self.symbolicCOMs[3], 0.000000000000000000, 0.0000000],
[(np.pi / 2) + 0.0000, 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[0.000000000000000000, self.symbolicCOMs[4], 0.000000000000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.symbolicDHParametersCOM = Matrix([[0, 0, 0, 0],
[self.qSymbolic[0, 0], self.symbolicCOMs[0], 0.000000000000000000, np.pi / 2],
[self.qSymbolic[1, 0], 0.000000000000000000, self.symbolicCOMs[1], 0.0000000],
[self.qSymbolic[2, 0], 0.000000000000000000, 0.000000000000000000, np.pi / 2],
[self.qSymbolic[3, 0], self.symbolicCOMs[2], 0.000000000000000000, 0.0000000]])
else:
"""
# Two-link planar robot
self.dhParametersCOM = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], 0, self.COMs[0], 0],
[self.jointsPositions[1, 0], 0, self.COMs[1], 0]])
"""
"""
# Three-link spatial robot
self.dhParametersCOM = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.COMs[0], 0.0000000000, np.pi / 2],
[self.jointsPositions[1, 0], self.COMs[1], 0.0000000000, 0.0000000],
[0.000000000000000000000000, 0.0000000000, self.COMs[2], 0.0000000],
[self.jointsPositions[2, 0], self.COMs[3], 0.0000000000, 0.0000000],
[(np.pi / 2) + 0.0000000000, 0.0000000000, 0.0000000000, np.pi / 2],
[0.000000000000000000000000, self.COMs[4], 0.0000000000, 0.0000000]])
"""
# 4 degrees-of-freedom robot
self.dhParametersCOM = np.array([[0, 0, 0, 0],
[self.jointsPositions[0, 0], self.COMs[0], 0.0000000000, np.pi / 2],
[self.jointsPositions[1, 0], 0.0000000000, self.COMs[1], 0.0000000],
[self.jointsPositions[2, 0], 0.0000000000, 0.0000000000, np.pi / 2],
[self.jointsPositions[3, 0], self.COMs[2], 0.0000000000, 0.0000000]])
def whereIsTheJoint(self, joint : int):
"""This method allows to know in which reference frame is attached any joint based on symbolic Denavit - Hartenberg Parameters Matrix, so this have to be set before calling this method
Args:
joint (int): number of joint we want to look for
Returns:
row, colum (int, int): Row and column of Denavit - Hartenberg Parameters Matrix where joint is stored
"""
# Check what frame has the i-th joint attached by iteration through all the rows in Denavit - Hartenberg symbolic matrix
for row in range(self.dhParameters.shape[0]):
# Get the current row from the symbolic Denavit - Hartenberg parameters
frame = self.symbolicDHParameters[4 * row : 4 * (row + 1)]
# If joint qi is in current reference frame
if Symbol('q' + str(joint)) in frame:
break
# Returns the frame
return row, frame.index(Symbol('q' + str(joint)))
def whereIsTheCOM(self, COM : int):
"""This method allows to know in which reference frame is attached any Center of Mass based on symbolic Denavit - Hartenberg Parameters Matrix, so this have to be set before calling this method
Args:
COM (int): number of Center of Mass we want to look for
Returns:
row, colum (int, int): Row and column of Denavit - Hartenberg Parameters Matrix where center of mass is stored
"""
# Check what frame has the i-th joint attached by iteration through all the rows in Denavit - Hartenberg symbolic matrix
for row in range(self.dhParameters.shape[0]):
# Get the current row from the symbolic Denavit - Hartenberg parameters
frame = self.symbolicDHParametersCOM[4 * row : 4 * (row + 1)]
# If Center of Máss Lcomi is in current reference frame
if Symbol('Lcom' + str(COM)) in frame:
break
# Returns the frame
return row, frame.index(Symbol('Lcom' + str(COM)))
if __name__ == '_main__':
"""
THIS SECTION IS FOR TESTING PURPOSES ONLY
"""
print("Z") | 0.814053 | 0.543166 |
import json
import csv
from collections import defaultdict
from datetime import datetime
import io
class Stats:
def __init__(self, stats_queries):
self._stats_queries = stats_queries
def get_users_by_app(self, app_name, snapshot):
q = self._stats_queries
if snapshot is None:
snapshot = q.get_latest_snapshot()['_id']
users = q.get_users_by_app(app_name, snapshot)
users = [x['username'] for x in users]
return json.dumps(users, indent=3)
def get_all_apps_time_series(self):
q = self._stats_queries
apps_by_date = q.get_all_apps_time_series()
csv_output = io.StringIO()
f = csv.writer(csv_output)
f.writerow(["date", "name", "count"])
for d in apps_by_date:
for app in d["values"]:
f.writerow([d["date"], app["name"], app["count"]])
return csv_output.getvalue()
def get_all(self, data_history_file):
historical_stats = None
if data_history_file:
with open(data_history_file, 'r') as f:
historical_stats = json.load(f)
q = self._stats_queries
totals = q.get_total_address_counts()
domain_counts = q.get_domain_counts()
subdomains_counts = q.get_subdomain_counts()
person_counts = q.get_person_counts()
total_installs = q.get_total_installs_time_series()
localhost_installs = q.get_localhost_installs_time_series()
values_by_date = defaultdict(list)
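# Fold each metric's time series into one {date: [{name, count}, ...]} mapping (merge() is defined at module level below)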
merge(values_by_date, 'total_addresses', totals)
merge(values_by_date, 'domains', domain_counts)
merge(values_by_date, 'subdomains', subdomains_counts)
merge(values_by_date, 'persons', person_counts)
merge(values_by_date, 'total_installs', total_installs)
merge(values_by_date, 'localhost_installs', localhost_installs)
domains_data = []
for date, values in values_by_date.items():
domains_data.append({
'date': date,
'values': values
})
if historical_stats:
domains_data.extend(historical_stats['domainsData'])
domains_data.sort(key=lambda d: datetime.strptime(d['date'], '%Y-%m-%d'))
latest_snapshot = q.get_latest_snapshot()
top10_apps = q.get_app_counts(latest_snapshot['_id'])[0:10]
top10_app_names = [app['name'] for app in top10_apps]
apps_data = q.get_app_time_series(top10_app_names)
if historical_stats:
def leave_only_top10(item):
item['values'] = [app for app in item['values'] if app['name'] in top10_app_names]
for item in historical_stats['appsData']:
leave_only_top10(item)
apps_data.extend(historical_stats['appsData'])
apps_data.sort(key=lambda d: datetime.strptime(d['date'], '%Y-%m-%d'))
all_stats = {'domainsData': domains_data, 'appsData': apps_data}
return json.dumps(all_stats, indent=3)
def get_app_counts_csv(self):
q = self._stats_queries
latest_snapshot = q.get_latest_snapshot()
app_counts = q.get_app_counts(latest_snapshot['_id'])
csv_output = io.StringIO()
f = csv.writer(csv_output)
f.writerow(["date", "name", "count"])
snapshot_date = latest_snapshot['start'].strftime('%Y-%m-%d')
for c in app_counts:
f.writerow([snapshot_date, c["name"], c["count"]])
return csv_output.getvalue()
def merge(values_dict, name, values_arr):
for d in values_arr:
date = d['date']
values_dict[date].append({
'name': name,
'count': d['value']
}) | blockstats/stats.py | 0.330687 | 0.156362 |
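The module-level merge() helper above groups per-metric time series into one list of {name, count} entries per date; a small illustrative call (the input rows are made up):

from collections import defaultdict
# from blockstats.stats import merge  # module path as listed above

values_by_date = defaultdict(list)
merge(values_by_date, 'domains',    [{'date': '2024-01-01', 'value': 10}])
merge(values_by_date, 'subdomains', [{'date': '2024-01-01', 'value': 4}])
# values_by_date['2024-01-01'] ==
#   [{'name': 'domains', 'count': 10}, {'name': 'subdomains', 'count': 4}]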
import numpy as np
import scipy.io as sio
class SVHN:
def __init__(self, file_path, n_classes, use_extra=False, gray=False, normalize=False):
self.n_classes = n_classes
# Load Train Set
train = sio.loadmat(file_path + "/train_32x32.mat")
self.train_labels = self.__one_hot_encode(train['y'])
self.train_examples = train['X'].shape[3]
self.train_data = store_data(train['X'].astype("float32"), self.train_examples, normalize, gray)
# Load Test Set
test = sio.loadmat(file_path + "/test_32x32.mat")
self.test_labels = self.__one_hot_encode(test['y'])
self.test_examples = test['X'].shape[3]
self.test_data = store_data(test['X'].astype("float32"), self.test_examples, normalize, gray)
# Load Extra dataset as additional training data if necessary
if use_extra:
extra = sio.loadmat(file_path + "/extra_32x32.mat")
self.train_labels = np.append(self.train_labels, self.__one_hot_encode(extra['y']), axis=0)
extra_examples = extra['X'].shape[3]
self.train_examples += extra_examples
self.train_data = np.append(self.train_data, store_data(extra['X'].astype("float32"), extra_examples,
normalize, gray), axis=0)
# shuffle training examples and labels with the same random permutation
idx = np.arange(self.train_data.shape[0])
np.random.shuffle(idx)
self.train_data = self.train_data[idx]
self.train_labels = self.train_labels[idx]
def __one_hot_encode(self, data):
"""Creates a one-hot encoding vector
Args:
data: The data to be converted
Returns:
An array of one-hot encoded items
"""
n = data.shape[0]
one_hot = np.zeros(shape=(data.shape[0], self.n_classes), dtype=np.int32)
for s in range(n):
temp = np.zeros(self.n_classes, dtype=np.int32)
num = data[s][0]
if num == 10:
temp[0] = 1
else:
temp[num] = 1
one_hot[s] = temp
return one_hot
def store_data(data, num_of_examples, normalize, gray):
d = []
for i in range(num_of_examples):
image = data[:, :, :, i]
if normalize:
image = normalize_image(image)
if gray:
image = rgb2gray(image)
d.append(image)
return np.asarray(d)
def normalize_image(image):
pix = (255 - image) * 1.0 / 255.0
norm_image = pix - np.mean(pix, axis=0)
return norm_image
def rgb2gray(rgb):
return np.expand_dims(np.dot(rgb, [0.2989, 0.5870, 0.1140]), axis=2) | src/single_digit/svhn.py | 0.76934 | 0.40486 |
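The helpers above operate on single HxWx3 images, so they can be exercised without the SVHN .mat files; a quick sketch on random data (the array contents are arbitrary):

import numpy as np
# from svhn import normalize_image, rgb2gray  # module as listed above

img = np.random.randint(0, 256, size=(32, 32, 3)).astype("float32")
norm = normalize_image(img)    # inverted, scaled to [0, 1], per-column mean removed
gray = rgb2gray(img)           # BT.601 luma weights; shape becomes (32, 32, 1)
print(norm.shape, gray.shape)  # (32, 32, 3) (32, 32, 1)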
from pathlib import Path
import os
import sys
import re
import json
from sqlalchemy import create_engine
from sqlalchemy.pool import QueuePool
from cached_property import cached_property as cached
from docopt import docopt
from contextlib import ExitStack
DOCOPT = """\
mi utility.
Helps manage migrations in raw sql.
Usage:
mi init [--db=db]
mi check
mi new [<name>]
mi apply [<names>...]
mi -h | --help
mi help
Subcommands:
mi init Create .mi.json
mi check Check for unapplied migrations
mi new Create empty migration
mi apply Apply migration(s)
"""
class Mi:
@cached
def args(self):
return docopt(doc=DOCOPT)
@cached
def config(self):
p = Path('.') / '.mi.json'
if not p.exists():
raise ExitError('Please run mi init')
with p.open() as f:
return json.loads(f.read())
@cached
def db_url(self):
return self.config['database_url']
@cached
def dir(self):
d = self.config['migrations_dir']
return Path(d)
@cached
def files(self):
ret = self.dir.glob('*.sql')
ret = sorted(ret, key=lambda p: p.name) # unneeded?
return ret
@cached
def engine(self):
return create_engine(self.db_url, poolclass=QueuePool)
@classmethod
def get_last_number(cls, names):
names = reversed(names)
regexp = re.compile(r'^(\d+)(_\w+)?\.sql$')
for name in names:
m = regexp.match(name)
if not m:
continue
number, title = m.groups()
return int(number)
@cached
def exec(self):
self.ensure_table()
return self._exec
def _exec(self, sql):
# execute in a transaction
with self.engine.begin() as c:
return c.execute(sql)
def ensure_table(self):
try:
self._exec("SELECT 1 FROM migrations")
except Exception:
create = """\
CREATE TABLE migrations (
name varchar(80),
timestamp timestamp with time zone DEFAULT now()
);"""
self._exec(create)
def _resolve_name(self, name):
paths = list(self.dir.glob(f'{name}*'))
if not paths:
raise ExitError(f'No file matches {name}')
elif len(paths) > 1:
matches = ', '.join(p.name for p in paths)
raise ExitError(f'Multiple matches for {name}: {matches}')
[p] = paths
return p.name
def get_unapplied_migrations(self):
if self.args['<names>']:
return [
self._resolve_name(name) for name in self.args['<names>']
]
applied = self.exec("SELECT name from migrations")
applied = [name for (name,) in applied]
names = [p.name for p in self.files]
return [
m for m in names
if m not in applied
]
def get_db_url(self):
db = self.args['--db']
if db:
return db
db = os.environ.get('DATABASE_URL')
if db is None:
raise ExitError('Either set $DATABASE_URL or provide the --db option')
return db
def do_init(self):
d = Path('.')
mi_json = d / '.mi.json'
if mi_json.exists():
print('Already initialized.')
return
migration_dir = d / 'migrations'
if not migration_dir.exists():
migration_dir.mkdir()
else:
ans = input('Do you want to reuse the existing "migrations" directory? [y/n] ')
if ans.strip() != 'y':
p = input('Please type the desired directory path: ')
migration_dir = Path(p)
migration_dir.mkdir()
db_url = self.get_db_url()
conf = {
'database_url': db_url,
'migrations_dir': str(migration_dir),
}
with mi_json.open('w') as f:
# pretty-print to file
conf = json.dumps(conf, sort_keys=True, indent=4, separators=(',', ': '))
print(conf)
f.write(conf)
def do_check(self):
unapplied = self.get_unapplied_migrations()
if not unapplied:
print('All migrations are applied')
return
s = ', '.join(m for m in unapplied)
print(f"Unapplied: {s}")
def do_new(self):
names = [p.name for p in self.files]
num = self.get_last_number(names) or 0
num += 1
title = self.args['<name>']
if title:
name = f'{num:04d}_{title}.sql'
else:
name = f'{num:04d}.sql'
p = self.dir / name
p.touch()
print(f'{p.absolute()} is generated. Please fill it with meaning.')
def do_apply(self):
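# Run each pending migration together with its bookkeeping INSERT; _exec wraps them in a single transaction (engine.begin)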
unapplied = self.get_unapplied_migrations()
for name in unapplied:
p = self.dir / name
with p.open() as f:
sql = f.read()
sql = sql.strip()
if not sql.endswith(";"):
sql = f"{sql};"
sql = f'''{sql}\
INSERT INTO migrations VALUES ('{name}');'''
self.exec(sql)
print(f"Applied: {name}")
def run(self):
with ExitStack() as stack:
if os.environ.get('PDB'):
stack.enter_context(drop_into_debugger())
commands = '''
init check new apply help
'''
for cmd in commands.split():
if self.args[cmd]:
method = getattr(self, f'do_{cmd}')
return method()
if self.args['-h'] or self.args['--help']:
return self.do_help()
assert False
def do_help(self):
print(DOCOPT)
class ExitError(Exception):
pass
class drop_into_debugger:
def __enter__(self):
pass
def __exit__(self, e, m, tb):
if not e:
return
try:
import ipdb as pdb
except ImportError:
import pdb
print(m.__repr__(), file=sys.stderr)
pdb.post_mortem(tb)
def entry_point():
try:
Mi().run()
except ExitError as ee:
print(str(ee), file=sys.stderr)
sys.exit(1) | mi/main.py | 0.259826 | 0.119923 |
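get_last_number() above is a pure classmethod, so the numbering scheme can be checked without a database; the file names here are invented for illustration:

assert Mi.get_last_number(['0001_init.sql', '0002_users.sql', 'notes.txt']) == 2
assert Mi.get_last_number(['README.md']) is None
# so after 0002, `mi new users` would create 0003_users.sql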
import os
import sys
import ConfigParser
_single_config = None
def config(configuration = None):
global _single_config
if configuration and not _single_config:
_single_config = configuration
import mamba.util
if _single_config.log.active:
mamba.util.init_log(_single_config.log.server)
return _single_config
def config_is_true(text):
return str(text).lower() in ("1", "true", "on", "y", "yes")
class Configuration:
class Server:
def __init__(self):
self.host = "localhost"
self.port = 8080
self.version = "1.0"
self.wait_on_port = False
self.auto_restart = False
self.plugins = [os.path.abspath("./")]
self.user_cookie = os.path.basename(self.plugins[-1]) + "_UUID"
self.user_database = None
self.www_dir = os.path.abspath("./www")
def from_config(self, config):
try:
for name, value in config.items("SERVER"):
name = name.lower()
if name == 'host':
self.host = value
elif name == 'port':
self.port = int(value)
elif name == 'version':
self.version = value
elif name == 'wait_on_port':
self.wait_on_port = config_is_true(value)
elif name == 'auto_restart':
self.auto_restart = config_is_true(value)
elif name == 'plugins':
self.plugins = map(os.path.abspath, map(str.strip, value.split(';')))
self.user_cookie = os.path.basename(self.plugins[-1]) + "_UUID"
elif name == 'user_database':
self.user_database = value
elif name == "www_dir":
self.www_dir = os.path.abspath(value)
else:
sys.stderr.write('Unknown config parameter "%s" in section [SERVER].\n' % name)
except ConfigParser.NoSectionError:
pass
class Security:
def __init__(self):
self.password = '<PASSWORD>' # The MD5 key for password '<PASSWORD>'.
self.trusted = '127.0.0.1'
def from_config(self, config):
try:
for name, value in config.items('SECURITY'):
setattr(self, name, value)
except ConfigParser.NoSectionError:
pass
class OtherServers:
def __init__(self):
self.servers = []
def from_config(self, config):
try:
for server_name in config.options('OTHER-SERVERS'):
server_url = config.get('OTHER-SERVERS', server_name)
self.servers.append(server_url)
except ConfigParser.NoSectionError:
pass
class ThreadPool:
def __init__(self, pool_name, queue_name=None, threads=1, parameters={}):
self.name = pool_name
self.queue = queue_name
self.threads = threads
self.params = parameters
def initialize(self, options, globals):
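# options is a "key=value;key=value" string from the config file; "queue" and "threads" are recognized keys, anything else becomes a pool parameter (seeded from, and overriding, [GLOBALS])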
self.params = {}
for key in globals:
self.params[key] = globals[key]
for item in map(str.strip, options.split(";")):
if item:
units = item.strip().split("=")
if len(units) and units[-1] == "":
del units[-1]
if len(units) == 2:
key, value = item.strip().split("=")
if key == "queue":
self.queue = value
elif key == "threads":
self.threads = int(value)
else:
if key in globals:
print "[INIT] Warning! Thread pool '%s' overwrites [GLOBALS] parameter '%s' from '%s' to '%s'" % (self.name, key, globals, value)
self.params[key] = value
class Http:
def __init__(self):
self.commands = 'GET POST'
self.max_wait = 5
self.max_msg_header = 4096
self.max_msg_content = 10E6
self.max_data_total = 1000E6
self.max_data_client = 100E6
self.max_http_total = 1000
self.max_http_client = 16
self.error_details = False
def from_config(self, config):
try:
for name, value in config.items('HTTP'):
name = name.lower()
if name == 'command':
self.commands = value
elif name == 'max_wait':
self.max_wait = int(value)
elif name == 'max_msg_header':
self.max_msg_header = int(value)
elif name == 'max_msg_content':
self.max_msg_content = int(value)
elif name == 'max_data_total':
self.max_data_total = int(value)
elif name == 'max_data_client':
self.max_data_client = int(value)
elif name == 'max_http_total':
self.max_http_total = int(value)
elif name == 'max_http_client':
self.max_http_client = int(value)
elif name == 'error_details':
self.error_details = config_is_true(value)
except ConfigParser.NoSectionError:
pass
class Debug:
def __init__(self):
self.timing = False
self.names = {}
def from_config(self, config):
try:
for option in config.options('SHOW-DEBUG'):
name = option
self.names[name] = int(config.get('SHOW-DEBUG', name))
if name.lower() == 'timing':
self.timing = int(config.get('SHOW-DEBUG', name))
except ConfigParser.NoSectionError:
pass
class Log:
def __init__(self):
self.active = False
self.logdir = None
self.server = None
self.download = None
def from_config(self, config):
try:
for name, value in config.items('LOG'):
name = name.lower()
if name == 'active':
self.active = config_is_true(value)
elif name == 'logdir':
if not os.path.exists(value):
raise Exception, 'Log dir "%s" does not exist' % value
self.logdir = value
elif name == 'server':
self.server = value
elif name == 'download':
self.download = value
except ConfigParser.NoSectionError:
pass
if self.logdir:
if self.server:
self.server = os.path.join(self.logdir, self.server)
if self.download:
self.download = os.path.join(self.logdir, self.download)
class Watchdog:
def __init__(self):
self.active = True
self.renew_threads = True
self.memory_log = None
self.periodicity = 10
def from_config(self, config):
try:
for name, value in config.items('WATCHDOG'):
name = name.lower()
if name == 'active':
self.active = config_is_true(value)
elif name == 'memory_log':
self.memory_log = value
elif name == 'periodicity':
self.periodicity = int(value)
except ConfigParser.NoSectionError:
pass
class Track:
def __init__(self):
self.active = False
self.logdir = None
self.max_go_back = 3
self.faildir = None
def from_config(self, config):
try:
for name, value in config.items('TRACK'):
name = name.lower()
if name == 'active':
self.active = config_is_true(value)
elif name == 'logdir':
self.logdir = value
elif name == 'max_go_back':
self.max_go_back = int(value)
elif name == 'faildir':
self.faildir = value
except ConfigParser.NoSectionError:
pass
class Delays:
def __init__(self):
self.names = {}
self.names["*"] = 1
def from_config(self, config):
try:
for name in config.options("DELAYS"):
delay = float(config.get("DELAYS", name))
self.names[name] = delay
except ConfigParser.NoSectionError:
pass
def get_min_wait(self, resource):
if resource == None:
return 0
elif resource in self.names:
return self.names[resource]
else:
return self.names["*"]
def __init__(self, ini_file = None):
self.ini_file = ini_file
self.queues = {}
self.globals = {}
self.sections = {}
self.user_settings = {}
self.plugins = {}
self.thread_pools = {}
self.server = Configuration.Server()
self.security = Configuration.Security()
self.other_servers = Configuration.OtherServers()
self.delays = Configuration.Delays()
self.http = Configuration.Http()
self.debug = Configuration.Debug()
self.log = Configuration.Log()
self.watchdog = Configuration.Watchdog()
self.track = Configuration.Track()
global _single_config
if _single_config:
raise RuntimeError, 'The Core.Setup.Configuration class is a singleton. You cannot instantiate it again.'
else:
if ini_file and not os.path.exists(ini_file):
print '[INIT] Error: Config file %s does not exist.' % ini_file
else:
config = ConfigParser.ConfigParser()
if ini_file:
#print '[INIT] Reading configuration file', ini_file
config.read(ini_file)
#else:
# print '[INIT] No .ini file specified, using default settings.'
self.server.from_config(config)
self.security.from_config(config)
self.other_servers.from_config(config)
self.delays.from_config(config)
self.http.from_config(config)
self.debug.from_config(config)
self.log.from_config(config)
self.watchdog.from_config(config)
self.track.from_config(config)
try:
for param in config.options("GLOBALS"):
self.globals[param] = config.get("GLOBALS", param)
except ConfigParser.NoSectionError:
pass
try:
for queue_name in config.options("QUEUES"):
type = None
threads = 0
units = config.get("QUEUES", queue_name).split(";")
if len(units) and units[-1] == "":
del units[-1]
if not units:
raise Exception, "Queue defined without any declaration of queue type (fifo or priority) as a minimum."
elif len(units) == 1:
type = units[0]
elif len(units) == 2:
type, threads = units[0], int(units[1].split("=")[1].strip())
else:
raise Exception, "Too many values (separated by ';') found in definition of queue '%s'." % queue_name
self.queues[queue_name] = type
if threads:
if queue_name not in self.thread_pools:
self.thread_pools[queue_name] = []
pool = Configuration.ThreadPool(queue_name, queue_name, threads)
pool.initialize(config.get("QUEUES", queue_name), self.globals)
self.thread_pools[queue_name].append(pool)
except ConfigParser.NoSectionError:
pass
try:
for param in config.options("USER-SETTINGS"):
self.user_settings[param] = config.get("USER-SETTINGS", param)
except ConfigParser.NoSectionError:
pass
try:
for pool_name in config.options("THREAD-POOLS"):
pool = Configuration.ThreadPool(pool_name)
pool.initialize(config.get("THREAD-POOLS", pool.name), self.globals)
if pool.queue not in self.queues and pool.queue.lower() != "main":
raise Exception, "Thread pool '%s' mentions queue '%s' which is not found under the [QUEUES] section." % (pool.name, pool.queue)
if pool.queue not in self.thread_pools:
self.thread_pools[pool.queue] = []
self.thread_pools[pool.queue].append(pool)
except ConfigParser.NoSectionError:
pass
for section in config.sections():
self.sections[section] = {}
for option in config.options(section):
self.sections[section][option] = config.get(section, option)
if "main" not in self.thread_pools:
params = {}
for key in self.globals:
params[key] = self.globals[key]
self.thread_pools["main"] = [Configuration.ThreadPool("main", "main", 1, params)]
print "[INIT] No queues defined, creating 'main' with one worker." | mamba/setup.py |
import datetime
from functools import wraps
import jwt
from flask import make_response, jsonify, request
from app import app
from app.models.User import User
from app.models.BlackListedToken import BlackListedToken
class AuthToken:
@staticmethod
def get_token_config(user_id):
return {
'user_id': user_id,
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=30),
'iat': datetime.datetime.utcnow()
}
@staticmethod
def generate_token(user_id):
return jwt.encode(
AuthToken.get_token_config(user_id),
app.config['SECRETE_KEY'],
algorithm='HS256'
)
@staticmethod
def decode_auth_token(token):
try:
payload = jwt.decode(token, app.config['SECRETE_KEY'], algorithms=['HS256'])
if BlackListedToken.is_black_listed(token):
return 'Please sign in again.'
return payload['user_id']
except jwt.ExpiredSignatureError:
return 'Token expired. Please sign in again.'
except jwt.InvalidTokenError:
return 'Invalid token. Please sign in again.'
class ResponseCreator:
@staticmethod
def response_auth(status, message, token, user, status_code):
return make_response(jsonify({
'status': status,
'message': message,
'auth_token': token.decode('utf-8'),
'username': user.username,
'email': user.email
})), status_code
@staticmethod
def response(status, message, status_code):
return make_response(jsonify({
'status': status,
'message': message,
})), status_code
def login_required_jwt(f):
@wraps(f)
def decorated_function(*args, **kwargs):
token = None
if 'Authorization' in request.headers:
print("login_required_jwt executed")
token = request.headers['Authorization'].split(" ")[-1]  # tolerate an optional "Bearer " prefix
if not token or BlackListedToken.is_black_listed(token):
return make_response(jsonify({
'status': 'failed',
'message': 'Provide a valid auth token. Log in again please'
})), 403
user_id = AuthToken.decode_auth_token(token)
current_user = User.get_by_id(user_id)
return f(current_user, *args, **kwargs)
return decorated_function | app/auth/utilities.py | 0.396769 | 0.061989
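A minimal usage sketch for the helpers above, assuming a hypothetical Flask route; login_required_jwt resolves the token and passes the loaded user to the view as its first positional argument:

# Hypothetical route wired to the helpers above ('app' and 'jsonify' come
# from this module's own imports; the '/profile' path and user id 42 are
# made up for illustration).
@app.route('/profile')
@login_required_jwt
def profile(current_user):
    return jsonify({'username': current_user.username})

# Issue a token the client should send back in the Authorization header:
token = AuthToken.generate_token(42)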
import gym
import gym_pendrogone
import tensorflow as tf
import numpy as np
from agent import Agent
from ppo import rollouts_generator, add_vtarg_adv, render, Sensei
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
num_iterations = 1
sample_horizon = 100
# Learning hyperparameters
epochs = 1
batch_size = 10
learning_rate = 3e-4
# GAE params
gamma = 0.99
lam = 0.95
# PPO clipping hyperparameter (kept fixed)
epsilon = 0.2
def main():
env = gym.make('PendrogoneZero-v0')
# env = gym.make('DroneZero-v0')
continuous = isinstance(env.action_space, gym.spaces.Box)
# print(continuous)
ob_dim = env.observation_space.shape[0]
ac_dim = env.action_space.shape[0] if continuous else env.action_space.n
# print('ob_dim', ob_dim)
# print('ac_dim', ac_dim)
veronika = Agent(continuous, ob_dim, ac_dim, n_layers=2)
regina = Sensei(veronika, continuous, ob_dim, ac_dim,
epochs, batch_size,
learning_rate, epsilon)
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
generator = rollouts_generator(sess, veronika, env, sample_horizon)
for i in range(num_iterations):
seg = next(generator)
# print(seg["rew"])
add_vtarg_adv(seg, lam, gamma)
adv = seg["adv"]
adv = (adv - adv.mean()) / (adv.std() + 1e-8)
adv = adv[:, None]
regina.train_samples(sess, seg["ob"], seg["ac"], adv, seg["vtarg"], seg["log_probs"])
rewards = np.array(seg["ep_rets"])
if i % 10 == 0 or i == num_iterations-1:
if rewards.shape[0] > 0:
mean, std = rewards.mean(), rewards.std()
print(
'Iteration {0:3d}: reward: m{1:6.3f}, std{2:4.2f}; ep_len: {3:5.2f}; action: m:{4}, std:{5}'
.format( i, mean, std,
np.mean(seg["ep_lens"]),
np.mean(seg["ac"], axis=0), np.std(seg["ac"], axis=0) )
)
render(veronika, env, sess)
if __name__ == '__main__':
main() | test.py | 0.387574 | 0.287124
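add_vtarg_adv is imported from ppo.py and not shown here. For orientation, a generic sketch of the GAE(lambda) recursion such a helper typically implements; this is the textbook formulation, not necessarily the exact code in ppo.py (episode-boundary masking is omitted for brevity):

import numpy as np

def gae(rewards, values, last_value, gamma=0.99, lam=0.95):
    # delta_t = r_t + gamma * V(s_{t+1}) - V(s_t)
    # adv_t   = delta_t + gamma * lam * adv_{t+1}
    T = len(rewards)
    adv = np.zeros(T, dtype=np.float32)
    next_value, running = last_value, 0.0
    for t in reversed(range(T)):
        delta = rewards[t] + gamma * next_value - values[t]
        running = delta + gamma * lam * running
        adv[t] = running
        next_value = values[t]
    vtarg = adv + np.asarray(values, dtype=np.float32)  # value targets
    return adv, vtarg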
# http://codereview.stackexchange.com/questions/1526/finding-all-k-subset-partitions
# http://stackoverflow.com/questions/18353280/iterator-over-all-partitions-into-k-groups
# https://docs.python.org/3/library/itertools.html
# http://stackoverflow.com/questions/9316436/how-many-different-partitions-with-exactly-n-parts-can-be-made-of-a-set-with-k-e
# https://en.wikipedia.org/wiki/Partition_of_a_set
# http://math.stackexchange.com/questions/1215983/how-can-i-get-the-maximum-score-without-iterating-all-possibilities
# Core Library modules
import logging
logger = logging.getLogger(__name__)
def prepare_table(table):
"""
Make the table 'symmetric'.
The lower left part of the matrix is the reverse probability.
"""
n = len(table)
for i, row in enumerate(table):
assert len(row) == n, f"len(row) = {len(row)} != {n} = n"
for j, _ in enumerate(row):
if i == j:
table[i][i] = 0.0
elif i > j:
table[i][j] = 1 - table[j][i]
return table
def clusters(l, K): # noqa
"""
Partition list ``l`` in ``K`` partitions.
Examples
--------
>>> l = [0, 1, 2]
>>> list(clusters(l, K=3))
[[[0], [1], [2]], [[], [0, 1], [2]], [[], [1], [0, 2]], [[0], [], [1, 2]], [[], [0], [1, 2]], [[], [], [0, 1, 2]]]
>>> list(clusters(l, K=2))
[[[0, 1], [2]], [[1], [0, 2]], [[0], [1, 2]], [[], [0, 1, 2]]]
>>> list(clusters(l, K=1))
[[[0, 1, 2]]]
"""
if l:
prev = None
for t in clusters(l[1:], K):
tup = sorted(t)
if tup != prev:
prev = tup
for i in range(K):
yield tup[:i] + [
[l[0]] + tup[i],
] + tup[i + 1 :]
else:
yield [[] for _ in range(K)]
def neclusters(l, K): # noqa
"""Partition list ``l`` in ``K`` partitions, without empty parts.
>>> l = [0, 1, 2]
>>> list(neclusters(l, 2))
[[[0, 1], [2]], [[1], [0, 2]], [[0], [1, 2]]]
>>> list(neclusters(l, 1))
[[[0, 1, 2]]]
"""
for c in clusters(l, K):
if all(x for x in c):
yield c
def all_segmentations(l):
"""Get all segmentations of a list ``l``.
This gets bigger fast. See https://oeis.org/A000110
For len(l) = 14 it is 190,899,322
>>> list(all_segmentations([0, 1, 2]))
[[[0, 1, 2]], [[0, 1], [2]], [[1], [0, 2]], [[0], [1, 2]], [[0], [1], [2]]]
"""
for K in range(1, len(l) + 1):
gen = neclusters(l, K)
yield from gen
def find_index(segmentation, stroke_id):
"""
>>> find_index([[0, 1, 2], [3, 4], [5, 6, 7]], 0)
0
>>> find_index([[0, 1, 2], [3, 4], [5, 6, 7]], 1)
0
>>> find_index([[0, 1, 2], [3, 4], [5, 6, 7]], 5)
2
>>> find_index([[0, 1, 2], [3, 4], [5, 6, 7]], 6)
2
"""
for i, symbol in enumerate(segmentation):
for sid in symbol:
if sid == stroke_id:
return i
return -1
def q(segmentation, s1, s2):
"""Test if ``s1`` and ``s2`` are in the same symbol, given the
``segmentation``.
"""
index1 = find_index(segmentation, s1)
index2 = find_index(segmentation, s2)
return index1 == index2
class TopFinder:
"""Utility datastructure to find the top n elements."""
def __init__(self, n, find_min=False):
self.n = n
self.tops = []
self.find_min = find_min
def push(self, element, value):
"""Push an ``element`` into the datastrucutre together with its value
and only save it if it currently is one of the top n elements.
Drop elements if necessary.
"""
insert_pos = 0
for index, el in enumerate(self.tops):
if not self.find_min and el[1] >= value:
insert_pos = index + 1
elif self.find_min and el[1] <= value:
insert_pos = index + 1
self.tops.insert(insert_pos, [element, value])
self.tops = self.tops[: self.n]
def __iter__(self):
return self.tops.__iter__()
def score_segmentation(segmentation, table):
"""Get the score of a segmentation."""
stroke_nr = sum(1 for symbol in segmentation for stroke in symbol)
score = 1
for i in range(stroke_nr):
for j in range(i + 1, stroke_nr):
qval = q(segmentation, i, j)
if qval:
score *= table[i][j]
else:
score *= table[j][i]
return score
def normalize_segmentation(segmentation):
for i in range(len(segmentation)):
segmentation[i] = sorted(segmentation[i])
return sorted(segmentation, key=lambda x: x[0])
def get_top_segmentations(table, n):
"""
Parameters
----------
table : matrix of probabilities
Each cell (i, j) of `table` gives the probability that i and j are in
the same symbol.
n : int
Number of best segmentations which get returned
"""
stroke_count = list(range(len(table)))
topf = TopFinder(n)
for curr_segmentation in all_segmentations(stroke_count):
curr_seg_score = score_segmentation(curr_segmentation, table)
topf.push(curr_segmentation, curr_seg_score)
for el, score in topf:
yield [normalize_segmentation(el), score]
def main():
# Column0 1 2 3 4 5 6 7
table = [
[0.00, 0.55, 0.43, 0.30, 0.28, 0.74, 0.28, 0.26], # 0
[0.45, 0.00, 0.67, 0.40, 0.35, 0.77, 0.30, 0.31], # 1
[0.57, 0.33, 0.00, 0.29, 0.28, 0.80, 0.21, 0.23], # 2
[0.70, 0.60, 0.71, 0.00, 0.39, 0.76, 0.29, 0.29], # 3
[0.72, 0.65, 0.72, 0.61, 0.00, 0.76, 0.25, 0.29], # 4
[0.26, 0.23, 0.20, 0.24, 0.24, 0.00, 0.30, 0.31], # 5
[0.72, 0.70, 0.19, 0.71, 0.75, 0.70, 0.00, 0.27], # 6
[0.74, 0.69, 0.77, 0.71, 0.71, 0.69, 0.73, 0.00],
] # 7
# 0 1 2
# table = [[0.00, 0.01, 0.99],
# [0.99, 0.00, 0.01], # noqa
# [0.01, 0.99, 0.00]]
topfs = get_top_segmentations(table, 5)
for el, score in topfs:
print(f"{score:0.10f}: {el}")
for i in range(20):
logger.info(f"{i:>5}: {len(list(all_segmentations(list(range(i))))):>10}") | hwrt/partitions.py |
import pytest
import numpy as np
import pandas as pd
from .context import fitgrid
from fitgrid import fake_data, defaults
from fitgrid.errors import FitGridError
from fitgrid.epochs import Epochs
def test_epochs_unequal_snapshots():
epochs_table, channels = fake_data._generate(
n_epochs=10,
n_samples=100,
n_categories=2,
n_channels=32,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
)
epochs_table.drop(epochs_table.index[42], inplace=True)
with pytest.raises(FitGridError) as error:
Epochs(
epochs_table,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
channels=channels,
)
assert 'differs from previous snapshot' in str(error.value)
def test_raises_error_on_duplicate_channels():
epochs_table, channels = fitgrid.fake_data._generate(
n_epochs=10,
n_samples=100,
n_categories=2,
n_channels=32,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
)
dupe_channel = channels[0]
dupe_column = epochs_table[dupe_channel]
bad_epochs_table = pd.concat([epochs_table, dupe_column], axis=1)
with pytest.raises(FitGridError) as error:
fitgrid.epochs_from_dataframe(
bad_epochs_table,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
channels=channels,
)
assert "Duplicate column names" in str(error.value)
def test_raises_error_on_epoch_index_mismatch():
"""Bad: all epochs have the same shape, but indices differ."""
# strategy: generate epochs, but insert meaningless time index
epochs_table, channels = fake_data._generate(
n_epochs=10,
n_samples=100,
n_categories=2,
n_channels=32,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
)
# blow up index to misalign epochs and time
bad_index = np.arange(len(epochs_table))
epochs_table.index.set_levels(
levels=bad_index, level=defaults.TIME, inplace=True
)
epochs_table.index.set_codes(
codes=bad_index, level=defaults.TIME, inplace=True
)
# now time index is equal to row number in the table overall
with pytest.raises(FitGridError) as error:
Epochs(
epochs_table,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
channels=channels,
)
assert 'differs from previous snapshot' in str(error.value)
def test_multiple_indices_end_up_EPOCH_ID():
epochs_table, channels = fake_data._generate(
n_epochs=10,
n_samples=100,
n_categories=2,
n_channels=32,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
)
epochs_table.reset_index(inplace=True)
epochs_table.set_index(
[defaults.EPOCH_ID, defaults.TIME, 'categorical'], inplace=True
)
epochs = Epochs(
epochs_table,
time=defaults.TIME,
epoch_id=defaults.EPOCH_ID,
channels=channels,
)
# internal table has epoch_id in index
assert epochs.table.index.names == [defaults.EPOCH_ID]
# input table is not altered
assert epochs_table.index.names == [
defaults.EPOCH_ID,
defaults.TIME,
'categorical',
]
def test_smoke_plot_averages():
epochs = fake_data.generate()
epochs.plot_averages(channels=['channel0', 'channel1'])
def test_smoke_epochs_distances():
epochs = fake_data.generate()
epochs.distances() | tests/test_epochs.py | 0.786418 | 0.447943
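For context, a sketch of the table shape these tests expect: a long-format DataFrame whose (epoch, time) MultiIndex gives every epoch an identical time index. The index names here assume the fitgrid defaults; the data are made up:

import numpy as np
import pandas as pd

index = pd.MultiIndex.from_product(
    [[0, 1, 2], [0, 10, 20]], names=['epoch_id', 'time']
)
table = pd.DataFrame({'channel0': np.random.randn(9)}, index=index)
# Dropping a single row, as test_epochs_unequal_snapshots does, makes one
# time snapshot differ from the previous one and should raise FitGridError.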
from __future__ import absolute_import, division, print_function
from polyaxon_client.api.base import BaseApiHandler
from polyaxon_client.exceptions import PolyaxonClientException
from polyaxon_client.schemas import JobConfig, JobStatusConfig
class BuildJobApi(BaseApiHandler):
"""
Api handler to get build jobs from the server.
"""
ENDPOINT = "/"
def get_build(self, username, project_name, job_id):
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id)
try:
response = self.transport.get(request_url)
return self.prepare_results(response_json=response.json(), config=JobConfig)
except PolyaxonClientException as e:
self.transport.handle_exception(e=e, log_message='Error while retrieving build')
return None
def update_build(self, username, project_name, job_id, patch_dict, background=False):
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id)
if background:
self.transport.async_patch(request_url, json_data=patch_dict)
return None
try:
response = self.transport.patch(request_url, json_data=patch_dict)
return self.prepare_results(response_json=response.json(), config=JobConfig)
except PolyaxonClientException as e:
self.transport.handle_exception(e=e, log_message='Error while updating build')
return None
def delete_build(self, username, project_name, job_id, background=False):
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id)
if background:
self.transport.async_delete(request_url)
return None
try:
return self.transport.delete(request_url)
except PolyaxonClientException as e:
self.transport.handle_exception(e=e, log_message='Error while deleting build')
return None
def get_statuses(self, username, project_name, job_id, page=1):
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id,
'statuses')
try:
response = self.transport.get(request_url, params=self.get_page(page=page))
return self.prepare_list_results(response.json(), page, JobStatusConfig)
except PolyaxonClientException as e:
self.transport.handle_exception(
e=e, log_message='Error while retrieving build statuses')
return None
def stop(self, username, project_name, job_id, background=False):
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id,
'stop')
if background:
self.transport.async_post(request_url)
return None
try:
return self.transport.post(request_url)
except PolyaxonClientException as e:
self.transport.handle_exception(e=e, log_message='Error while stopping build')
return None
def bookmark(self, username, project_name, job_id, background=False):
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id,
'bookmark')
if background:
self.transport.async_post(request_url)
return None
try:
return self.transport.post(request_url)
except PolyaxonClientException as e:
self.transport.handle_exception(e=e, log_message='Error while bookmarking build')
return None
def unbookmark(self, username, project_name, job_id, background=False):
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id,
'unbookmark')
if background:
self.transport.async_delete(request_url)
return None
try:
return self.transport.delete(request_url)
except PolyaxonClientException as e:
self.transport.handle_exception(e=e, log_message='Error while unbookmarking build')
return None
def resources(self, username, project_name, job_id, message_handler=None):
"""Streams builds resources using websockets.
message_handler: handles the messages received from server.
e.g. def f(x): print(x)
"""
request_url = self.build_url(self._get_ws_url(),
username,
project_name,
'builds',
job_id,
'resources')
self.transport.socket(request_url, message_handler=message_handler)
# pylint:disable=inconsistent-return-statements
def logs(self, username, project_name, job_id, stream=True, message_handler=None):
"""Streams builds logs using websockets.
message_handler: handles the messages received from server.
e.g. def f(x): print(x)
"""
if not stream:
request_url = self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id,
'logs')
try:
return self.transport.get(request_url)
except PolyaxonClientException as e:
self.transport.handle_exception(e=e, log_message='Error while retrieving build logs')
return []
request_url = self.build_url(self._get_ws_url(),
username,
project_name,
'builds',
job_id,
'logs')
self.transport.stream(request_url, message_handler=message_handler)
def get_heartbeat_url(self, username, project_name, job_id):
return self.build_url(self._get_http_url(),
username,
project_name,
'builds',
job_id,
self.HEARTBEAT) | polyaxon_client/api/build_job.py | 0.513912 | 0.053999
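A usage sketch for the streaming endpoints above, following the docstrings' own message-handler example; build_job_api stands in for an already-configured BuildJobApi instance (its construction is not shown in this file, and the username, project, and job id are made up):

def print_log_line(message):
    print(message)

# Stream build logs over websockets, one callback per message:
build_job_api.logs('alice', 'my-project', 42,
                   stream=True, message_handler=print_log_line)

# Non-streaming variant returns the HTTP response instead:
response = build_job_api.logs('alice', 'my-project', 42, stream=False)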
from collections import Counter
from functools import partial
import numpy as np
import pandas as pd
from sklearn.metrics import roc_auc_score, roc_curve
from sklearn.utils.multiclass import unique_labels
from responsibly.fairness.metrics.utils import (
_assert_binary, _groupby_y_x_sens,
)
def _proportion(data, labels):
counts = Counter(data)
assert set(counts.keys()).issubset(labels)
return (counts[labels[1]]
/ (counts[labels[0]] + counts[labels[1]]))
def _get_labels(ys, labels):
if labels is None:
labels = unique_labels(ys)
else:
labels = np.asarray(labels)
if np.all([label not in ys for label in labels]):
raise ValueError('At least one label specified must be in y.')
return labels
def _normalize_by_attr(y_score, x_sens, ndigits=1):
y_score_within = y_score[:]
for indices in x_sens.groupby(x_sens).groups.values():
y_score_within[indices] = (y_score_within[indices]
.rank(pct=True))
y_score_within = (np.floor(y_score_within * (10**ndigits))
/ (10**ndigits))
return y_score_within
def independence_score(y_score, x_sens,
as_df=False):
"""Compute the independence criteria for score prediction.
In classification terminology, it is the **acceptance rate**
grouped by the score and the sensitive attribute.
:param y_score: Estimated target score as returned by a classifier.
:param x_sens: Sensitive attribute values corresponded to each
estimated target.
:param as_df: Whether to return the results as ``dict`` (if ``False``)
or as :class:`pandas.DataFrame`(if ``True``).
:return: Independence criteria.
:rtype: dict or :class:`pandas.DataFrame`
"""
criterion = pd.crosstab(index=y_score,
columns=x_sens,
normalize='columns')
if not as_df:
criterion = criterion.to_dict()
return criterion
def separation_score(y_true, y_score, x_sens,
labels=None,
as_df=False):
"""Compute the separation criteria for score prediction.
In classification terminology, it is the **FPR** and **TPR**
grouped by the score and the sensitive attribute.
:param y_true: Binary ground truth (correct) target values.
:param y_score: Estimated target score as returned by a classifier.
:param x_sens: Sensitive attribute values corresponded to each
estimated target.
:param as_df: Whether to return the results as ``dict`` (if ``False``)
or as :class:`pandas.DataFrame` (if ``True``).
:return: Separation criteria.
:rtype: dict or :class:`pandas.DataFrame`
"""
_assert_binary(y_true)
labels = _get_labels(y_score, labels)
criterion = pd.crosstab(index=y_score,
columns=[y_true, x_sens],
normalize=True)
if not as_df:
criterion = criterion.to_dict()
return criterion
def sufficiency_score(y_true, y_score, x_sens,
labels=None,
within_score_percentile=False,
as_df=False):
"""Compute the sufficiency criteria for score prediction.
In classification terminology, it is the **PPV** and the **NPV**
grouped by the score and the sensitive attribute.
:param y_true: Binary ground truth (correct) target values.
:param y_score: Estimated target score as returned by a classifier.
:param x_sens: Sensitive attribute values corresponded to each
target.
:param as_df: Whether to return the results as ``dict`` (if ``False``)
or as :class:`pandas.DataFrame` (if ``True``).
:return: Sufficiency criteria.
:rtype: dict or :class:`pandas.DataFrame`
"""
_assert_binary(y_true)
labels = _get_labels(y_true, labels)
if within_score_percentile:
y_score = _normalize_by_attr(y_score, x_sens,
within_score_percentile)
criterion = pd.crosstab(index=y_score,
columns=x_sens,
values=y_true,
aggfunc=partial(_proportion,
labels=labels))
if not as_df:
criterion = criterion.to_dict()
return criterion
def _all_equal(iterator):
iterator = iter(iterator)
try:
first = next(iterator)
except StopIteration:
return True
try:
return all(np.allclose(first, rest) for rest in iterator)
except ValueError:
return False
def roc_curve_by_attr(y_true, y_score, x_sens,
pos_label=None, sample_weight=None,
drop_intermediate=False):
"""Compute Receiver operating characteristic (ROC) by attribute.
Based on :func:`sklearn.metrics.roc_curve`
:param y_true: Binary ground truth (correct) target values.
:param y_score: Estimated target score as returned by a classifier.
:param x_sens: Sensitive attribute values corresponded to each
estimated target.
:param pos_label: Label considered as positive and others
are considered negative.
:param sample_weight: Sample weights.
:param drop_intermediate: Whether to drop some suboptimal
thresholds which would not appear on
a plotted ROC curve.
This is useful in order to create
lighter ROC curves.
:return: For each value of sensitive attribute:
- fpr - Increasing false positive rates such
that element i is the false positive rate
of predictions with score >= thresholds[i].
- tpr - Increasing true positive rates such
that element i is the true positive rate
of predictions with score >= thresholds[i].
- thresholds -
Decreasing thresholds on the decision function
used to compute fpr and tpr. thresholds[0] represents
no instances being predicted and is arbitrarily set
to max(y_score) + 1.
:rtype: dict
"""
grouped = _groupby_y_x_sens(y_true, y_score, x_sens)
roc_curves = {x_sens_value: roc_curve(group['y_true'],
group['y_score'],
pos_label=pos_label,
sample_weight=sample_weight,
drop_intermediate=drop_intermediate)
for x_sens_value, group in grouped}
if not _all_equal(thresholds
for _, _, thresholds in roc_curves.values()):
raise NotImplementedError('All the scores values should'
' appear for each sensitive'
' attribute value.'
' It will be implemented'
' in the future.'
' Please post your use-case in'
' https://github.com/ResponsiblyAI/responsibly/issues/15') # pylint: disable=line-too-long
return roc_curves
def roc_auc_score_by_attr(y_true, y_score, x_sens,
sample_weight=None):
"""Compute Area Under the ROC (AUC) by attribute.
Based on :func:`sklearn.metrics.roc_auc_score`
:param y_true: Binary ground truth (correct) target values.
:param y_score: Estimated target score as returned by a classifier.
:param x_sens: Sensitive attribute values corresponded to each
estimated target.
:param sample_weight: Sample weights.
:return: ROC AUC grouped by the sensitive attribute.
:rtype: dict
"""
grouped = _groupby_y_x_sens(y_true, y_score, x_sens)
return {x_sens_value: roc_auc_score(group['y_true'],
group['y_score'],
sample_weight=sample_weight)
for x_sens_value, group in grouped} | responsibly/fairness/metrics/score.py |
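A small end-to-end sketch of the score-based criteria above on made-up data, assuming the functions are importable from this module:

import pandas as pd

y_true = pd.Series([0, 1, 0, 1, 1, 0, 1, 0])
y_score = pd.Series([0.2, 0.8, 0.2, 0.8, 0.8, 0.2, 0.2, 0.8])
x_sens = pd.Series(['a', 'a', 'a', 'a', 'b', 'b', 'b', 'b'])

# Acceptance rate per score value within each group (columns sum to 1):
print(independence_score(y_score, x_sens, as_df=True))

# Proportion of positive outcomes per score value within each group:
print(sufficiency_score(y_true, y_score, x_sens, as_df=True))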
0.939359 | 0.612136 |
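The per-group AUC helper above is easiest to see on toy data. A minimal usage sketch with hypothetical data; pandas Series inputs are assumed here (the module's groupby-based helpers suggest pandas, but that is an inference, not documented API):

import pandas as pd
from responsibly.fairness.metrics.score import roc_auc_score_by_attr

# Hypothetical toy data: binary labels, classifier scores,
# and a two-valued sensitive attribute.
y_true = pd.Series([0, 1, 1, 0, 1, 0])
y_score = pd.Series([0.1, 0.9, 0.5, 0.2, 0.8, 0.4])
x_sens = pd.Series(['a', 'a', 'a', 'b', 'b', 'b'])

# One AUC per sensitive-attribute value, e.g. {'a': 1.0, 'b': 1.0}.
print(roc_auc_score_by_attr(y_true, y_score, x_sens))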
from __future__ import print_function
from __future__ import division
import sys
import os
import subprocess
import time
import datetime
import signal
def actuallyprint(msg):
print(msg)
sys.stdout.flush()
def main():
    seed = None
    output = None
    print("arguments", sys.argv)
    for arg in sys.argv[1:]:
        if arg.startswith("seed="):
            seed = arg[len("seed="):]
        elif arg.startswith("output="):
            output = arg[len("output="):]
        else:
            assert False, "unrecognized argument: " + arg
value_updates = []
for (name, value) in value_updates:
print("setting " + name + " to " + value)
# splice_value_into_bundle(name, value)
actuallyprint("Building executable...")
sys.stdout.flush()
dafny_cmd = ".dafny/dafny/Binaries/dafny /noVerify /spillTargetCode:3 /countVerificationErrors:0 /compileTarget:cpp lib/DataStructures/MutableMapImpl.i.dfy framework/NativeArithmetic.h framework/NativeArrays.h framework/LinearCongruentialGenerator.h"
actuallyprint(dafny_cmd)
ret = os.system(dafny_cmd)
assert ret == 0
dafny_cmd_2 = ".dafny/dafny/Binaries/dafny /noVerify /spillTargetCode:3 /countVerificationErrors:0 /compileTarget:cpp bench/MutableMap.dfy framework/NativeArithmetic.h framework/NativeArrays.h framework/LinearCongruentialGenerator.h"
actuallyprint(dafny_cmd_2)
ret = os.system(dafny_cmd_2)
assert ret == 0
cmd = "g++ -O3 bench/bench/MutableMap.cpp bench/run-mutable-map.cpp framework/NativeArithmetic.cpp -o MutableMapBench -I .dafny/dafny/Binaries/ -I lib/DataStructures/ -I bench -Ilib -std=c++17 -I."
actuallyprint(cmd)
ret = os.system(cmd)
assert ret == 0
# bitmask indicating which CPUs we can use
# See https://linux.die.net/man/1/taskset
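    # The mask "4" is binary 0b100, i.e. the benchmark is pinned to CPU 2 only;
    # a fixed core keeps scheduler migrations from adding noise to the timings.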
taskset_cmd = "taskset 4 "
with open(output, 'w') as f:
f.write("METADATA map perf comparison\n")
f.write("METADATA seed {}\n".format(seed))
command = taskset_cmd + "./MutableMapBench {} false".format(str(seed))
actuallyprint(command)
sys.stdout.flush()
result = subprocess.run(command, shell=True, preexec_fn=os.setsid,
universal_newlines=True, stdout=subprocess.PIPE)
f.write(result.stdout)
f.flush()
actuallyprint("done")
if __name__ == "__main__":
main() | tools/run-map-config-experiment.py |
0.371821 | 0.09426 |
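For reference, the driver above takes key=value arguments rather than flags, so a run can be launched like this (the script path comes from the record; the seed and output values are hypothetical):

import subprocess

subprocess.run(
    ["python3", "tools/run-map-config-experiment.py",
     "seed=12345", "output=/tmp/map-experiment.out"],
    check=True,
)
# The output file then contains the two METADATA lines followed by the
# benchmark's captured stdout.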
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ContactsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_contact(self, create_contact_parameters, **kwargs):
"""
Add Contact
Add a contact to your contact list. <br><br> Returns a contact object on success, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X POST -d \"{\\\"phone\\\": \\\"5555555555\\\"}\" https://$API_KEY@api.dialmycalls.com/2.0/contact ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_contact(create_contact_parameters, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param CreateContactParameters create_contact_parameters: Request body (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_contact_with_http_info(create_contact_parameters, **kwargs)
else:
(data) = self.create_contact_with_http_info(create_contact_parameters, **kwargs)
return data
def create_contact_with_http_info(self, create_contact_parameters, **kwargs):
"""
Add Contact
Add a contact to your contact list. <br><br> Returns a contact object on success, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X POST -d \"{\\\"phone\\\": \\\"5555555555\\\"}\" https://$API_KEY@api.dialmycalls.com/2.0/contact ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_contact_with_http_info(create_contact_parameters, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param CreateContactParameters create_contact_parameters: Request body (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['create_contact_parameters']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_contact" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'create_contact_parameters' is set
if ('create_contact_parameters' not in params) or (params['create_contact_parameters'] is None):
raise ValueError("Missing the required parameter `create_contact_parameters` when calling `create_contact`")
resource_path = '/contact'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'create_contact_parameters' in params:
body_params = params['create_contact_parameters']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/xml'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def delete_contact_by_id(self, contact_id, **kwargs):
"""
Delete Contact
Delete a contact from your contact list. <br><br> Returns the following if a valid identifier was provided, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X DELETE https://$API_KEY@api.dialmycalls.com/2.0/contact/$CONTACT_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_contact_by_id(contact_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str contact_id: ContactId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.delete_contact_by_id_with_http_info(contact_id, **kwargs)
else:
(data) = self.delete_contact_by_id_with_http_info(contact_id, **kwargs)
return data
def delete_contact_by_id_with_http_info(self, contact_id, **kwargs):
"""
Delete Contact
Delete a contact from your contact list. <br><br> Returns the following if a valid identifier was provided, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X DELETE https://$API_KEY@api.dialmycalls.com/2.0/contact/$CONTACT_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_contact_by_id_with_http_info(contact_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str contact_id: ContactId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['contact_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_contact_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'contact_id' is set
if ('contact_id' not in params) or (params['contact_id'] is None):
raise ValueError("Missing the required parameter `contact_id` when calling `delete_contact_by_id`")
resource_path = '/contact/{ContactId}'.replace('{format}', 'json')
path_params = {}
if 'contact_id' in params:
path_params['ContactId'] = params['contact_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/xml'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def get_contact_by_id(self, contact_id, **kwargs):
"""
Get Contact
Retrieve a contact from your contact list. <br><br> Returns a contact object if a valid identifier was provided, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://$API_KEY@api.dialmycalls.com/2.0/contact/$CONTACT_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contact_by_id(contact_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str contact_id: ContactId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_contact_by_id_with_http_info(contact_id, **kwargs)
else:
(data) = self.get_contact_by_id_with_http_info(contact_id, **kwargs)
return data
def get_contact_by_id_with_http_info(self, contact_id, **kwargs):
"""
Get Contact
Retrieve a contact from your contact list. <br><br> Returns a contact object if a valid identifier was provided, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://$API_KEY@api.dialmycalls.com/2.0/contact/$CONTACT_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contact_by_id_with_http_info(contact_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str contact_id: ContactId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['contact_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contact_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'contact_id' is set
if ('contact_id' not in params) or (params['contact_id'] is None):
raise ValueError("Missing the required parameter `contact_id` when calling `get_contact_by_id`")
resource_path = '/contact/{ContactId}'.replace('{format}', 'json')
path_params = {}
if 'contact_id' in params:
path_params['ContactId'] = params['contact_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/xml'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def get_contacts(self, **kwargs):
"""
List Contacts
Retrieve a list of contacts. <br><br> Returns a list of contact objects. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://$API_KEY@api.dialmycalls.com/2.0/contacts ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contacts(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str range: Range (ie \"records=201-300\") of contacts requested
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_contacts_with_http_info(**kwargs)
else:
(data) = self.get_contacts_with_http_info(**kwargs)
return data
def get_contacts_with_http_info(self, **kwargs):
"""
List Contacts
Retrieve a list of contacts. <br><br> Returns a list of contact objects. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://$API_KEY@api.dialmycalls.com/2.0/contacts ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contacts_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str range: Range (ie \"records=201-300\") of contacts requested
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['range']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contacts" % key
)
params[key] = val
del params['kwargs']
resource_path = '/contacts'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
if 'range' in params:
header_params['Range'] = params['range']
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/xml'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def get_contacts_by_group_id(self, group_id, **kwargs):
"""
List Contacts in Group
Retrieve a list of contacts in a contact group. <br><br> Returns a list of contact objects. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://$API_KEY@api.dialmycalls.com/2.0/contacts/$GROUP_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contacts_by_group_id(group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str group_id: GroupId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_contacts_by_group_id_with_http_info(group_id, **kwargs)
else:
(data) = self.get_contacts_by_group_id_with_http_info(group_id, **kwargs)
return data
def get_contacts_by_group_id_with_http_info(self, group_id, **kwargs):
"""
List Contacts in Group
Retrieve a list of contacts in a contact group. <br><br> Returns a list of contact objects. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X GET https://$API_KEY@api.dialmycalls.com/2.0/contacts/$GROUP_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_contacts_by_group_id_with_http_info(group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str group_id: GroupId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_contacts_by_group_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `get_contacts_by_group_id`")
resource_path = '/contacts/{GroupId}'.replace('{format}', 'json')
path_params = {}
if 'group_id' in params:
path_params['GroupId'] = params['group_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/xml'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'))
def update_contact_by_id(self, update_contact_by_id_parameters, contact_id, **kwargs):
"""
Update Contact
Update an existing contact in your contact list. <br><br> Returns a contact object if a valid identifier was provided and input validation passed, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X PUT -d \"{\\\"phone\\\": \\\"5555555555\\\"}\" https://$API_KEY@api.dialmycalls.com/2.0/contact/$CONTACT_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_contact_by_id(update_contact_by_id_parameters, contact_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param UpdateContactByIdParameters update_contact_by_id_parameters: Request body (required)
:param str contact_id: ContactId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_contact_by_id_with_http_info(update_contact_by_id_parameters, contact_id, **kwargs)
else:
(data) = self.update_contact_by_id_with_http_info(update_contact_by_id_parameters, contact_id, **kwargs)
return data
def update_contact_by_id_with_http_info(self, update_contact_by_id_parameters, contact_id, **kwargs):
"""
Update Contact
Update an existing contact in your contact list. <br><br> Returns a contact object if a valid identifier was provided and input validation passed, and returns an error otherwise. <br><br> ``` curl -i -H \"Content-Type: application/json\" -X PUT -d \"{\\\"phone\\\": \\\"5555555555\\\"}\" https://$API_KEY@api.dialmycalls.com/2.0/contact/$CONTACT_ID ```
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_contact_by_id_with_http_info(update_contact_by_id_parameters, contact_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param UpdateContactByIdParameters update_contact_by_id_parameters: Request body (required)
:param str contact_id: ContactId (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['update_contact_by_id_parameters', 'contact_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_contact_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'update_contact_by_id_parameters' is set
if ('update_contact_by_id_parameters' not in params) or (params['update_contact_by_id_parameters'] is None):
raise ValueError("Missing the required parameter `update_contact_by_id_parameters` when calling `update_contact_by_id`")
# verify the required parameter 'contact_id' is set
if ('contact_id' not in params) or (params['contact_id'] is None):
raise ValueError("Missing the required parameter `contact_id` when calling `update_contact_by_id`")
resource_path = '/contact/{ContactId}'.replace('{format}', 'json')
path_params = {}
if 'contact_id' in params:
path_params['ContactId'] = params['contact_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'update_contact_by_id_parameters' in params:
body_params = params['update_contact_by_id_parameters']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/xml'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/xml'])
# Authentication setting
auth_settings = ['api_key']
return self.api_client.call_api(resource_path, 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only')) | dialmycalls_client/apis/contacts_api.py | 0.644673 | 0.059374 |
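A minimal usage sketch for the generated client above. The ContactsApi class and its method signatures come from the file itself; the models import path and the CreateContactParameters constructor are assumptions based on the usual swagger-codegen package layout, and the phone number is a placeholder:

from dialmycalls_client.apis.contacts_api import ContactsApi
from dialmycalls_client.models.create_contact_parameters import CreateContactParameters  # assumed path

api = ContactsApi()  # falls back to the default Configuration/ApiClient

# Synchronous call: returns the deserialized response object directly.
contact = api.create_contact(CreateContactParameters(phone="5555555555"))

# Asynchronous call: passing callback= returns the request thread instead.
thread = api.get_contacts(callback=lambda response: print(response))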
import json
import glob
import os
import datetime
import dash
import dash_html_components as html
from dash.dependencies import Input, Output, State
from typegame.parsing.from_notebook import parse_notebook
from typegame.parsing.to_components import (
parse_leaderboard,
parse_question_list,
get_question_solution_layout,
)
from typegame.core.submission_class import Submission
from typing import List, Tuple, TYPE_CHECKING
if TYPE_CHECKING:
from typegame.core.question_class import Question # noqa: F401
def decorate_app(
app: dash.Dash, quiz_path: str, answer_path: str, type_only: bool = True
) -> None:
@app.callback(
Output("leaderboard", "children"),
[Input("interval-component", "n_intervals")],
)
def update_leaderboard(_):
js_files = glob.glob(os.path.join(answer_path, "*.json"))
        submissions = []
        for jf in js_files:
            # Use a context manager so each answer file is closed after reading.
            with open(jf) as handle:
                submissions.append(Submission(**json.load(handle)))
        return parse_leaderboard(submissions)
@app.callback(
Output("question_list", "children"), [Input("quiz_list", "value")]
)
def get_question_list_for_quiz(quiz_name):
if quiz_name is not None:
return parse_question_list(
parse_notebook(
os.path.join(quiz_path, f"{quiz_name}.ipynb"), type_only
)
)
@app.callback(
[
Output("output-state", "children"),
Output("warning-msg", "children"),
Output("question_list", "style"),
Output("form-submit", "style"),
],
[Input("submit-button", "n_clicks"), Input("quiz_list", "value")],
[State("name", "value"), State("question_list", "children")],
)
def evaluate_submission(n_clicks, quiz_name, name, answers):
        if not n_clicks or answers is None:
            # n_clicks is None before the first click in Dash, so guard with
            # truthiness instead of comparing None against 1.
            return [], [], {}, {}
question_list = parse_notebook(
os.path.join(quiz_path, f"{quiz_name}.ipynb"), type_only
)
parsed_answers = [
a["props"]["children"][2]["props"].get("value") for a in answers
]
return handle_submission(
question_list, parsed_answers, quiz_name, name, answer_path
)
def handle_submission(
question_list: List["Question"],
answers: list,
quiz_name: str,
name: str,
answer_path: str,
) -> Tuple:
correct_num = 0
solutions = []
missed = []
for idx, question in enumerate(question_list):
user_answer = answers[idx]
if user_answer is None:
missed.append(idx + 1)
else:
is_correct = question.answer == user_answer
correct_num += int(is_correct)
solutions.append(
get_question_solution_layout(
question, user_answer, idx, is_correct
)
)
if len(missed) > 0:
return (
[],
"You did not answer questions {}".format(
", ".join([str(m) for m in missed])
),
{},
{},
)
    submission_path = os.path.join(
        answer_path, f"{name}-{datetime.datetime.now()}.json"
    )
    # Write through a context manager so the file is flushed and closed.
    with open(submission_path, "w") as handle:
        json.dump(
            Submission(
                name=name,
                answers=answers,
                correct_answers=correct_num,
                quiz_name=quiz_name,
            ).dict(),
            handle,
        )
header = html.H2("{}/{}".format(correct_num, len(solutions)))
return (
html.Div([header, *solutions]),
"",
{"display": "none"},
{"display": "none"},
    ) | typegame/core/callbacks.py | 0.52902 | 0.168617 |
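decorate_app only registers callbacks; a minimal, hypothetical wiring of the Dash layout it expects (component ids mirror the Outputs/Inputs above, paths are placeholders):
import dash
import dash_core_components as dcc
import dash_html_components as html

app = dash.Dash(__name__)
app.layout = html.Div([
    dcc.Interval(id="interval-component", interval=5000),
    dcc.Dropdown(id="quiz_list", options=[]),
    dcc.Input(id="name"),
    html.Div(id="question_list"),
    html.Button("Submit", id="submit-button", n_clicks=0),
    html.Div(id="form-submit"),
    html.Div(id="output-state"),
    html.Div(id="warning-msg"),
    html.Div(id="leaderboard"),
])
decorate_app(app, quiz_path="quizzes", answer_path="answers")
app.run_server(debug=True)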
# Part 1: Run Qiime2 on Hardac
# Info to collect
# a. Library protocol, EMP or not? (515-806, multiple gg reference seq to choose for taxonomy)
# b. Reverse complement barcodes or not? (for demux)
# c. Read length (for trimming, dada2)
# d. Reverse complemented mapping barcodes? (for demux)
# e. Mapping files (prefix+map.txt)
# 1) run LeftJoinTablesOnFirstCol.py to merge the mapping txt file with the experiment design tables (see the pandas sketch after this list).
# a) save the files to be merged into one folder; the first column contains the primary IDs used for merging.
# b) run the Python script to merge the files; when prompted, enter the folder path.
# c) check the root folder for the merged file.
# Sample 34: treatment = NA; Holly looked it up in REDCap and changed NA to lifestyle.
# 2) check the experiment design balance with TableStats.py; when prompted, enter the input file path, the output csv file path and the export pdf file path.
# 3) add a group + sample name column; it will be useful for graphing purposes.
# 4) https://docs.google.com/spreadsheets -> new sheet -> copy and paste content -> Add-ons -> Keemei -> validate Qiime2 metadata: checks the validity of the meta file.
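# A minimal pandas sketch of the first-column left join that LeftJoinTablesOnFirstCol.py
# presumably performs (folder and file names below are placeholders):
import pandas as pd, glob, functools
dfs = [pd.read_csv(f, sep='\t') for f in glob.glob('merge_folder/*.txt')]
merged = functools.reduce(
    lambda left, right: left.merge(right, how='left',
                                   left_on=left.columns[0], right_on=right.columns[0]),
    dfs)
merged.to_csv('merged_map.txt', sep='\t', index=False)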
# Login to Hardac [hardac-login.genome.duke.edu]
## Part 2: Make folders with 00_initialize.sh:
newgrp omicscore
umask 007
# make the folder structure for the analysis (a Python sketch of the resulting layout follows the list below)
# download from gitlab and sftp 00_initialization.sh to /data/omicscore/projectFolder
cd /data/omicscore/Rawls-Rawls-20190118
sh 00_initialization.sh
# a. script, script/log : all slurm scripts are stored under the script folder; output from the scripts is stored in the log folder.
# b. rawData/ stores the original fastq.gz files; data/"barcodes.fastq.gz", "forward.fastq.gz" and "reverse.fastq.gz" : the data folder stores the raw data under these exact filenames and should contain only those 3 files.
# c. meta/${PREFIX}map.txt : stores the mapping file; the sample filter column can be used to filter samples downstream (e.g. separate samples from different projects)
# i. #SampleID, BarcodeSequence, LinkerPrimerSequence, condition/groups, platform (MiSeq150PE), libProtocol(EMP16s_515_806), seqOrder(#), samplefilter (to be added later to filter out samples)
# ii. #q2:types, numeric, categorical
# iii. Tab separated
# d. qza, qzv : the qza folder stores all data files from Qiime2 methods; the qzv folder stores all visualization files.
# e. export/: export-txt, export-fastq, export-nwk, export-biom
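# A minimal Python sketch of the directory layout described in a.-e. above, which
# 00_initialization.sh presumably creates:
import os
for sub in ['script/log', 'rawData', 'data', 'meta', 'qza', 'qzv', 'export/export-txt',
            'export/export-fastq', 'export/export-nwk', 'export/export-biom']:
    os.makedirs(sub, exist_ok=True)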
## Part 3A: Sftp sequencing data from dnaseqcore server
# go to data folder where the fastq files need to be saved.
# copy the raw data into folder from duke box
cd /data/omicscore/Rawls-Rawls-20190118/data/
# link R1, R2 and I1 to "../data/barcodes.fastq.gz", "forward.fastq.gz" and "reverse.fastq.gz"
ln -s /data/omicscore/Rawls-Rawls-20190118/rawData/4851-P1_S1_L001_R1_001.fastq.gz forward.fastq.gz
ln -s /data/omicscore/Rawls-Rawls-20190118/rawData/4851-P1_S1_L001_R2_001.fastq.gz reverse.fastq.gz
ln -s /data/omicscore/Rawls-Rawls-20190118/rawData/4851-P1_S1_L001_I1_001.fastq.gz barcodes.fastq.gz
## Part 3B: copy mapping file to meta file folder
sftp *map.txt /data/omicscore/projectFolder/meta/
## Part 4: lines to run at the beginning of each run
# Get files ready, data file, map file
newgrp omicscore
umask 007
# go to script folder
cd /data/omicscore/Rawls-Rawls-20190118/script
# load current version of qiime2
source /data/common/qiime2/miniconda/bin/activate qiime2-2018.11
# start sbatch runs
srun -p interactive --pty --mem 4096 /bin/bash
# define file paths
export WKPATH="/data/omicscore/Rawls-Rawls-20190118"
export PREFIX="Rawls-Rawls-20190118-R24_"
export REFPATH="/data/omicscore/Qiime/reference"
# run sbatch scripts
sbatch 01-02_pair_import-demux.slurm
# check 02_pair-demux.qzv
# 1) sample total # of reads
# 2) parameters for dada2: trim left and right for forward and reverse reads
sbatch 03_pair_dada2.slurmA
sbatch 04_filter-taxa_silva.slurm
sbatch 05_featureTable.slurm
sbatch A06-09_algn-to-root_raxml.slurm
sbatch A10-11_core-metrics_alpha_raxml.slurm
# check txt files
# check visualization files
sbatch B11_taxonomy_silva.slurm
(qiime2-2018.11) [zwei@x2-01-1 reference]$ wget http://kronos.pharmacology.dal.ca/public_files/tutorial_datasets/picrust2_tutorial_files/reference.fna.qza
--2019-03-15 11:11:15-- http://kronos.pharmacology.dal.ca/public_files/tutorial_datasets/picrust2_tutorial_files/reference.fna.qza
Resolving kronos.pharmacology.dal.ca... 192.168.3.11
Connecting to kronos.pharmacology.dal.ca|192.168.3.11|:80... connected.
HTTP request sent, awaiting response... 200 OK
Length: 4408600 (4.2M)
Saving to: 'reference.fna.qza'
reference.fna.qza 100%[============================================================================================>] 4.20M 4.16MB/s in 1.0s
2019-03-15 11:11:16 (4.16 MB/s) - 'reference.fna.qza' saved [4408600/4408600]
(qiime2-2018.11) [zwei@x2-01-1 reference]$ wget http://kronos.pharmacology.dal.ca/public_files/tutorial_datasets/picrust2_tutorial_files/reference.tre.qza
--2019-03-15 11:11:36-- http://kronos.pharmacology.dal.ca/public_files/tutorial_datasets/picrust2_tutorial_files/reference.tre.qza
Resolving kronos.pharmacology.dal.ca... 192.168.3.11
Connecting to kronos.pharmacology.dal.ca|192.168.3.11|:80... connected.
HTTP request sent, awaiting response... 200 OK
Length: 191187 (187K)
Saving to: 'reference.tre.qza'
reference.tre.qza 100%[============================================================================================>] 186.71K 773KB/s in 0.2s
2019-03-15 11:11:37 (773 KB/s) - 'reference.tre.qza' saved [191187/191187]
(qiime2-2018.11) [zwei@x2-01-1 reference]$ qiime fragment-insertion sepp --i-representative-sequences $WKPATH/qza/${PREFIX}04_seq.fasta.qza --p-threads 1 --i-reference-alignment $REFPATH/reference.fna.qza --i-reference-phylogeny $REFPATH/reference.tre.qza --output-dir $WKPATH/placed_out_for_picrust
Saved Phylogeny[Rooted] to: /data/omicscore/Rawls-Rawls-20190118/placed_out_for_picrust/tree.qza
Saved Placements to: /data/omicscore/Rawls-Rawls-20190118/placed_out_for_picrust/placements.qza
(qiime2-2018.11) [zwei@x2-01-1 script]$ sbatch C12_picrust2_tempTest.slurm
https://github.com/picrust/picrust2/wiki/q2-picrust2-Tutorial
https://github.com/picrust/picrust2/wiki
https://picrust.github.io/picrust/tutorials/algorithm_description.html
[zwei@hardac-login omicscore]$ srun -p interactive --mem=64G --pty bash
[zwei@x2-01-1 omicscore]$ module load ddsclient
[zwei@x2-01-1 omicscore]$ ddsclient upload -p R24-16S-Results /data/omicscore/R24-16S-Results/
[zwei@x2-01-1 omicscore]$ ddsclient upload -p R24-16S /data/omicscore/R24-16S/
Uploading 0 projects, 0 folders, 1 file.
Progress: 100% - sending R24-16S-20190118__Microbiome_Data_Analysis_Report-ResulDone: 100%
Upload Report for Project: 'R24-16S-Results' 2019-03-26 20:26:16.309123
SENT FILENAME ID SIZE HASH
/data/omicscore/R24-16S-Results/20190118/R24-16S-20190118__Microbiome_Data_Analysis_Report-Results.docx 4ffb51ab-ae5d-4c98-9aca-27ba8dd97079 65086 0e916dd69ef8e8ccf27a8da1277f50e3
URL to view project: https://dataservice.duke.edu/#/project/dbd4269d-97da-44cf-875e-47f6e44a56b4 | 1_process_fastqs/RawlsRawls20190118_cmd_workflow_notes_EMP.py | 0.475605 | 0.26322 |
import mpmath as mp
import numpy as np
from .fsframe import FSFrame
from .lgfs import FreeLGEvenFSFrame, FreeLGOddFSFrame
"""FSFrame for Hermite-Gaussian beams.
This is based on FreeLGFSFrame.
"""
class HGFSFrame(FSFrame):
def __init__(self, k, w0, nx=0, ny=0):
self.wavenumber = k
self.k = k
self.w0 = w0
self.nx, self.ny = nx, ny
def tm_maclaurin(self, *args, **kwargs):
pass
def te_maclaurin(self, *args, **kwargs):
pass
def S(self, w):
nx, ny = self.nx, self.ny
def s_term(s):
return (-1) ** s * mp.binomial(nx, s) * mp.binomial(ny, w - s)
return sum(map(s_term, [s for s in range(w + 1)]))
def C(self, q):
nx, ny = self.nx, self.ny
delta = 2 if (2 * q) == (nx + ny) else 1
return (-1) ** (ny // 2) * mp.sqrt(
mp.gammaprod(
[q + 1, nx + ny - q + 1],
[nx + 1, ny + 1]
) / delta / mp.power(2, nx + ny + 1)
)
def B(self, q):
nx, ny = self.nx, self.ny
return self.C(q) * (
self.S(q) + (-1) ** nx
* self.S(nx + ny - q)
)
def lg_mode(self, p, l, even=True):
args = [self.k, self.w0]
kwargs = {"p": p, "l": l}
LGFrame = FreeLGEvenFSFrame if even else FreeLGOddFSFrame
return LGFrame(*args, **kwargs)
def bsc(self, n, m, mode="tm"):
if n < abs(m): return 0
nx, ny = self.nx, self.ny
if ((nx + ny + 1) % 2) != (m % 2) or abs(nx + ny + 1) < abs(m):
return 0
even = (ny % 2) == 0
lg = lambda p, l: self.lg_mode(p, l, even=even)
def term(q):
return self.B(q) * (lg(q, nx + ny - 2 * q).bsc(n, m, mode=mode))
return sum(map(term, [q for q in range(nx // 2 + ny // 2 + 1)]))
def make_field(self, *args, **kwargs):
nx, ny = self.nx, self.ny
max_m = nx + ny + 1
degrees = np.arange(-max_m, max_m + 1, 2)
return super().make_field(
*args, k=self.k, degrees=degrees, **kwargs
    ) | glmtech/fs/hgfs.py | 0.538498 | 0.218899 |
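A hypothetical usage of HGFSFrame from the record above; all parameter values are illustrative, not from the source:
frame = HGFSFrame(k=5.9, w0=2.0, nx=1, ny=1)  # arbitrary wavenumber and beam waist
a_31 = frame.bsc(3, 1, mode="tm")             # TM beam-shape coefficient for n=3, m=1
# field = frame.make_field(...)               # remaining arguments depend on FSFrame.make_field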
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.ticker import AutoMinorLocator
from matplotlib.ticker import MultipleLocator
import importlib
import sys
import os
if not '../aux/' in sys.path: sys.path.append('../aux/')
import paths; importlib.reload(paths)
import spec; importlib.reload(spec)
import nessy; importlib.reload(nessy)
import auxsys; importlib.reload(auxsys)
import auxplt; importlib.reload(auxplt)
import phys; importlib.reload(phys)
from tqdm import tqdm
wvl3 = np.arange(600, 621, 1) * 10.0
opa3 = np.array([-4134, -3763, -3575, -3433, -3305, -3170, -3040, -2881, -2645, -956,
-4054, -3708, -3506, -3357, -3224, -3081, -2912, -2689, -2239, -179, 0.0])
opa3 = 10.0**(opa3 / 1000.0)
#wvl1, opa1 = nessy.read_lopa(paths.it0f + '/atlodf/old/fal/base', wvl1 = 6000., wvl2 = 6200)
wvl2, opa2 = np.loadtxt('600_620v2.dat', unpack = True)
#np.savez(paths.npz + 'unsorted_opac', w = wvl1, o = opa1)
opac = np.load(paths.npz + 'unsorted_opac.npz')
wvl1 = opac['w']
opa1 = opac['o']
n = np.loadtxt(paths.it0f + '/atlodf/old/fal/base/atm.inp', usecols = [3])
apm = 2.137995438028139e-024
opa2 *= n[54] * apm
opa3 *= n[54] * apm
wvl2 *= 10.0
#wvl2 = phys.vac_to_air(wvl2)
step = 100
xbin = [[i, i + step] for i in np.arange(6000, 6200, step)]
pdfs = ''
os.system('rm ' + paths.figdir + 'plt_opac/*.pdf')
for i in tqdm(range(len(xbin))):
name = str(xbin[i][0]) + '_' + str(xbin[i][1])
pdfs += name + '.pdf '
plt.close('all')
fig, ax = plt.subplots(nrows = 1, ncols = 1, figsize = (10, 10))
# ax.plot(wvl1, opa1[54, :], label = 'FIOSS', color = 'k', alpha = 0.5)
# ax.plot(wvl2, opa2, label = 'ATLAS', color = 'orange', alpha = 0.5)
idx1 = np.where((wvl1 >= xbin[i][0]) & (wvl1 <= xbin[i][1]))
idx2 = np.where((wvl2 >= xbin[i][0]) & (wvl2 <= xbin[i][1]))
wvl1s = wvl1[idx1]
wvl2s = wvl2[idx2]
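    # sort the opacities inside this window before sub-bin averaging; this presumably
    # emulates an ODF-style (opacity distribution function) comparison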
opa1s = np.sort(opa1[54, idx1])
opa2s = np.sort(opa2[idx2])
    xbina = [[w, w + 10] for w in np.arange(xbin[i][0], xbin[i][1], 10)]  # 10 A sub-bins
wvlm = np.zeros(len(xbina) + 1)
opa1m = np.zeros(len(xbina) + 1)
opa2m = np.zeros(len(xbina) + 1)
for j in range(len(xbina)):
idxa1 = np.where((wvl1s >= xbina[j][0]) & (wvl1s <= xbina[j][1]))
idxa2 = np.where((wvl2s >= xbina[j][0]) & (wvl2s <= xbina[j][1]))
# wvlm[j] = (xbina[j][0] + xbina[j][1]) / 2.0
wvlm[j] = xbina[j][0]
opa1m[j] = np.mean(opa1s[0, idxa1])
opa2m[j] = np.mean(opa2s[idxa2])
if j == len(xbina) - 1:
wvlm[len(xbina)] = xbina[j][1]
# plt.step(wvlm, opa1m, where = 'post', color = 'k')
plt.step(wvlm, opa2m, where = 'post', color = 'r')
plt.step(wvl3, opa3, where = 'post', color = 'purple')
# plt.plot(wvlm, opa1m, color = 'k')
# plt.plot(wvlm, opa2m, color = 'r')
# ax.plot(wvl1s, opa1s[0, :], color = 'k')
# ax.plot(wvl2s, opa2s, color = 'r')
# opa1m = np.mean(opa1[54, idx1])
# opa2m = np.mean(opa2[idx2])
# ax.axhline(y = opa1m, color = 'k', linestyle = '--')
# ax.axhline(y = opa2m, color = 'r', linestyle = '--')
ax.set_xlim(xbin[i][0], xbin[i][1])
ax.set_xlabel('Wavelength, A')
ax.set_ylabel(r'Opacity, cm$^{-1}$')
ax.set_yscale('log')
ax.xaxis.set_major_locator(MultipleLocator(10))
# leg = ax.legend(framealpha = 1, loc = 2, handletextpad = 1, prop = {'size': 7.5}, bbox_to_anchor=(0, 1.08))
# for obj in leg.legendHandles: obj.set_linewidth(3.0)
auxplt.savepdf(name, paths.figdir + 'plt_opac/')
os.chdir(paths.figdir + 'plt_opac/')
os.system('pdftk ' + pdfs + ' output overall.pdf')
os.chdir(paths.mscdir) | msc/comp_unsorted_opac.py | 0.189671 | 0.246443 |
import os
import numpy as np
from type.cell.function_cell_type import FunctionCellType
from type.cell.cell_type_pmf import CellTypePMF
from block_extractor.psl.features_v2 import Block2FeatV2
from utils.psl_utils import *
from block_extractor.block_extractor import BlockExtractor
from block_extractor.block_extractor_c2v import BlockExtractorC2V
from block_extractor.block_extractor_c2v_pretrain import BlockExtractorC2VPretrain
from type.block.simple_block import SimpleBlock
from type.block.block_type_pmf import BlockTypePMF
from type.block.function_block_type import FunctionBlockType
from typing import List
from reader.sheet import Sheet
import pandas as pd
class BlockExtractorPSLV2(BlockExtractor):
def __init__(self, model_file, config, beta=0.01, lmd=10):
if ("use_rnn" in config["block_extractor"]) and config["block_extractor"]["use_rnn"]:
self.c2v_model = BlockExtractorC2VPretrain(model_file, config)
else:
self.c2v_model = BlockExtractorC2V(model_file)
psl_name_file = config['block_extractor']['block_extractor_name']
self.psl_pred_file = config['block_extractor']['predicate_file']
self.psl_rule_file = config['block_extractor']['learned_rule_file']
self.psl_eval_data_path = config['block_extractor']['eval_path']
if not os.path.exists(self.psl_eval_data_path):
os.makedirs(self.psl_eval_data_path, exist_ok=True)
self.model = Model(psl_name_file)
self.feat = Block2FeatV2(beta, lmd, config['psl']['num_process'],
config['psl']['num_tree'])
def convert2cell(self, blocks, r, c):
pred = np.empty((r, c), dtype=CellTypePMF)
for block in blocks:
lx, ly = block.top_row, block.left_col
rx, ry = block.bottom_row, block.right_col
lab = block.block_type.get_best_type().str()
for i in range(lx, rx + 1):
for j in range(ly, ry + 1):
cell_class_dict = {
FunctionCellType.inverse_dict[lab]: 1.0
}
pred[i][j] = CellTypePMF(cell_class_dict)
return pred
def select_blks(self, labels, mr, mc):
blocks = []
for blk in labels.keys():
(lx, ly, rx, ry) = blk
typ = labels[blk][0]
temp = SimpleBlock(BlockTypePMF(
{FunctionBlockType.inverse_dict[typ]: 1.0}
),
ly, ry, lx, rx)
blocks.append(temp)
return blocks
def __read_label_df(self, df):
block_dic = {}
for (_, idx, li, lj, ri, rj, an, val) in df.itertuples(name=None):
if idx not in block_dic:
block_dic[idx] = {}
if (li, lj, ri, rj) not in block_dic[idx]:
block_dic[idx][(li, lj, ri, rj)] = (an, float(val))
elif block_dic[idx][(li, lj, ri, rj)][1] < float(val):
block_dic[idx][(li, lj, ri, rj)] = (an, float(val))
return block_dic
def __predict_wrapper(self, lab_pred, sheets):
label_dic = self.__read_label_df(lab_pred)
preds = []
for i in range(len(sheets)):
mr, mc = sheets[i].values.shape
pred = self.select_blks(label_dic[i], mr, mc)
preds.append(pred)
return preds
def generate_feats(self, sheets, tags, c2v_celltypes=None):
self.feat.write_feats(sheets, tags, None, c2v_celltypes,
self.psl_pred_file, self.psl_eval_data_path)
get_predicates(self.model, self.psl_pred_file)
add_data(self.model, self.psl_eval_data_path)
get_rules(self.model, self.psl_rule_file)
results = self.model.infer()
label_pred = results[self.model.get_predicate(self.feat.pred_name)]
return label_pred
def postprocessv2(self, blocks, sheet):
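        # Two-pass merge of adjacent blocks that share a functional type:
        # pass 1 groups blocks by top row and merges horizontal neighbours,
        # pass 2 groups the result by left column and merges vertical neighbours.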
pos_dic = {}
for blk in blocks:
lx = blk.top_row
if lx not in pos_dic:
pos_dic[lx] = []
pos_dic[lx].append(blk)
new_dic = {}
new_pos_dic = {}
for lx in pos_dic:
blk_list = sorted(pos_dic[lx], key=lambda x:x.left_col)
temp_blk = None
new_dic[lx] = []
for i in range(len(blk_list)):
if temp_blk is None:
temp_blk = blk_list[i]
else:
assert temp_blk.right_col + 1 == blk_list[i].left_col
assert temp_blk.top_row == blk_list[i].top_row
assert temp_blk.bottom_row == blk_list[i].bottom_row
if temp_blk.block_type.get_best_type().str() == blk_list[i].block_type.get_best_type().str():
new_blk = SimpleBlock(
temp_blk.block_type,
temp_blk.get_left_col(), blk_list[i].get_right_col(),
temp_blk.get_top_row(), temp_blk.get_bottom_row())
temp_blk = new_blk
else:
new_dic[lx].append(temp_blk)
temp_blk = blk_list[i]
if temp_blk is not None:
new_dic[lx].append(temp_blk)
for k in new_dic.keys():
for blk in new_dic[k]:
if blk.left_col not in new_pos_dic:
new_pos_dic[blk.left_col] = []
new_pos_dic[blk.left_col].append(blk)
ret_list = []
for k in new_pos_dic:
blk_list = sorted(new_pos_dic[k], key=lambda x:x.top_row)
temp_blk = None
for i in range(len(blk_list)):
if temp_blk is None:
temp_blk = blk_list[i]
else:
if (temp_blk.bottom_row + 1 == blk_list[i].top_row) and (temp_blk.left_col == blk_list[i].left_col) and (temp_blk.right_col == blk_list[i].right_col) and (temp_blk.block_type.get_best_type().str() == blk_list[i].block_type.get_best_type().str()):
new_blk = SimpleBlock(
temp_blk.block_type,
temp_blk.get_left_col(), temp_blk.get_right_col(),
temp_blk.get_top_row(), blk_list[i].get_bottom_row())
temp_blk = new_blk
else:
ret_list.append(temp_blk)
temp_blk = blk_list[i]
if temp_blk is not None:
ret_list.append(temp_blk)
return ret_list
def extract_blocks(self, sheet: Sheet, tags: 'np.array[CellTypePMF]') -> List[SimpleBlock]:
c2v_tags = self.c2v_model.extract_blocks(sheet, None)
r, c = sheet.values.shape
c2v_celltypes = self.convert2cell(c2v_tags, r, c)
lab_pred = self.generate_feats([sheet], [tags], [c2v_celltypes])
all_blocks = self.__predict_wrapper(lab_pred, [sheet])
assert len(all_blocks) == 1
return self.postprocessv2(all_blocks[0], sheet)
def extract_blocks_all_tables(self, sheets, tags):
c2v_tags = self.c2v_model.extract_blocks_all_tables(sheets, None)
c2v_celltypes = [self.convert2cell(c2v_tags[i], sheets[i].values.shape[0],
sheets[i].values.shape[1]) for i in range(len(sheets))]
lab_pred = self.generate_feats(sheets, tags, c2v_celltypes)
all_blocks = self.__predict_wrapper(lab_pred, sheets)
new_blocks = []
for i, blocks in enumerate(all_blocks):
# post process: merging smaller blocks with the same functional type
new_blocks.append(self.postprocessv2(blocks, sheets[i]))
return new_blocks
def reset(self):
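        # NOTE: __init__ takes (model_file, config); calling it with no arguments raises TypeError.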
        self.__init__() | block_extractor/block_extractor_psl_v2.py | 0.419291 | 0.241859 |
import string
import subprocess
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
community = 'public'
file = open('ips.txt', 'r')
for line in file:
constructor = line.split(":")[0]
ip = line.split(":")[1]
ip = ip.split("\n")[0]
if constructor == "F5":
bashCommand = "snmpget -O q -v2c -c "+community+" "+ip+" 1.3.6.1.4.1.3375.2.1.14.3.1.0"
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
stat = process.communicate()[0]
if stat != "":
stat = stat.split(" ")[1]
stat = stat.split("\n")[0]
            if stat == '4':  # use '==' for string comparison, not 'is'
                print constructor + " : " + ip + " => " + bcolors.OKGREEN + "Active" + bcolors.ENDC
            elif stat == '3':
                print constructor + " : " + ip + " => " + bcolors.WARNING + "Standby" + bcolors.ENDC
            else:
                print constructor + " : " + ip + " => " + bcolors.HEADER + "Unknown" + bcolors.ENDC
elif constructor == "Radware":
bashCommand = "snmpwalk -O q -v2c -c "+community+" "+ip+" .1.3.6.1.4.1.1872.2.6.1.2.1.3.1.2"
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
if stat != "":
result = process.communicate()[0]
result = result.split("\n")
result.pop()
bashCommand = "snmpwalk -O q -v2c -c "+community+" "+ip+" .1.3.6.1.4.1.1872.2.6.1.2.1.3.1.4"
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
result2 = process.communicate()[0]
result2 = result2.split("\n")
result2.pop()
i=0
for vADC in result:
stat = result2[i].split(" ")[1]
vADC = vADC.split('"')[1]
                if stat == '5':
                    print constructor + " : " + vADC + " => " + bcolors.FAIL + "Offline" + bcolors.ENDC
                elif stat == '6':
                    print constructor + " : " + vADC + " => " + bcolors.OKGREEN + "Active" + bcolors.ENDC
                elif stat == '7':
                    print constructor + " : " + vADC + " => " + bcolors.WARNING + "Standby" + bcolors.ENDC
                else:
                    print constructor + " : " + vADC + " => " + bcolors.HEADER + "Unknown" + bcolors.ENDC
i+=1
elif constructor == "Juniper":
bashCommand = "snmpgetnext -O q -v2c -c "+community+" "+ip+" 1.3.6.1.4.1.2636.3.1.14.1"
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
stat = process.communicate()[0]
if stat != "":
if "3.6.1.4.1.2636.3.1.14.1" in stat.split(" ")[0]:
print constructor + " : " + ip + " => " + bcolors.OKGREEN + "Active" + bcolors.ENDC
else:
print constructor+" : " + ip + " => " + bcolors.WARNING + "Standby" + bcolors.ENDC
elif constructor == "Fortinet":
bashCommand = "snmpwalk -O q -v2c -c "+community+" "+ip+" 1.3.6.1.2.1.31.1.1.1.1"
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
result = process.communicate()[0]
result = result.split("\n")
        result.pop()  # drop the trailing empty string left by the split
        if result:
            interface = "notfound"
for line in result:
if '"port1"' in line:
line=line.split(" ")[0]
interface=line.split(".")[-1]
break
            if interface != "notfound":
bashCommand = "snmpget -O q -v2c -c "+community+" "+ip+" 1.3.6.1.2.1.31.1.1.1.16."+interface
process = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
stat = process.communicate()[0]
if stat != "" and "3.6.1.2.1.31.1.1.1.16" in stat.split(" ")[0]:
stat = stat.split(" ")[1]
stat = stat.split("\n")[0]
                    if stat == '1':
                        print constructor + " : " + ip + " => " + bcolors.OKGREEN + "Active" + bcolors.ENDC
                    elif stat == '2':
                        print constructor + " : " + ip + " => " + bcolors.WARNING + "Standby" + bcolors.ENDC
                    else:
                        print constructor + " : " + ip + " => " + bcolors.HEADER + "Unknown" + bcolors.ENDC
else:
                print constructor + " : " + ip + " => " + bcolors.OKGREEN + "Interface port1 Not found" + bcolors.ENDC | ActiveStandbyChecker/activestandbychecker.py | 0.039435 | 0.059319 |
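A minimal Python 3 sketch of the same poll-and-classify pattern, assuming net-snmp's snmpget is on PATH; the OID and state labels are copied from the F5 branch above, and the IP is a placeholder:
import subprocess

def snmp_get(community, ip, oid):
    # return the value field of an 'snmpget -O q' reply, or '' on failure
    out = subprocess.run(['snmpget', '-O', 'q', '-v2c', '-c', community, ip, oid],
                         capture_output=True, text=True).stdout.strip()
    return out.split(' ', 1)[1] if ' ' in out else ''

state = snmp_get('public', '192.0.2.1', '1.3.6.1.4.1.3375.2.1.14.3.1.0')
print({'4': 'Active', '3': 'Standby'}.get(state, 'Unknown'))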
from tweepy import Stream, OAuthHandler
from tweepy.streaming import StreamListener
import pymysql
import threading
import time
from datetime import datetime
import json
from twitter_credenciais import User_password
from interface import interface_terminal as terminal
conn = pymysql.connect("localhost","root","", "tw")
c = conn.cursor()
class listener(StreamListener):
def __init__(self, _futuro):
self.futuro = _futuro
self.contador = 0
def on_data(self, data):
all_data = json.loads(data)
        if 'limit' not in all_data:  # rate-limit notices carry a single top-level "limit" key
self.contador += 1
id_tweet = all_data["id"]
source = all_data["source"]
user_id = all_data["user"]["id"]
username = all_data["user"]["screen_name"]
user_url = all_data["user"]["url"]
user_description = all_data["user"]["description"]
user_local = all_data["user"]["location"]
date_tweet = all_data["created_at"]
if(all_data["geo"] != None):
geo = json.dumps(all_data["geo"])
elif(all_data["geo"] == None):
geo = all_data["geo"]
if(all_data["coordinates"] != None):
coordinates = json.dumps(all_data["coordinates"])
elif(all_data["coordinates"] == None):
coordinates = all_data["coordinates"]
tweet = all_data["text"]
if(all_data["place"] != None):
place = json.dumps(all_data["place"])
elif(all_data["place"] == None):
place = all_data["place"]
c.execute("INSERT INTO tweet_tb (id_tweet, source, user_id, username, user_url, user_description, user_local, date_tweet, geo, coordinates, tweet, place) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
(id_tweet, source, user_id, username, user_url, user_description, user_local, date_tweet, geo, coordinates, tweet, place))
conn.commit()
print(username,tweet)
if time.time() > self.futuro:
terminal.Mensagem('TOTAL TWEETS: %s' % self.contador, 'w')
return False
else:
return True
        else:  # rate-limit notice from the streaming API
            print('\n' + '*' * 30 + ' Dict Json ' + '*' * 30 + '\n')
def on_error(self, status):
if status == 420:
            # return False when on_data exceeds the API rate limit
print (status)
terminal.Mensagem('TOTAL TWEETS: %s' % self.contador, 'w')
return False
print (status)
terminal.Mensagem('TOTAL TWEETS: %s' % self.contador, 'w')
class Extractor():
def __init__(self, tempo_segundos):
terminal.Mensagem('Iniciando em Extrator','d')
inicio = time.time()
futuro = (inicio + tempo_segundos)
        # run the stream on a worker thread; the listener stops itself once 'futuro' passes
        t = threading.Thread(target=self.Run, args=(futuro,), name='Thread-Extractor')
        t.start()
        t.join()
        terminal.Mensagem('Encerrando a Thread....', 'w')
fim = time.time()
duracao = fim - inicio
strfim = time.strftime("\nFim: %A, %d %b %Y %H:%M:%S +0000", time.localtime(fim))
strinicio = time.strftime("\nInício: %A, %d %b %Y %H:%M:%S +0000", time.localtime(inicio))
        texto = '%s encerrada!%s%s\nDuração: %s' % (str(t.getName()), strinicio, strfim, duracao)
terminal.Mensagem(texto, 'ok')
def Run(self, futuro):
up = User_password()
auth = OAuthHandler(up.CONSUMER_KEY(), up.CONSUMER_SECRET())
auth.set_access_token(up.ACCESS_TOKEN(), up.ACCESS_TOKEN_SECRET())
twitterStream = Stream(auth, listener(futuro))
        twitterStream.filter(follow=None,track=['a'],languages=['pt']) | extractor.py | 0.183082 | 0.058804 |
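A hypothetical invocation of the extractor above; it assumes the local 'tw' MySQL database and valid Twitter credentials are configured:
if __name__ == '__main__':
    Extractor(60)  # stream Portuguese tweets for ~60 seconds, then report the total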
import dataclasses
from dataclasses import dataclass
from typing import List, Dict
from ausbills.util.consts import *
from ausbills.util import BillExtractor, BillListExtractor
from ausbills.log import get_logger
from ausbills.models import Bill, BillMeta
from ausbills.types import House, Parliament, ChamberProgress, \
BillProgress, BillTypes, Timestamp
qld_logger = get_logger(__file__)
BASE = 'https://www.legislation.qld.gov.au/'
API_CALL = BASE + 'projectdata' \
'?ds=OQPC-BrowseDataSource&start=1&cou' \
'nt=9999&sortField=year&sortDirection' \
'=asc&filterField=year&expression=Repealed%3DN+' \
'AND+PrintType%3D(%22bill.first%2'\
'2+OR+%22bill.firstnongovintro%22)+AND+'\
'ParliamentNo%3D{}&subset=browse&collection=&_={}'
class QLDBillList(BillListExtractor):
@property
def all_bills(self):
return self._get_all_bills()
def _get_all_bills(self):
landing_page = self._download_html(BASE + 'browse/bills')
parl_number = landing_page.find(
'table', {
'class': 'table table-bordered table-condensed browse-table'
}) \
.find('td').text[:2] # Current parliament's number
api_json = self._download_json(
API_CALL.format(parl_number, self._get_epoch())
)
bills_list = []
for bill_data in api_json['data']:
bill_title = bill_data[TITLE][VALUE].replace('\u2019', "'")
bill_id = bill_data[ID][VALUE]
intro_date = self._get_timestamp(
bill_data['publication.date'], '%Y-%m-%dT00:00:00')
bills_list.append({
TITLE: bill_title,
ID: bill_id,
                INTRO_ASSEMBLY: intro_date  # QLD is unicameral
})
return bills_list
@dataclass
class BillMetaQLD(BillMeta):
intro_assembly: Timestamp
id: str
def get_bills_metadata() -> List[BillMetaQLD]:
meta_list = []
all_bills = QLDBillList().all_bills
for bill_dict in all_bills:
meta_list.append(BillMetaQLD(
title=bill_dict[TITLE],
id=bill_dict[ID],
link=BASE + 'view/html/bill.first/' + bill_dict[ID],
intro_assembly=bill_dict[INTRO_ASSEMBLY],
parliament=Parliament.QLD.value
))
return meta_list
@dataclass
class BillQLD(Bill, BillMetaQLD):
bill_type: str
bill_em_links: List[Dict]
class QLDBillHelper(BillExtractor):
def __init__(self, bill_meta: BillMetaQLD):
self.url = bill_meta.link
self.bill_id = bill_meta.id
self.history_page = self._download_html(
self.url + '/lh').find('table', {'class': 'table table-striped'})
self.intro_date = bill_meta.intro_assembly
self.assent_date = None
def __str__(self):
return f"<Bill | URL: '{self.url}'>"
def __repr__(self):
return ('<{}.{} : {} object at {}>'.format(
self.__class__.__module__,
self.__class__.__name__,
self.bill_id,
hex(id(self))))
@property
def bill_type(self):
return self._get_bill_type()
def _get_bill_type(self):
if(self.history_page.find('tr').text.strip() == 'Government Bill'):
return BillTypes.GOVERNMENT.value
else:
return BillTypes.PRIVATE_MEMBER.value
@property
def em_links(self):
return self._get_em_links()
def _get_em_links(self):
ret_dict = []
index = 0
links_column = self.history_page.find_all('tr')[1].find_all('td')[-1]
em_tag = links_column.find('a', text='Explanatory Note')
hr_compat_tag = links_column.find(
'a', {'href': f'/view/pdf/bill.first.hrc/{self.bill_id}'}
)
if em_tag:
ret_dict.append({
API_ID: index,
API_HOUSE: House.LOWER.value,
API_TIME: self.intro_date,
URL: BASE + em_tag['href']
})
index += 1
if hr_compat_tag:
ret_dict.append({
API_ID: index,
API_HOUSE: House.LOWER.value,
API_TIME: self.intro_date,
URL: BASE + hr_compat_tag['href']
})
return ret_dict
@property
def text_links(self):
return self._get_text_links()
    def _get_text_links(self):
        pass  # not implemented; bill_text_links ends up as None
@property
def progress(self):
return self._get_progress()
def _get_progress(self):
prog_dict = {
BillProgress.FIRST.value: True,
BillProgress.ASSENTED.value: False
}
final_stage = self.history_page.find_all('tr')[-1]
if 'Assent' in final_stage.find('td').text:
prog_dict[BillProgress.ASSENTED.value] = True
chamb_progress = ChamberProgress.THIRD_READING.value
elif '3rd' in final_stage.find('td').text:
chamb_progress = ChamberProgress.THIRD_READING.value
elif 'Indicative Reprint' in final_stage.find('td').text:
chamb_progress = ChamberProgress.SECOND_READING.value
else:
chamb_progress = ChamberProgress.FIRST_READING.value
return [prog_dict, chamb_progress]
def get_bill(bill_meta: BillMetaQLD) -> BillQLD:
    qld_helper = QLDBillHelper(bill_meta)
    bill = BillQLD(  # avoid shadowing the bill_meta parameter with a different type
        **dataclasses.asdict(bill_meta),
        bill_type=qld_helper.bill_type,
        bill_em_links=qld_helper.em_links,
        bill_text_links=qld_helper.text_links,
        progress=qld_helper.progress[0],
        chamber_progress=qld_helper.progress[1]
    )
return bill_meta | ausbills/parliament/qld.py | 0.598077 | 0.112162 |
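The two module-level functions above are this scraper's public entry points. A minimal usage sketch, assuming the ausbills package is importable and legislation.qld.gov.au is reachable:
# Hedged usage sketch for the QLD scraper module above.
from ausbills.parliament.qld import get_bills_metadata, get_bill
metas = get_bills_metadata()       # one BillMetaQLD per bill in the current parliament
bill = get_bill(metas[0])          # BillQLD enriched with type, EM links and progress
print(bill.title, bill.bill_type)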
from __future__ import absolute_import, print_function
import random
import warnings
import pytest
from requests.packages import urllib3
import zhmcclient
# pylint: disable=line-too-long,unused-import
from zhmcclient.testutils.hmc_definition_fixtures import hmc_definition, hmc_session # noqa: F401, E501
from zhmcclient.testutils.cpc_fixtures import dpm_mode_cpcs # noqa: F401, E501
# pylint: enable=line-too-long,unused-import
from .utils import runtest_find_list, TEST_PREFIX, standard_partition_props, \
End2endTestWarning
urllib3.disable_warnings()
# Properties in minimalistic Partition objects (e.g. find_by_name())
PART_MINIMAL_PROPS = ['object-uri', 'name']
# Properties in Partition objects returned by list() without full props
PART_LIST_PROPS = ['object-uri', 'name', 'status', 'type']
# Properties whose values can change between retrievals of Partition objects
PART_VOLATILE_PROPS = []
def test_part_find_list(dpm_mode_cpcs): # noqa: F811
# pylint: disable=redefined-outer-name
"""
Test list(), find(), findall().
"""
if not dpm_mode_cpcs:
pytest.skip("No CPCs in DPM mode provided")
for cpc in dpm_mode_cpcs:
assert cpc.dpm_enabled
print("Testing on CPC {} (DPM mode)".format(cpc.name))
session = cpc.manager.session
# Pick a random partition
part_list = cpc.partitions.list()
if not part_list:
msg_txt = "No partitions on CPC {}".format(cpc.name)
warnings.warn(msg_txt, End2endTestWarning)
pytest.skip(msg_txt)
part = random.choice(part_list)
runtest_find_list(
session, cpc.partitions, part.name, 'name', 'status',
PART_VOLATILE_PROPS, PART_MINIMAL_PROPS, PART_LIST_PROPS)
def test_part_crud(dpm_mode_cpcs): # noqa: F811
# pylint: disable=redefined-outer-name
"""
Test create, read, update and delete a partition.
"""
if not dpm_mode_cpcs:
pytest.skip("No CPCs in DPM mode provided")
for cpc in dpm_mode_cpcs:
assert cpc.dpm_enabled
print("Testing on CPC {} (DPM mode)".format(cpc.name))
part_name = TEST_PREFIX + ' test_part_crud part1'
part_name_new = part_name + ' new'
# Ensure a clean starting point for this test
try:
part = cpc.partitions.find(name=part_name)
except zhmcclient.NotFound:
pass
else:
warnings.warn(
"Deleting test partition from previous run: '{p}' on CPC '{c}'".
format(p=part_name, c=cpc.name), UserWarning)
status = part.get_property('status')
if status != 'stopped':
part.stop()
part.delete()
try:
part = cpc.partitions.find(name=part_name_new)
except zhmcclient.NotFound:
pass
else:
warnings.warn(
"Deleting test partition from previous run: '{p}' on CPC '{c}'".
format(p=part_name_new, c=cpc.name), UserWarning)
status = part.get_property('status')
if status != 'stopped':
part.stop()
part.delete()
# Test creating the partition
part_input_props = standard_partition_props(cpc, part_name)
part_auto_props = {
'status': 'stopped',
}
# The code to be tested
part = cpc.partitions.create(part_input_props)
for pn, exp_value in part_input_props.items():
assert part.properties[pn] == exp_value, \
"Unexpected value for property {!r}".format(pn)
part.pull_full_properties()
for pn, exp_value in part_input_props.items():
assert part.properties[pn] == exp_value, \
"Unexpected value for property {!r}".format(pn)
for pn, exp_value in part_auto_props.items():
assert part.properties[pn] == exp_value, \
"Unexpected value for property {!r}".format(pn)
# Test updating a property of the partition
new_desc = "Updated partition description."
# The code to be tested
part.update_properties(dict(description=new_desc))
assert part.properties['description'] == new_desc
part.pull_full_properties()
assert part.properties['description'] == new_desc
# Test renaming the partition
# The code to be tested
part.update_properties(dict(name=part_name_new))
assert part.properties['name'] == part_name_new
part.pull_full_properties()
assert part.properties['name'] == part_name_new
with pytest.raises(zhmcclient.NotFound):
cpc.partitions.find(name=part_name)
# Test deleting the partition
# The code to be tested
part.delete()
with pytest.raises(zhmcclient.NotFound):
cpc.partitions.find(name=part_name_new) | tests/end2end/test_partition.py | 0.475605 | 0.188063 |
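Both tests are driven entirely by the dpm_mode_cpcs fixture, so they only do real work against an HMC that the zhmcclient test utilities have been configured for. A hedged sketch of a programmatic invocation (the -s flag simply surfaces the tests' print() progress output):
# Hypothetical programmatic invocation of the end2end partition tests.
import pytest
pytest.main(["-s", "tests/end2end/test_partition.py::test_part_crud"])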
import gui3d
import humanmodifier
print 'Face imported'
class GroupBoxRadioButton(gui3d.RadioButton):
def __init__(self, group, label, groupBox, selected=False):
gui3d.RadioButton.__init__(self, group, label, selected, style=gui3d.ButtonStyle)
self.groupBox = groupBox
def onClicked(self, event):
gui3d.RadioButton.onClicked(self, event)
self.parent.parent.hideAllBoxes()
self.groupBox.show()
class FaceSlider(humanmodifier.ModifierSlider):
def __init__(self, modifier, image, view):
humanmodifier.ModifierSlider.__init__(self, min=-1.0, max=1.0, modifier=modifier, style=gui3d.SliderStyle._replace(height=56, normal=image), thumbStyle=gui3d.SliderThumbStyle._replace(width = 32, height = 32, normal="slider2.png", focused="slider2_focused.png"))
self.view = getattr(gui3d.app, view)
def onFocus(self, event):
humanmodifier.ModifierSlider.onFocus(self, event)
self.view()
def setPosition(self, position):
humanmodifier.ModifierSlider.setPosition(self, position)
self.thumb.setPosition([position[0], position[1] + self.style.height / 2 - self.thumbStyle.height / 2, position[2] + 0.01])
self.setValue(self.getValue())
class FaceSlider2(humanmodifier.ModifierSlider):
def __init__(self, modifier, image, view):
humanmodifier.ModifierSlider.__init__(self, min=0.0, max=1.0, modifier=modifier, style=gui3d.SliderStyle._replace(height=56, normal=image), thumbStyle=gui3d.SliderThumbStyle._replace(width = 32, height = 32, normal="slider2.png", focused="slider2_focused.png"))
self.view = getattr(gui3d.app, view)
def onFocus(self, event):
humanmodifier.ModifierSlider.onFocus(self, event)
self.view()
def setPosition(self, position):
humanmodifier.ModifierSlider.setPosition(self, position)
self.thumb.setPosition([position[0], position[1] + self.style.height / 2 - self.thumbStyle.height / 2, position[2] + 0.01])
self.setValue(self.getValue())
class FaceTaskView(gui3d.TaskView):
def __init__(self, category):
gui3d.TaskView.__init__(self, category, 'Face')
features = [
('head', [('data/targets/head/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/head/images/', i[3]) for i in
[
('head-age', 'less', 'more', 'frontView'),
('head-angle', 'in', 'out', 'rightView'),
('head-scale-depth', 'less', 'more', 'rightView'),
('head-scale-horiz', 'less', 'more', 'frontView'),
('head-scale-vert', 'more', 'less', 'frontView'),
('head-trans', 'in', 'out', 'frontView'),
('head-trans', 'down', 'up', 'frontView'),
('head-trans', 'forward', 'backward', 'rightView'),
]]),
('neck', [('data/targets/neck/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/neck/images/', i[3]) for i in
[
('neck-scale-depth', 'less', 'more', 'rightView'),
('neck-scale-horiz', 'less', 'more', 'frontView'),
('neck-scale-vert', 'more', 'less', 'frontView'),
('neck-trans', 'in', 'out', 'frontView'),
('neck-trans', 'down', 'up', 'frontView'),
('neck-trans', 'forward', 'backward', 'rightView'),
]]),
('right eye', [('data/targets/eyes/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/eyes/images/', i[3]) for i in
[
('r-eye-height1', 'min', 'max', 'frontView'),
('r-eye-height2', 'min', 'max', 'frontView'),
('r-eye-height3', 'min', 'max', 'frontView'),
('r-eye-push1', 'in', 'out', 'frontView'),
('r-eye-push2', 'in', 'out', 'frontView'),
('r-eye-move', 'in', 'out', 'frontView'),
('r-eye-move', 'up', 'down', 'frontView'),
('r-eye', 'small', 'big', 'frontView'),
('r-eye-corner1', 'up', 'down', 'frontView'),
('r-eye-corner2', 'up', 'down', 'frontView')
]]),
('left eye', [('data/targets/eyes/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/eyes/images/', i[3]) for i in
[
('l-eye-height1', 'min', 'max', 'frontView'),
('l-eye-height2', 'min', 'max', 'frontView'),
('l-eye-height3', 'min', 'max', 'frontView'),
('l-eye-push1', 'in', 'out', 'frontView'),
('l-eye-push2', 'in', 'out', 'frontView'),
('l-eye-move', 'in', 'out', 'frontView'),
('l-eye-move', 'up', 'down', 'frontView'),
('l-eye', 'small', 'big', 'frontView'),
('l-eye-corner1', 'up', 'down', 'frontView'),
('l-eye-corner2', 'up', 'down', 'frontView'),
]]),
('nose features', [('data/targets/nose/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/nose/images/', i[3]) for i in
[
('nose', 'compress', 'uncompress', 'rightView'),
('nose', 'convex', 'concave', 'rightView'),
('nose', 'greek', 'ungreek', 'rightView'),
('nose', 'hump', 'unhump', 'rightView'),
('nose', 'potato', 'point', 'rightView'),
('nose-nostrils', 'point', 'unpoint', 'frontView'),
('nose-nostrils', 'up', 'down', 'rightView'),
('nose-point', 'up', 'down', 'rightView'),
]]),
('nose size details', [('data/targets/nose/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/nose/images/', i[3]) for i in
[
('nose-nostril-width', 'min', 'max', 'frontView'),
('nose-height', 'min', 'max', 'rightView'),
('nose-width1', 'min', 'max', 'frontView'),
('nose-width2', 'min', 'max', 'frontView'),
('nose-width3', 'min', 'max', 'frontView'),
('nose-width', 'min', 'max', 'frontView'),
]]),
('nose size', [('data/targets/nose/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/nose/images/', i[3]) for i in
[
('nose-trans', 'up', 'down', 'frontView'),
('nose-trans', 'forward', 'backward', 'rightView'),
('nose-trans', 'in', 'out', 'frontView'),
('nose-scale-vert', 'incr', 'decr', 'frontView'),
('nose-scale-horiz', 'incr', 'decr', 'frontView'),
('nose-scale-depth', 'incr', 'decr', 'rightView'),
]]),
('mouth size', [('data/targets/mouth/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/mouth/images/', i[3]) for i in
[
('mouth-scale-horiz', 'incr', 'decr', 'frontView'),
('mouth-scale-vert', 'incr', 'decr', 'frontView'),
('mouth-scale-depth', 'incr', 'decr', 'rightView'),
('mouth-trans', 'in', 'out', 'frontView'),
('mouth-trans', 'up', 'down', 'frontView'),
('mouth-trans', 'forward', 'backward', 'rightView'),
]]),
('mouth size details', [('data/targets/mouth/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/mouth/images/', i[3]) for i in
[
('mouth-lowerlip-height', 'min', 'max', 'frontView'),
('mouth-lowerlip-middle', 'up', 'down', 'frontView'),
('mouth-lowerlip-width', 'min', 'max', 'frontView'),
('mouth-upperlip-height', 'min', 'max', 'frontView'),
('mouth-upperlip-width', 'min', 'max', 'frontView'),
]]),
('mouth features', [('data/targets/mouth/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/mouth/images/', i[3]) for i in
[
('mouth-lowerlip-ext', 'up', 'down', 'frontView'),
('mouth-angles', 'up', 'down', 'frontView'),
('mouth-lowerlip-middle', 'up', 'down', 'frontView'),
('mouth-lowerlip', 'deflate', 'inflate', 'rightView'),
('mouth-philtrum', 'up', 'down', 'frontView'),
('mouth-philtrum', 'increase', 'decrease', 'rightView'),
('mouth', 'up', 'down', 'frontView'),
('mouth-upperlip', 'deflate', 'inflate', 'rightView'),
('mouth-upperlip-ext', 'up', 'down', 'frontView'),
('mouth-upperlip-middle', 'up', 'down', 'frontView'),
]]),
('right ear', [('data/targets/ears/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/ears/images/', i[3]) for i in
[
('r-ear', 'backward', 'forward', 'rightView'),
('r-ear', 'big', 'small', 'rightView'),
('r-ear', 'down', 'up', 'rightView'),
('r-ear-height', 'min', 'max', 'rightView'),
('r-ear-lobe', 'min', 'max', 'rightView'),
('r-ear', 'pointed', 'triangle', 'rightView'),
('r-ear-rot', 'backward', 'forward', 'rightView'),
('r-ear', 'square', 'round', 'rightView'),
('r-ear-width', 'max', 'min', 'rightView'),
('r-ear-wing', 'out', 'in', 'frontView'),
('r-ear-flap', 'out', 'in', 'frontView'),
]]),
('left ear', [('data/targets/ears/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/ears/images/', i[3]) for i in
[
('l-ear', 'backward', 'forward', 'leftView'),
('l-ear', 'big', 'small', 'leftView'),
('l-ear', 'down', 'up', 'leftView'),
('l-ear-height', 'min', 'max', 'leftView'),
('l-ear-lobe', 'min', 'max', 'leftView'),
('l-ear', 'pointed', 'triangle', 'leftView'),
('l-ear-rot', 'backward', 'forward', 'leftView'),
('l-ear', 'square', 'round', 'leftView'),
('l-ear-width', 'max', 'min', 'leftView'),
('l-ear-wing', 'out', 'in', 'frontView'),
('l-ear-flap', 'out', 'in', 'frontView'),
]]),
('chin', [('data/targets/chin/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/chin/images/', i[3]) for i in
[
('chin', 'in', 'out', 'rightView'),
('chin-width', 'min', 'max', 'frontView'),
('chin-height', 'min', 'max', 'frontView'),
('chin', 'squared', 'round', 'frontView'),
('chin', 'prognathism1', 'prognathism2', 'rightView'),
]]),
('cheek', [('data/targets/cheek/${ethnic}/${gender}_${age}/%s-${value}.target' % (i[0]), i[0], i[1], i[2], 'data/targets/cheek/images/', i[3]) for i in
[
('l-cheek', 'in', 'out', 'frontView'),
('l-cheek-bones', 'out', 'in', 'frontView'),
('r-cheek', 'in', 'out', 'frontView'),
('r-cheek-bones', 'out', 'in', 'frontView'),
]])
]
features2 = [
('head shape', [('data/targets/head/${ethnic}/${gender}_${age}/%s.target' % (i[0]), i[0], 'data/targets/head/images/', i[1]) for i in
[
('head-oval', 'frontView'),
('head-round', 'frontView'),
('head-rectangular', 'frontView'),
('head-square', 'frontView'),
('head-triangular', 'frontView'),
('head-invertedtriangular', 'frontView'),
('head-diamond', 'frontView'),
]])
]
y = 80
self.groupBoxes = []
self.radioButtons = []
self.sliders = []
self.modifiers = {}
self.categoryBox = self.addView(gui3d.GroupBox([650, y, 9.0], 'Category'))
y += 25
for name, templates in features2:
for index, template in enumerate(templates):
if index % 12 == 0:
if len(templates) <= 12:
title = name.capitalize()
else:
title = '%s %d' % (name.capitalize(), index / 12 + 1)
# Create box
box = self.addView(gui3d.GroupBox([10, 80, 9.0], title, gui3d.GroupBoxStyle._replace(width=128+112+4)))
self.groupBoxes.append(box)
# Create radiobutton
radio = self.categoryBox.addView(GroupBoxRadioButton(self.radioButtons, title, box, selected=len(self.radioButtons) == 0))
y += 24
# Create sliders
modifier = humanmodifier.GenderAgeEthnicModifier2(template[0])
self.modifiers['%s%d' % (name, index + 1)] = modifier
slider = box.addView(FaceSlider2(modifier, '%s%s.png' % (template[2], template[1]), template[3]))
self.sliders.append(slider)
for name, templates in features:
for index, template in enumerate(templates):
if index % 12 == 0:
if len(templates) <= 12:
title = name.capitalize()
else:
title = '%s %d' % (name.capitalize(), index / 12 + 1)
# Create box
box = self.addView(gui3d.GroupBox([10, 80, 9.0], title, gui3d.GroupBoxStyle._replace(width=128+112+4)))
self.groupBoxes.append(box)
# Create radiobutton
radio = self.categoryBox.addView(GroupBoxRadioButton(self.radioButtons, title, box, selected=len(self.radioButtons) == 0))
y += 24
# Create sliders
modifier = humanmodifier.GenderAgeEthnicAsymmetricModifier(template[0], 'value', template[2], template[3], False)
self.modifiers['%s%d' % (name, index + 1)] = modifier
slider = box.addView(FaceSlider(modifier, '%s%s-%s-%s.png' % (template[4], template[1], template[2], template[3]), template[5]))
self.sliders.append(slider)
y += 16
self.hideAllBoxes()
self.groupBoxes[0].show()
def hideAllBoxes(self):
for box in self.groupBoxes:
box.hide()
def onShow(self, event):
gui3d.TaskView.onShow(self, event)
gui3d.app.setFaceCamera()
for slider in self.sliders:
slider.update()
def onResized(self, event):
self.categoryBox.setPosition([event.width - 150, self.categoryBox.getPosition()[1], 9.0])
def onHumanChanged(self, event):
human = event.human
for slider in self.sliders:
slider.update()
def loadHandler(self, human, values):
if values[0] == 'face':
modifier = self.modifiers.get(values[1].replace("-", " "), None)
if modifier:
modifier.setValue(human, float(values[2]))
def saveHandler(self, human, file):
for name, modifier in self.modifiers.iteritems():
value = modifier.getValue(human)
if value:
file.write('face %s %f\n' % (name.replace(" ", "-"), value))
def load(app):
category = app.getCategory('Modelling')
taskview = category.addView(FaceTaskView(category))
app.addLoadHandler('face', taskview.loadHandler)
app.addSaveHandler(taskview.saveHandler)
print 'Face loaded'
def unload(app):
pass | plugins/0_modeling_3_face.py | 0.388386 | 0.107789 |
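The loadHandler/saveHandler pair above implies a simple line-oriented save format: 'face <hyphenated-name> <value>'. A sketch of the round trip, assuming values holds the whitespace-split tokens of one saved line:
# Round-trip sketch for the face save format implied above.
line = 'face right-eye3 0.500000'            # as written by saveHandler
values = line.split()                        # ['face', 'right-eye3', '0.500000']
assert values[0] == 'face'
modifier_key = values[1].replace('-', ' ')   # 'right eye3', the self.modifiers key
value = float(values[2])                     # 0.5, passed to modifier.setValue()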
import datetime
_registered_parse_convertible_types = {}
class ParseConvertibleType(type):
def __init__(cls, class_name, bases, class_dict):
super(ParseConvertibleType, cls).__init__(class_name, bases, class_dict)
if class_name != 'ParseConvertible':
parse_type_name_func = getattr(cls, 'parse_type_name', None)
if parse_type_name_func:
_registered_parse_convertible_types[parse_type_name_func()] = cls
class ParseConvertible(object, metaclass=ParseConvertibleType):
def to_parse(self):
raise NotImplementedError('Implement how this type converts to Parse\'s representation.')
@classmethod
def to_python(cls, parse_dict):
raise NotImplementedError('Implement how this type converts from Parse\'s representation.')
@classmethod
def parse_type_name(cls):
raise NotImplementedError('Should return the __type value.')
@staticmethod
def guess_to_python(value):
if isinstance(value, dict):
try:
klass = _registered_parse_convertible_types[value['__type']]
except KeyError:
pass
else:
return klass.to_python(value)
return value
@staticmethod
def guess_to_parse(value):
if isinstance(value, datetime.datetime):
# Since createdAt and updatedAt won't use this guess_to_parse, it's safe to return dict directly
return datetime_to_parse_dict(value)
elif isinstance(value, ParseConvertible):
return value.to_parse()
return value
# == GeoPoint ==========================================================================================================
class GeoPoint(ParseConvertible):
"""
A class used to represent GeoPoint data
"""
def __init__(self, latitude, longitude):
"""
Create a GeoPoint data
:param latitude: the latitude of this point
:type latitude: float
:param longitude: the longitude of this point
:type longitude: float
"""
self.latitude = latitude
self.longitude = longitude
def __str__(self):
return repr(self)
def __repr__(self):
return 'GeoPoint({0.latitude}, {0.longitude})'.format(self)
def to_parse(self):
return {
'__type': self.parse_type_name(),
'latitude': self.latitude,
'longitude': self.longitude,
}
@classmethod
def to_python(cls, parse_dict):
if parse_dict['__type'] != cls.parse_type_name():
raise TypeError('This is not a GeoPoint dict.')
return cls(parse_dict['latitude'], parse_dict['longitude'])
@classmethod
def parse_type_name(cls):
return 'GeoPoint'
# == datetime ==========================================================================================================
class UTC(datetime.tzinfo):
@staticmethod
def utcoffset(*args, **kwargs):
return datetime.timedelta(0)
@staticmethod
def tzname(*args, **kwargs):
return "UTC"
@staticmethod
def dst(*args, **kwargs):
return datetime.timedelta(0)
def __str__(self):
return repr(self)
def __repr__(self):
return self.tzname()
class LocalTimezone(datetime.tzinfo):
@staticmethod
def utcoffset(*args, **kwargs):
timedelta = datetime.datetime.now() - datetime.datetime.utcnow()
return datetime.timedelta(minutes=round(timedelta.total_seconds()/60))
@staticmethod
def tzname(*args, **kwargs):
return "<Local>"
@staticmethod
def dst(*args, **kwargs):
return datetime.timedelta(0)
def __str__(self):
return repr(self)
def __repr__(self):
return self.tzname()
def datetime_to_parse_str(datetime_obj):
"""
:type datetime_obj: datetime.datetime
:rtype: str
"""
if not datetime_obj.tzinfo:
datetime_obj = datetime_obj.replace(tzinfo=LocalTimezone())
return datetime_obj.astimezone(UTC()).strftime('%Y-%m-%dT%H:%M:%S.%f')[:-3]+'Z'
def datetime_str_to_python(parse_str):
"""
:type parse_str: str
:rtype: datetime.datetime
"""
return datetime.datetime.strptime(parse_str, '%Y-%m-%dT%H:%M:%S.%fZ').replace(tzinfo=UTC())
def datetime_to_parse_dict(datetime_obj):
"""
:type datetime_obj: datetime.datetime
:rtype: dict
"""
return {
'__type': 'Date',
'iso': datetime_to_parse_str(datetime_obj),
}
def datetime_dict_to_python(parse_dict):
"""
:type parse_dict: dict
:rtype: datetime.datetime
"""
if parse_dict['__type'] != 'Date':
raise TypeError('This is not a Datetime dict.')
return datetime_str_to_python(parse_dict['iso'])
class _DatetimeParseConvertible(ParseConvertible):
@classmethod
def to_python(cls, parse_dict):
return datetime_dict_to_python(parse_dict)
@classmethod
def parse_type_name(cls):
return 'Date'
def to_parse(self):
# This class is just a wrapper for `guess_to_python`
return None | pyparse/core/data/types.py | 0.794544 | 0.42668 |
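Because the metaclass registers every concrete subclass under its __type name, ParseConvertible.guess_to_python can dispatch on plain dicts. A round-trip sketch, assuming the module is importable as pyparse.core.data.types:
# Round-trip sketch for the converters defined above.
import datetime
from pyparse.core.data.types import GeoPoint, ParseConvertible
point = GeoPoint(25.04, 121.56)
parse_dict = point.to_parse()            # {'__type': 'GeoPoint', 'latitude': 25.04, 'longitude': 121.56}
assert ParseConvertible.guess_to_python(parse_dict).latitude == 25.04
dt = datetime.datetime(2020, 1, 1, tzinfo=datetime.timezone.utc)
ParseConvertible.guess_to_parse(dt)      # {'__type': 'Date', 'iso': '2020-01-01T00:00:00.000Z'}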
import os
import sqlite3
# construct a path to wherever your database exists
#DB_FILEPATH = "rpg.db"
DB_FILEPATH = os.path.join(os.path.dirname(__file__), "..", "data", "rpg_db.sqlite3")
connection = sqlite3.connect(DB_FILEPATH)
connection.row_factory = sqlite3.Row
print("CONNECTION:", connection)
cursor = connection.cursor()
print("CURSOR", cursor)
#How many characters are there?
#How many of each specific subclass?
#How many total Items?
#How many of the Items are weapons? How many are not?
#How many Items does each character have? (Return first 20 rows)
#How many Weapons does each character have? (Return first 20 rows)
#On average, how many Items does each Character have?
#On average, how many Weapons does each character have?
# How many characters are there? -302
query = """
SELECT
count(distinct c.character_id)
FROM
charactercreator_character c
"""
# How many of each specific subclass?
# Note: the original version joined each subclass table to itself
# (e.g. ON cleric.character_ptr_id = cleric.character_ptr_id), which
# produced a cross product and made the query extremely slow.
# cleric = 75
# fighter = 68
# mage = 108
# thief = 51
# mage/necromancer = 11
query2 = """
SELECT
count(distinct cleric.character_ptr_id) as cleric_characters
,count(distinct f.character_ptr_id) as fighter_characters
,count(distinct mage.character_ptr_id) as mage_characters
,count(distinct thief.character_ptr_id) as thief_characters
FROM charactercreator_character c
LEFT JOIN charactercreator_cleric cleric ON c.character_id = cleric.character_ptr_id
LEFT JOIN charactercreator_fighter f ON c.character_id = f.character_ptr_id
LEFT JOIN charactercreator_mage mage ON c.character_id = mage.character_ptr_id
LEFT JOIN charactercreator_thief thief ON c.character_id = thief.character_ptr_id
"""
# How many total items?
query3 = """
SELECT
count(distinct ai.item_id)
FROM
armory_item ai
"""
# How many Items are weapons? = 37 How many are not? 174-37= 137
query4 = """
SELECT
count(distinct ai.item_id) as items
,count(distinct aw.item_ptr_id) as weapon_count
FROM armory_item ai
LEFT JOIN armory_weapon aw ON aw.item_ptr_id = ai.item_id
"""
# How many items does each character have?
query5 = """
SELECT
c.character_id
,c.name as character_name
,count(distinct ai.item_id) as item_count
FROM charactercreator_character c
LEFT JOIN charactercreator_character_inventory inv ON c.character_id = inv.character_id
LEFT JOIN armory_item ai ON ai.item_id = inv.item_id
GROUP BY c.character_id
LIMIT 20;
"""
# How many weapons does each character have?
query6 = """
SELECT
c.character_id
,c.name as character_name
,count(distinct w.item_ptr_id) as weapon_count
FROM charactercreator_character c
LEFT JOIN charactercreator_character_inventory inv ON c.character_id = inv.character_id
LEFT JOIN armory_weapon w ON w.item_ptr_id = inv.item_id
GROUP BY c.character_id
LIMIT 20;
"""
result = cursor.execute(query).fetchone()  #> execute() returns a cursor without results (need to fetch them)
print("RESULT", result[0])
result2 = cursor.execute(query2).fetchone()
print("RESULT 2", dict(result2))
result3 = cursor.execute(query3).fetchone()
print("RESULT 3", result3[0])
result4 = cursor.execute(query4).fetchone()
print("RESULT 4", dict(result4))
result5 = cursor.execute(query5).fetchall()
print("RESULT 5", [dict(row) for row in result5])
result6 = cursor.execute(query6).fetchall()
print("RESULT 6", dict(result6)) | app/rpg_queries.py | import os
import sqlite3
# construct a path to wherever your database exists
#DB_FILEPATH = "rpg.db"
DB_FILEPATH = os.path.join(os.path.dirname(__file__), "..", "data", "rpg_db.sqlite3")
connection = sqlite3.connect(DB_FILEPATH)
connection.row_factory = sqlite3.Row
print("CONNECTION:", connection)
cursor = connection.cursor()
print("CURSOR", cursor)
#How many characters are there?
#How many of each specific subclass?
#How many total Items?
#How many of the Items are weapons? How many are not?
#How many Items does each character have? (Return first 20 rows)
#How many Weapons does each character have? (Return first 20 rows)
#On average, how many Items does each Character have?
#On average, how many Weapons does each character have?
# How many characters are there? -302
query = """
SELECT
c.character_id
,count(distinct c.character_id)
FROM
charactercreator_character c
"""
# How many of each specific subclass?
#come back, takes too long to load, coding error?#######
# cleric = 75
# figher = 68
# mage = 108
# thief = 51
# mage/necromancer = 11
query2 = """
SELECT
c.character_id
,c.name as character_name
,count(distinct cleric.character_ptr_id) as cleric_characters
,count(distinct f.character_ptr_id) as fighter_characters
,count(distinct mage.character_ptr_id) as mage_characters
,count(distinct thief.character_ptr_id) as thief_characters
FROM charactercreator_character c
LEFT JOIN charactercreator_cleric cleric ON cleric.character_ptr_id = cleric.character_ptr_id
LEFT JOIN charactercreator_fighter f ON f.character_ptr_id = f.character_ptr_id
LEFT JOIN charactercreator_mage mage ON mage.character_ptr_id = mage.character_ptr_id
LEFT JOIN charactercreator_thief thief ON thief.character_ptr_id = thief.character_ptr_id
"""
# How many total items?
query3 = """
SELECT
ai.item_id
,count(distinct ai.item_id)
FROM
armory_item ai
"""
# How many Items are weapons? = 37 How many are not? 174-37= 137
query4 = """
SELECT
ai.item_id
,count(distinct ai.item_id) as items
,count(distinct aw.item_ptr_id) as weapon_count
FROM armory_item ai
LEFT JOIN armory_weapon aw ON ai.item_id = ai.item_id
"""
# How many items does each character have?
query5 = """
SELECT
c.character_id
,c.name as character_name
,count(distinct ai.item_id) as item_count
FROM charactercreator_character c
LEFT JOIN charactercreator_character_inventory inv ON c.character_id = inv.character_id
LEFT JOIN armory_item ai ON ai.item_id = inv.item_id
GROUP BY c.character_id
LIMIT 20;
"""
# How many weapons does each character have?
query6 = """
SELECT
c.character_id
,c.name as character_name
,count(distinct w.item_ptr_id) as weapon_count
FROM charactercreator_character c
LEFT JOIN charactercreator_character_inventory inv ON c.character_id = inv.character_id
LEFT JOIN armory_weapon w ON w.item_ptr_id = inv.item_id
GROUP BY c.character_id
LIMIT 20;
"""
result = cursor.execute(query)
print("RESULT", dict(result)) #> returns cursor object w/o results (need to fetch the results)
result2 = cursor.execute(query2).fetchall()
print("RESULT 2", dict(result2))
result3 = cursor.execute(query3).fetchall()
print("RESULT 3", dict(result3))
result4 = cursor.execute(query4).fetchall()
print("RESULT 4", dict(result4))
result5 = cursor.execute(query5).fetchall()
print("RESULT 5", dict(result5))
result6 = cursor.execute(query6).fetchall()
print("RESULT 6", dict(result6)) | 0.290779 | 0.116412 |
from typing import Sequence
import torch
from detectron2.engine.hooks import HookBase
from detectron2.data import (
build_detection_train_loader,
DatasetMapper,
)
from detectron2.utils import comm
from detectron2.config import CfgNode
from detectron2.data.transforms import Augmentation
class ValidationLoss(HookBase):
r"""Hook that computes validation loss during training
Parameters
----------
cfg : CfgNode
Training configuration
val_augmentation : Sequence[Augmentation]
Data augmentation functions applied to validation data
period : int
The validation loss values are updated each `period` iterations
Attributes
----------
cfg : CfgNode
Clone of `cfg` parameters
_loader : detectron2.data.DataLoader
Validation data loader
_period : int
See `period` parameter
num_steps : int
It keeps track of the current iteration id
"""
def __init__(self, cfg: CfgNode, val_augmentation: Sequence[Augmentation], period: int):
super().__init__()
self.cfg = cfg.clone()
self.cfg.DATASETS.TRAIN = cfg.DATASETS.TEST
self._loader = iter(
build_detection_train_loader(
self.cfg,
mapper=DatasetMapper(
self.cfg, is_train=True, augmentations=val_augmentation
),
)
)
self._period = period
self.num_steps = 0
def after_step(self):
"""Run after every iteration, see parent for details"""
self.num_steps += 1
if self.num_steps % self._period == 0:
data = next(self._loader)
if torch.cuda.is_available():
torch.cuda.synchronize()
with torch.no_grad():
loss_dict = self.trainer.model(data)
losses = sum(loss_dict.values())
assert torch.isfinite(losses).all(), loss_dict
loss_dict_reduced = {
"val_" + k: v.item() for k, v in comm.reduce_dict(loss_dict).items()
}
losses_reduced = sum(loss for loss in loss_dict_reduced.values())
if comm.is_main_process():
self.trainer.storage.put_scalars(
total_val_loss=losses_reduced, **loss_dict_reduced
)
comm.synchronize()
else:
pass | amazon-sagemaker-pytorch-detectron2/container_training/sku-110k/engine/hooks.py | 0.952508 | 0.551332 |
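A hedged sketch of wiring the hook into a training run, assuming a standard detectron2 DefaultTrainer, a populated cfg, and a val_augmentations sequence prepared elsewhere:
# Hypothetical registration of ValidationLoss with a detectron2 trainer.
from detectron2.engine import DefaultTrainer
trainer = DefaultTrainer(cfg)
trainer.register_hooks([ValidationLoss(cfg, val_augmentations, period=20)])
trainer.train()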
from collections import namedtuple
from django.db.models import Sum
from django.template.response import TemplateResponse
from django.urls import reverse_lazy as reverse
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from ...forms.reports.orderables import OrderablePaymentsForm, OrderablePaymentsStatusForm, OrderableStatsForm
from ...models.agegroup import AgeGroup
from ...models.citizenship import Citizenship
from ...models.orderables import Orderable, OrderableRegistration
from ...models.roles import Participant
from ...models.subjects import SubjectPayment, SubjectRegistrationParticipant, SubjectType
from ...views.generic import FormView
class ReportOrderablePaymentsView(FormView):
form_class = OrderablePaymentsForm
template_name = "leprikon/reports/orderable_payments.html"
title = _("Orderable payments")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
def form_valid(self, form):
context = form.cleaned_data
context["form"] = form
context["received_payments"] = SubjectPayment.objects.filter(
target_registration__subject__subject_type__subject_type=SubjectType.ORDERABLE,
accounted__gte=context["date_start"],
accounted__lte=context["date_end"],
)
context["returned_payments"] = SubjectPayment.objects.filter(
source_registration__subject__subject_type__subject_type=SubjectType.ORDERABLE,
accounted__gte=context["date_start"],
accounted__lte=context["date_end"],
)
context["received_payments_sum"] = context["received_payments"].aggregate(sum=Sum("amount"))["sum"] or 0
context["returned_payments_sum"] = context["returned_payments"].aggregate(sum=Sum("amount"))["sum"] or 0
context["sum"] = context["received_payments_sum"] - context["returned_payments_sum"]
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
class ReportOrderablePaymentsStatusView(FormView):
form_class = OrderablePaymentsStatusForm
template_name = "leprikon/reports/orderable_payments_status.html"
title = _("Orderable event payments status")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
OrderablePaymentsStatusSums = namedtuple("OrderablePaymentsStatusSums", ("registrations", "status"))
def form_valid(self, form):
context = form.cleaned_data
context["form"] = form
context["reports"] = [
self.Report(orderable, context["date"])
for orderable in Orderable.objects.filter(school_year=self.request.school_year)
]
context["sum"] = self.OrderablePaymentsStatusSums(
registrations=sum(len(r.registration_statuses) for r in context["reports"]),
status=sum(r.status for r in context["reports"]),
)
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
class Report:
def __init__(self, orderable, d):
self.orderable = orderable
self.date = d
RegPaymentStatus = namedtuple("RegPaymentStatus", ("registration", "status"))
@cached_property
def registration_statuses(self):
return [
registration_status
for registration_status in (
self.RegPaymentStatus(
registration=registration,
status=registration.get_payment_status(self.date),
)
for registration in OrderableRegistration.objects.filter(
subject=self.orderable,
approved__date__lte=self.date,
)
)
if registration_status.status.receivable
]
@cached_property
def status(self):
return sum(rs.status for rs in self.registration_statuses)
class ReportOrderableStatsView(FormView):
form_class = OrderableStatsForm
template_name = "leprikon/reports/orderable_stats.html"
title = _("Orderable statistics")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
ReportItem = namedtuple("ReportItem", ("age_group", "all", "boys", "girls", "citizenships"))
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["school_year"] = self.request.school_year
return kwargs
def form_valid(self, form):
d = form.cleaned_data["date"]
paid_only = form.cleaned_data["paid_only"]
context = form.cleaned_data
context["form"] = form
participants = (
SubjectRegistrationParticipant.objects.filter(
registration__subject__in=form.cleaned_data["orderables"],
registration__approved__date__lte=d,
)
.exclude(registration__canceled__date__lte=d)
.select_related("registration", "age_group")
)
if paid_only:
participants = [
participant
for participant in participants
if participant.registration.orderableregistration.get_payment_status(d).balance >= 0
]
else:
participants = list(participants)
context["orderables_count"] = len(set(participant.registration.subject_id for participant in participants))
citizenships = list(Citizenship.objects.all())
context["citizenships"] = citizenships
context["participants_counts"] = self.ReportItem(
age_group=None,
all=len(participants),
boys=len([p for p in participants if p.gender == Participant.MALE]),
girls=len([p for p in participants if p.gender == Participant.FEMALE]),
citizenships=[
len([p for p in participants if p.citizenship_id == citizenship.id]) for citizenship in citizenships
],
)
context["participants_counts_by_age_groups"] = []
for age_group in AgeGroup.objects.all():
parts = [p for p in participants if p.age_group == age_group]
context["participants_counts_by_age_groups"].append(
self.ReportItem(
age_group=age_group,
all=len(parts),
boys=len([p for p in parts if p.gender == Participant.MALE]),
girls=len([p for p in parts if p.gender == Participant.FEMALE]),
citizenships=[
len([p for p in parts if p.citizenship_id == citizenship.id]) for citizenship in citizenships
],
)
)
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context)) | leprikon/views/reports/orderables.py | from collections import namedtuple
from django.db.models import Sum
from django.template.response import TemplateResponse
from django.urls import reverse_lazy as reverse
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from ...forms.reports.orderables import OrderablePaymentsForm, OrderablePaymentsStatusForm, OrderableStatsForm
from ...models.agegroup import AgeGroup
from ...models.citizenship import Citizenship
from ...models.orderables import Orderable, OrderableRegistration
from ...models.roles import Participant
from ...models.subjects import SubjectPayment, SubjectRegistrationParticipant, SubjectType
from ...views.generic import FormView
class ReportOrderablePaymentsView(FormView):
form_class = OrderablePaymentsForm
template_name = "leprikon/reports/orderable_payments.html"
title = _("Orderable payments")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
def form_valid(self, form):
context = form.cleaned_data
context["form"] = form
context["received_payments"] = SubjectPayment.objects.filter(
target_registration__subject__subject_type__subject_type=SubjectType.ORDERABLE,
accounted__gte=context["date_start"],
accounted__lte=context["date_end"],
)
context["returned_payments"] = SubjectPayment.objects.filter(
source_registration__subject__subject_type__subject_type=SubjectType.ORDERABLE,
accounted__gte=context["date_start"],
accounted__lte=context["date_end"],
)
context["received_payments_sum"] = context["received_payments"].aggregate(sum=Sum("amount"))["sum"] or 0
context["returned_payments_sum"] = context["returned_payments"].aggregate(sum=Sum("amount"))["sum"] or 0
context["sum"] = context["received_payments_sum"] - context["returned_payments_sum"]
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
class ReportOrderablePaymentsStatusView(FormView):
form_class = OrderablePaymentsStatusForm
template_name = "leprikon/reports/orderable_payments_status.html"
title = _("Orderable event payments status")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
OrderablePaymentsStatusSums = namedtuple("OrderablePaymentsStatusSums", ("registrations", "status"))
def form_valid(self, form):
context = form.cleaned_data
context["form"] = form
context["reports"] = [
self.Report(orderable, context["date"])
for orderable in Orderable.objects.filter(school_year=self.request.school_year)
]
context["sum"] = self.OrderablePaymentsStatusSums(
registrations=sum(len(r.registration_statuses) for r in context["reports"]),
status=sum(r.status for r in context["reports"]),
)
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context))
class Report:
def __init__(self, orderable, d):
self.orderable = orderable
self.date = d
RegPaymentStatus = namedtuple("RegPaymentStatus", ("registration", "status"))
@cached_property
def registration_statuses(self):
return [
registration_status
for registration_status in (
self.RegPaymentStatus(
registration=registration,
status=registration.get_payment_status(self.date),
)
for registration in OrderableRegistration.objects.filter(
subject=self.orderable,
approved__date__lte=self.date,
)
)
if registration_status.status.receivable
]
@cached_property
def status(self):
return sum(rs.status for rs in self.registration_statuses)
class ReportOrderableStatsView(FormView):
form_class = OrderableStatsForm
template_name = "leprikon/reports/orderable_stats.html"
title = _("Orderable statistics")
submit_label = _("Show")
back_url = reverse("leprikon:report_list")
ReportItem = namedtuple("ReportItem", ("age_group", "all", "boys", "girls", "citizenships"))
def get_form_kwargs(self):
kwargs = super().get_form_kwargs()
kwargs["school_year"] = self.request.school_year
return kwargs
def form_valid(self, form):
d = form.cleaned_data["date"]
paid_only = form.cleaned_data["paid_only"]
context = form.cleaned_data
context["form"] = form
participants = (
SubjectRegistrationParticipant.objects.filter(
registration__subject__in=form.cleaned_data["orderables"],
registration__approved__date__lte=d,
)
.exclude(registration__canceled__date__lte=d)
.select_related("registration", "age_group")
)
if paid_only:
participants = [
participant
for participant in participants
if participant.registration.orderableregistration.get_payment_status(d).balance >= 0
]
else:
participants = list(participants)
context["orderables_count"] = len(set(participant.registration.subject_id for participant in participants))
citizenships = list(Citizenship.objects.all())
context["citizenships"] = citizenships
context["participants_counts"] = self.ReportItem(
age_group=None,
all=len(participants),
boys=len([p for p in participants if p.gender == Participant.MALE]),
girls=len([p for p in participants if p.gender == Participant.FEMALE]),
citizenships=[
len([p for p in participants if p.citizenship_id == citizenship.id]) for citizenship in citizenships
],
)
context["participants_counts_by_age_groups"] = []
for age_group in AgeGroup.objects.all():
parts = [p for p in participants if p.age_group == age_group]
context["participants_counts_by_age_groups"].append(
self.ReportItem(
age_group=age_group,
all=len(parts),
boys=len([p for p in parts if p.gender == Participant.MALE]),
girls=len([p for p in parts if p.gender == Participant.FEMALE]),
citizenships=[
len([p for p in parts if p.citizenship_id == citizenship.id]) for citizenship in citizenships
],
)
)
return TemplateResponse(self.request, self.template_name, self.get_context_data(**context)) | 0.670932 | 0.076064 |
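These report views are class-based views; a minimal sketch of exposing one of them in a URLconf, assuming the leprikon FormView behaves like a standard Django class-based view (the path and name below are hypothetical):

from django.urls import path

urlpatterns = [
    path(
        "reports/orderable-payments/",
        ReportOrderablePaymentsView.as_view(),
        name="report_orderable_payments",
    ),
]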
import json
from excepciones import *
import re
import nltk
from nltk.corpus import stopwords
from string import punctuation
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import pandas as pd
import logging
# `response` is used below but never imported; assuming this module runs inside
# a Bottle app, since response.status is set on the error paths:
from bottle import response
# Download the stopwords, required for the new functionality
nltk.download('stopwords')
def leer_json_alimentos():
    # open() is what raises FileNotFoundError, so it must sit inside the try
    # block; re-raise so callers do not hit an unbound variable afterwards.
    try:
        with open('./json/alimentos_es.json', 'r') as f:
            c = f.read()
    except FileNotFoundError:
        logging.error("Error en la lectura del Json")
        raise
    datos_alimentos = json.loads(c)
    return datos_alimentos
def obtener_diccionario_alimentos():
datos_alimentos = leer_json_alimentos()
array_alimentos = []
array_alimentos = [dato["nombre"] for dato in datos_alimentos]
diccionario_alimentos = {"alimentos":array_alimentos}
return diccionario_alimentos
def lanzar_excepcion(atributo, causa):
try:
raise MisExcepciones(atributo, causa)
except MisExcepciones as e:
logging.info("El campo erróneo es " + e.campo)
logging.info("El error es " + e.informacion)
def lanzar_excepcion_alimento():
    try:
        # Raise the food-format error described by the log lines below; the
        # "Tiempo" exception previously raised here was a copy-paste slip.
        raise MisExcepciones("Alimentos", "El formato de los alimentos es incorrecto")
    except MisExcepciones as e:
        logging.info("Alimentos")
        logging.info("El formato de los alimentos es incorrecto")
def lanzar_excepcion_tiempo():
try:
raise MisExcepciones("Tiempo", "El tiempo está expresado en minutos. No te líes.")
except MisExcepciones as e:
logging.info("Tiempo")
logging.info("El formato de los minutos no es el adecuado")
def comprobar_numero(numero):
numero = numero.strip()
    reg_exp = r"\d+$"
resultado = re.match(reg_exp, numero)
return resultado
palabras_a_eliminar = stopwords.words('spanish')
signos_puntuacion = list(punctuation)
def limpieza_texto(texto_inicial):
    texto_final = ""
    for palabra in texto_inicial.split():
        if palabra not in palabras_a_eliminar or palabra in signos_puntuacion:
            texto_final += palabra + " "  # join kept words with a space instead of gluing them together
    return texto_final.strip()
def eliminar_signos_puntuacion(texto_inicial):
for palabra in signos_puntuacion:
texto_inicial = texto_inicial.replace(palabra, '')
return texto_inicial
def procesar_elaboracion(elaboracion_receta):
elaboracion_receta = elaboracion_receta.lower()
elaboracion_receta = limpieza_texto(elaboracion_receta)
elaboracion_receta = eliminar_signos_puntuacion(elaboracion_receta)
return elaboracion_receta
def tf_idf(conj_recetas):
    vector = TfidfVectorizer()
X = vector.fit_transform(conj_recetas)
matriz_pesos = cosine_similarity(X,X)
return matriz_pesos
def pasar_a_gramos(peso, unidad, alimento):
dic_alimentos = leer_json_alimentos()
gramos_finales = 0
calorias = 0
for i in range(0, len(dic_alimentos)):
if(dic_alimentos[i]["nombre"] == alimento):
calorias = int(dic_alimentos[i]["nutrientes"]["ENERC_KCAL"])
if(unidad == "gr" or unidad == "gramos"):
gramos_finales = (int(peso)*calorias)/100
else:
if(unidad == "litro" or unidad == "litros" or unidad == "kg" or unidad == "kilos" or unidad == "kilo"):
gramos_finales = (int(peso)*1000*calorias)/100
else:
gramos_finales = (int(peso)*12*calorias)/100
return gramos_finales
def obtener_dataframe(alimentos):
    df = pd.read_json(r'./json/recetas.json')
array_aux = []
corr_ali_cal = []
contiene = []
ids = []
    for i in range(0, len(df)):
        aux_alimentos = df["alimentos"][i].split(";")
        array_aux.append(len(aux_alimentos))
        corr_ali_cal.append(df["calorias"][i] / len(aux_alimentos))
        ids.append(i)
        aux = (df["alimentos"][i].replace(";", " ")).split(" ")
        contador = 0
        # Use a separate index so the outer loop variable is not shadowed.
        for j in range(0, len(alimentos)):
            if alimentos[j] in aux:
                contador += 1
        contiene.append(contador)
df = df.assign(num_alimentos = array_aux)
df = df.assign(calorias_alimentos = corr_ali_cal)
df = df.assign(id = ids)
df = df.assign(contiene_alimento = contiene)
return df
def obtener_json():
    # open() raises FileNotFoundError, so it belongs inside the try block.
    try:
        with open('json/recetas.json', 'r') as f:
            c = f.read()
    except FileNotFoundError:
        response.status = 400
        return "{'Error':'404 Fichero no encontrado'}"
    return json.loads(c)
def obtener_diccionario(datos_alimentos):
diccionario = {}
for i in range(0, len(datos_alimentos)):
nom = datos_alimentos[i]["nombre"]
ali = datos_alimentos[i]["alimentos"]
ela = datos_alimentos[i]["elaboracion"]
tim = datos_alimentos[i]["tiempo"]
cal = datos_alimentos[i]["calorias"]
diccionario[nom] = {"nombre":nom, "alimento":ali, "elaboracion":ela, "tiempo":tim, "calorias":cal}
return diccionario
def aniadir_receta_json(receta):
datos = {
'nombre':receta.nombre_receta,
'alimentos':receta.alimentos,
'elaboracion':receta.elaboracion,
'tiempo':receta.tiempo,
'calorias':str(receta.calorias)
}
    # Read the existing recipe list
    try:
        with open('json/recetas.json', 'r') as f:
            c = f.read()
    except FileNotFoundError:
        response.status = 400
        return "{'Error':'404 Fichero no encontrado'}"
s = json.loads(c)
s.append(datos)
sC = json.dumps(s, indent=4)
    # Write the updated recipe list back to the file
with open('json/recetas.json', 'w') as f:
try:
            f.write(sC)
except FileNotFoundError:
response.status = 400
return "{'Error':'404 Fichero no encontrado'}"
def nombre_no_unico(nombre_receta):
datos_alimentos = obtener_json()
recetas = obtener_diccionario(datos_alimentos)
if nombre_receta in recetas:
logging.error("El nombre no es único")
return True
else:
return False
def eliminar_receta_json():
    # Read the current recipe list
    try:
        with open('json/recetas.json', 'r') as f:
            c = json.load(f)
    except FileNotFoundError:
        response.status = 400
        return "{'Error':'404 Fichero no encontrado'}"
c.pop()
sC = json.dumps(c, indent=4)
    # Write the truncated recipe list back to the file
with open('json/recetas.json', 'w') as f:
try:
            f.write(sC)
except FileNotFoundError:
response.status = 400
return "{'Error':'404 Fichero no encontrado'}" | recetarium/funciones.py | import json
from excepciones import *
import re
import nltk
from nltk.corpus import stopwords
from string import punctuation
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
import pandas as pd
import logging
# `response` is used below but never imported; assuming this module runs inside
# a Bottle app, since response.status is set on the error paths:
from bottle import response
# Download the stopwords, required for the new functionality
nltk.download('stopwords')
def leer_json_alimentos():
    # open() is what raises FileNotFoundError, so it must sit inside the try
    # block; re-raise so callers do not hit an unbound variable afterwards.
    try:
        with open('./json/alimentos_es.json', 'r') as f:
            c = f.read()
    except FileNotFoundError:
        logging.error("Error en la lectura del Json")
        raise
    datos_alimentos = json.loads(c)
    return datos_alimentos
def obtener_diccionario_alimentos():
datos_alimentos = leer_json_alimentos()
array_alimentos = []
array_alimentos = [dato["nombre"] for dato in datos_alimentos]
diccionario_alimentos = {"alimentos":array_alimentos}
return diccionario_alimentos
def lanzar_excepcion(atributo, causa):
try:
raise MisExcepciones(atributo, causa)
except MisExcepciones as e:
logging.info("El campo erróneo es " + e.campo)
logging.info("El error es " + e.informacion)
def lanzar_excepcion_alimento():
    try:
        # Raise the food-format error described by the log lines below; the
        # "Tiempo" exception previously raised here was a copy-paste slip.
        raise MisExcepciones("Alimentos", "El formato de los alimentos es incorrecto")
    except MisExcepciones as e:
        logging.info("Alimentos")
        logging.info("El formato de los alimentos es incorrecto")
def lanzar_excepcion_tiempo():
try:
raise MisExcepciones("Tiempo", "El tiempo está expresado en minutos. No te líes.")
except MisExcepciones as e:
logging.info("Tiempo")
logging.info("El formato de los minutos no es el adecuado")
def comprobar_numero(numero):
numero = numero.strip()
    reg_exp = r"\d+$"
resultado = re.match(reg_exp, numero)
return resultado
palabras_a_eliminar = stopwords.words('spanish')
signos_puntuacion = list(punctuation)
def limpieza_texto(texto_inicial):
    texto_final = ""
    for palabra in texto_inicial.split():
        if palabra not in palabras_a_eliminar or palabra in signos_puntuacion:
            texto_final += palabra + " "  # join kept words with a space instead of gluing them together
    return texto_final.strip()
def eliminar_signos_puntuacion(texto_inicial):
for palabra in signos_puntuacion:
texto_inicial = texto_inicial.replace(palabra, '')
return texto_inicial
def procesar_elaboracion(elaboracion_receta):
elaboracion_receta = elaboracion_receta.lower()
elaboracion_receta = limpieza_texto(elaboracion_receta)
elaboracion_receta = eliminar_signos_puntuacion(elaboracion_receta)
return elaboracion_receta
def tf_idf(conj_recetas):
    vector = TfidfVectorizer()
X = vector.fit_transform(conj_recetas)
matriz_pesos = cosine_similarity(X,X)
return matriz_pesos
def pasar_a_gramos(peso, unidad, alimento):
dic_alimentos = leer_json_alimentos()
gramos_finales = 0
calorias = 0
for i in range(0, len(dic_alimentos)):
if(dic_alimentos[i]["nombre"] == alimento):
calorias = int(dic_alimentos[i]["nutrientes"]["ENERC_KCAL"])
if(unidad == "gr" or unidad == "gramos"):
gramos_finales = (int(peso)*calorias)/100
else:
if(unidad == "litro" or unidad == "litros" or unidad == "kg" or unidad == "kilos" or unidad == "kilo"):
gramos_finales = (int(peso)*1000*calorias)/100
else:
gramos_finales = (int(peso)*12*calorias)/100
return gramos_finales
def obtener_dataframe(alimentos):
    df = pd.read_json(r'./json/recetas.json')
array_aux = []
corr_ali_cal = []
contiene = []
ids = []
    for i in range(0, len(df)):
        aux_alimentos = df["alimentos"][i].split(";")
        array_aux.append(len(aux_alimentos))
        corr_ali_cal.append(df["calorias"][i] / len(aux_alimentos))
        ids.append(i)
        aux = (df["alimentos"][i].replace(";", " ")).split(" ")
        contador = 0
        # Use a separate index so the outer loop variable is not shadowed.
        for j in range(0, len(alimentos)):
            if alimentos[j] in aux:
                contador += 1
        contiene.append(contador)
df = df.assign(num_alimentos = array_aux)
df = df.assign(calorias_alimentos = corr_ali_cal)
df = df.assign(id = ids)
df = df.assign(contiene_alimento = contiene)
return df
def obtener_json():
    # open() raises FileNotFoundError, so it belongs inside the try block.
    try:
        with open('json/recetas.json', 'r') as f:
            c = f.read()
    except FileNotFoundError:
        response.status = 400
        return "{'Error':'404 Fichero no encontrado'}"
    return json.loads(c)
def obtener_diccionario(datos_alimentos):
diccionario = {}
for i in range(0, len(datos_alimentos)):
nom = datos_alimentos[i]["nombre"]
ali = datos_alimentos[i]["alimentos"]
ela = datos_alimentos[i]["elaboracion"]
tim = datos_alimentos[i]["tiempo"]
cal = datos_alimentos[i]["calorias"]
diccionario[nom] = {"nombre":nom, "alimento":ali, "elaboracion":ela, "tiempo":tim, "calorias":cal}
return diccionario
def aniadir_receta_json(receta):
datos = {
'nombre':receta.nombre_receta,
'alimentos':receta.alimentos,
'elaboracion':receta.elaboracion,
'tiempo':receta.tiempo,
'calorias':str(receta.calorias)
}
    # Read the existing recipe list
    try:
        with open('json/recetas.json', 'r') as f:
            c = f.read()
    except FileNotFoundError:
        response.status = 400
        return "{'Error':'404 Fichero no encontrado'}"
s = json.loads(c)
s.append(datos)
sC = json.dumps(s, indent=4)
    # Write the updated recipe list back to the file
with open('json/recetas.json', 'w') as f:
try:
            f.write(sC)
except FileNotFoundError:
response.status = 400
return "{'Error':'404 Fichero no encontrado'}"
def nombre_no_unico(nombre_receta):
datos_alimentos = obtener_json()
recetas = obtener_diccionario(datos_alimentos)
if nombre_receta in recetas:
logging.error("El nombre no es único")
return True
else:
return False
def eliminar_receta_json():
    # Read the current recipe list
    try:
        with open('json/recetas.json', 'r') as f:
            c = json.load(f)
    except FileNotFoundError:
        response.status = 400
        return "{'Error':'404 Fichero no encontrado'}"
c.pop()
sC = json.dumps(c, indent=4)
    # Write the truncated recipe list back to the file
with open('json/recetas.json', 'w') as f:
try:
            f.write(sC)
except FileNotFoundError:
response.status = 400
return "{'Error':'404 Fichero no encontrado'}" | 0.115886 | 0.162446 |
import logging
import os
import os_utilities
from utils import zip_package
THIS_FILE = os.path.abspath(zip_package.get_main_script_path())
# Method could be a function - pylint: disable=R0201
class Bot(object):
def __init__(
self, remote, attributes, server_version, base_dir, shutdown_hook):
# Do not expose attributes nor remote for now, as attributes will be
# refactored soon and remote would have a lot of side effects if used by
# bot_config.
self._attributes = attributes
self._base_dir = base_dir
self._remote = remote
self._server_version = server_version
self._shutdown_hook = shutdown_hook
@property
def base_dir(self):
"""Returns the working directory.
    It is normally the current working directory, e.g. os.getcwd(), but it is
preferable to not assume that.
"""
return self._base_dir
@property
def dimensions(self):
"""The bot's current dimensions.
Dimensions are relatively static and not expected to change much. They
should change only when it effectively affects the bot's capacity to execute
tasks.
"""
return self._attributes.get('dimensions', {}).copy()
@property
def id(self):
"""Returns the bot's ID."""
return self.dimensions.get('id', ['unknown'])[0]
@property
def remote(self):
"""XsrfClient instance to talk to the server.
    Should normally not be used by bot_config.py for now.
"""
return self._remote
@property
def server_version(self):
"""Version of the server's implementation.
The form is nnn-hhhhhhh for pristine version and nnn-hhhhhhh-tainted-uuuu
for non-upstreamed code base:
nnn: revision pseudo number
hhhhhhh: git commit hash
uuuu: username
"""
return self._server_version
@property
def state(self):
return self._attributes['state']
@property
def swarming_bot_zip(self):
"""Absolute path to the swarming_bot.zip file."""
return THIS_FILE
def post_event(self, event_type, message):
"""Posts an event to the server."""
data = self._attributes.copy()
data['event'] = event_type
data['message'] = message
self._remote.url_read_json('/swarming/api/v1/bot/event', data=data)
def post_error(self, message):
"""Posts given string as a failure.
    This is used in case of an internal code error. It traps exceptions.
"""
logging.error('Error: %s\n%s', self._attributes, message)
try:
self.post_event('bot_error', message)
except Exception:
logging.exception('post_error(%s) failed.', message)
def restart(self, message):
"""Reboots the machine.
If the reboot is successful, never returns: the process should just be
killed by OS.
If reboot fails, logs the error to the server and moves the bot to
quarantined mode.
"""
self.post_event('bot_rebooting', message)
if self._shutdown_hook:
try:
self._shutdown_hook(self)
except Exception as e:
logging.exception('shutdown hook failed: %s', e)
# os_utilities.restart should never return, unless restart is not happening.
    # If restart is taking longer than N minutes, it is probably not going to
# finish at all. Report this to the server.
os_utilities.restart(message, timeout=15*60)
self.post_error('Bot is stuck restarting for: %s' % message)
def update_state(self, new_state):
"""Called internally to update Bot.state."""
self._attributes['state'] = new_state | appengine/swarming/swarming_bot/bot.py | import logging
import os
import os_utilities
from utils import zip_package
THIS_FILE = os.path.abspath(zip_package.get_main_script_path())
# Method could be a function - pylint: disable=R0201
class Bot(object):
def __init__(
self, remote, attributes, server_version, base_dir, shutdown_hook):
# Do not expose attributes nor remote for now, as attributes will be
# refactored soon and remote would have a lot of side effects if used by
# bot_config.
self._attributes = attributes
self._base_dir = base_dir
self._remote = remote
self._server_version = server_version
self._shutdown_hook = shutdown_hook
@property
def base_dir(self):
"""Returns the working directory.
    It is normally the current working directory, e.g. os.getcwd(), but it is
preferable to not assume that.
"""
return self._base_dir
@property
def dimensions(self):
"""The bot's current dimensions.
Dimensions are relatively static and not expected to change much. They
should change only when it effectively affects the bot's capacity to execute
tasks.
"""
return self._attributes.get('dimensions', {}).copy()
@property
def id(self):
"""Returns the bot's ID."""
return self.dimensions.get('id', ['unknown'])[0]
@property
def remote(self):
"""XsrfClient instance to talk to the server.
    Should normally not be used by bot_config.py for now.
"""
return self._remote
@property
def server_version(self):
"""Version of the server's implementation.
The form is nnn-hhhhhhh for pristine version and nnn-hhhhhhh-tainted-uuuu
for non-upstreamed code base:
nnn: revision pseudo number
hhhhhhh: git commit hash
uuuu: username
"""
return self._server_version
@property
def state(self):
return self._attributes['state']
@property
def swarming_bot_zip(self):
"""Absolute path to the swarming_bot.zip file."""
return THIS_FILE
def post_event(self, event_type, message):
"""Posts an event to the server."""
data = self._attributes.copy()
data['event'] = event_type
data['message'] = message
self._remote.url_read_json('/swarming/api/v1/bot/event', data=data)
def post_error(self, message):
"""Posts given string as a failure.
    This is used in case of an internal code error. It traps exceptions.
"""
logging.error('Error: %s\n%s', self._attributes, message)
try:
self.post_event('bot_error', message)
except Exception:
logging.exception('post_error(%s) failed.', message)
def restart(self, message):
"""Reboots the machine.
If the reboot is successful, never returns: the process should just be
killed by OS.
If reboot fails, logs the error to the server and moves the bot to
quarantined mode.
"""
self.post_event('bot_rebooting', message)
if self._shutdown_hook:
try:
self._shutdown_hook(self)
except Exception as e:
logging.exception('shutdown hook failed: %s', e)
# os_utilities.restart should never return, unless restart is not happening.
    # If restart is taking longer than N minutes, it is probably not going to
# finish at all. Report this to the server.
os_utilities.restart(message, timeout=15*60)
self.post_error('Bot is stuck restarting for: %s' % message)
def update_state(self, new_state):
"""Called internally to update Bot.state."""
self._attributes['state'] = new_state | 0.637369 | 0.181662 |
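A minimal sketch of constructing and exercising Bot with a stub remote client; the real remote is an XsrfClient supplied by the Swarming infrastructure, and the stub class and attribute values below are illustrative assumptions:

class FakeRemote(object):
    def url_read_json(self, path, data=None):
        print('POST %s event=%s' % (path, data.get('event')))

attrs = {
    'dimensions': {'id': ['bot-1'], 'os': ['Linux']},
    'state': {'free_disk_mb': 1024},
}
bot = Bot(FakeRemote(), attrs, '123-abcdef0', '/tmp/bot', shutdown_hook=None)
print(bot.id)  # 'bot-1'
bot.post_event('bot_log', 'hello from the sketch')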
import functools
import itertools
import logging
import os
from pathlib import Path
from typing import Callable, Dict, List, Optional
from .async_call import AsyncCaller, AsyncCallsNotSupported
from .chk_manager import CheckpointManager, NULL_CHK_ID
from .consts import Continuations, Seqno, Pid, CheckpointID, MAIN_PID, INITIAL_SEQNO
from .coordinator_call import CoordinatorCall, exit_process, Exit, spawn
from .global_state import pause_ctrl
from .logging import log, log_begin, log_duration, log_at_end
from .protocol import Request, FinalizedCoordinatorCall
from .rpc import rpc, WouldBlock
logging.basicConfig(level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
class _CoordinatorCallBacklog(List[FinalizedCoordinatorCall]):
"""Represents coordinator calls yet to be made."""
def prune(self, next_seqno: Seqno) -> None:
"""Removes calls with seqno less than `next_seqno`."""
for i in reversed(range(len(self))):
if self[i].seqno < next_seqno:
del self[i]
def run(entry_point: Callable, *args, **kwargs):
"""Calls _run() with sensible defaults."""
return _run(select_chk_manager("command_line"), MAIN_PID, INITIAL_SEQNO, NULL_CHK_ID, None, None,
entry_point, *args, **kwargs)
def _run(chk_manager: CheckpointManager, pid: Pid, start_seqno: Seqno, start_chk_id: CheckpointID, return_value,
rpc_addr: Optional[str], entry_point: Callable, *args, **kwargs) -> Request:
"""
Resumes execution from saved state. If no saved state is found, invokes the provided entry-point callable.
If not in CPS mode, skips loading checkpoint and directly invokes the provided entry point (to keep
un-transformed code functional).
:param return_value: return value of the previous coordinator call.
"""
    # If no previous checkpoint is found, start fresh.
with log_duration(pid, start_seqno, "load_chk"):
continuations: Continuations = chk_manager.load(start_chk_id) or [lambda _: entry_point(*args, **kwargs)]
async_caller: Optional[AsyncCaller] = None
try:
async_caller = AsyncCaller(rpc_addr, chk_manager, pid)
except AsyncCallsNotSupported:
pass
cc_backlog = _CoordinatorCallBacklog()
for _seqno in itertools.count(start=start_seqno):
seqno = Seqno(_seqno) # Wrap in Seqno for stricter type checking.
i = 0
try:
with log_duration(pid, seqno, "compute"):
for cont in continuations:
return_value = cont(return_value)
i += 1
exit_process(return_value)
except Exit as cc: # Special-case the "exit" coordinator call.
finalized = cc.finalize(chk_manager, pid, seqno)
return Request(pid=pid, seqno=seqno, chk_id=NULL_CHK_ID, calls=[finalized])
except CoordinatorCall as cc: # Other coordinator calls.
if cc.is_async:
log_type = "async coordinator call"
else:
log_type = "coordinator call"
log_begin(pid, seqno, log_type, timestamp=cc.start_time)
with log_at_end(pid, seqno, log_type):
# The saved continuations include the ones generated during this execution, and the ones left unrun from
# the previous execution.
continuations = cc.continuations + continuations[i+1:]
if async_caller:
cc_backlog.prune(async_caller.get_next_seqno(terminate_worker=True))
cc_backlog.append(cc.finalize(chk_manager, pid, seqno))
if cc.is_async and async_caller and async_caller.call(cc_backlog, continuations, seqno):
continue
# If we're here, we're doing the call synchronously.
chk_id = chk_manager.save(continuations, pid, seqno)
req = Request(pid=pid, seqno=seqno, chk_id=chk_id, calls=cc_backlog)
log(pid, seqno, f"sending request with {len(cc_backlog)} coordinator calls")
if rpc_addr:
try:
return_value = rpc(rpc_addr, req, pid, seqno)
cc_backlog.clear()
continue
except WouldBlock:
log(pid, seqno, "rpc blocked")
return Request.make_blocked(pid, seqno)
except Exception as e:
log(pid, seqno, f"rpc: {e}; falling back to synchronous")
# RPC failed; fall back to quitting lambda with coordinator call.
req = req._replace(err=f"RPC: {e}, falling back to synchronous (is your coordinator machine "
"publicly accessible?)")
return req
finally:
pause_ctrl.record_pause()
assert False # Unreachable.
def select_chk_manager(platform: str) -> CheckpointManager:
"""Returns a checkpoint manager corresponding to the platform. Raises ValueError if platform is not recognized."""
# Import locally so that the irrelevant checkpoint manager classes don't need to be importable.
if platform == "local":
from .chk_manager import LocalCheckpointManager
return LocalCheckpointManager(Path(os.environ["CHECKPOINT_DIR"]))
elif platform == "aws":
from .chk_manager import S3CheckpointManager
return S3CheckpointManager(bucket_name=os.environ["CHECKPOINT_BUCKET"])
# TODO(zhangwen): signal that this error is fatal?
raise ValueError("No checkpoint manager for platform: {}".format(platform))
def lambda_handler(handler):
"""
Decorator to apply to a lambda handler.
This decorator extracts Kappa runtime parameters from the `event` structure and passes any user-defined
parameters to the handler.
"""
@functools.wraps(handler)
def decorated_handler(event: Dict[str, object], context) -> str:
# TODO(zhangwen): add a way to induce artificial failures.
# FIXME(zhangwen): maybe just don't pass context to user handler... e.g., we don't want the context pickled.
context = None
_pid, _seqno, _chk_id, = event["pid"], event["seqno"], event["chk_id"]
assert isinstance(_pid, int)
pid = Pid(_pid)
assert isinstance(_seqno, int)
seqno = Seqno(_seqno)
log(pid, seqno, f"lambda started!")
assert isinstance(_chk_id, str)
chk_id = CheckpointID(_chk_id)
platform = os.environ["PLATFORM"]
rpc_addr = None
rpc_ip = os.environ.get("RPC_IP")
if rpc_ip is not None:
if os.environ["WHERE"] == "coordinator":
rpc_ip = "127.0.0.1" # If task is running on the coordinator machine, issue RPCs to localhost.
rpc_port = os.environ["RPC_PORT"] # RPC_PORT should be present in the environment iff RPC_IP is.
rpc_addr = f"{rpc_ip}:{rpc_port}"
app_event = event["app_event"]
chk_manager = select_chk_manager(platform)
last_return_value = event["coord_call_result"]
def entry_point():
return spawn(handler, (app_event, context), blocking=True)
return str(_run(chk_manager, pid, seqno, chk_id, last_return_value, rpc_addr, entry_point))
return decorated_handler | compiler/rt/run.py | import functools
import itertools
import logging
import os
from pathlib import Path
from typing import Callable, Dict, List, Optional
from .async_call import AsyncCaller, AsyncCallsNotSupported
from .chk_manager import CheckpointManager, NULL_CHK_ID
from .consts import Continuations, Seqno, Pid, CheckpointID, MAIN_PID, INITIAL_SEQNO
from .coordinator_call import CoordinatorCall, exit_process, Exit, spawn
from .global_state import pause_ctrl
from .logging import log, log_begin, log_duration, log_at_end
from .protocol import Request, FinalizedCoordinatorCall
from .rpc import rpc, WouldBlock
logging.basicConfig(level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
class _CoordinatorCallBacklog(List[FinalizedCoordinatorCall]):
"""Represents coordinator calls yet to be made."""
def prune(self, next_seqno: Seqno) -> None:
"""Removes calls with seqno less than `next_seqno`."""
for i in reversed(range(len(self))):
if self[i].seqno < next_seqno:
del self[i]
def run(entry_point: Callable, *args, **kwargs):
"""Calls _run() with sensible defaults."""
return _run(select_chk_manager("command_line"), MAIN_PID, INITIAL_SEQNO, NULL_CHK_ID, None, None,
entry_point, *args, **kwargs)
def _run(chk_manager: CheckpointManager, pid: Pid, start_seqno: Seqno, start_chk_id: CheckpointID, return_value,
rpc_addr: Optional[str], entry_point: Callable, *args, **kwargs) -> Request:
"""
Resumes execution from saved state. If no saved state is found, invokes the provided entry-point callable.
If not in CPS mode, skips loading checkpoint and directly invokes the provided entry point (to keep
un-transformed code functional).
:param return_value: return value of the previous coordinator call.
"""
    # If no previous checkpoint is found, start fresh.
with log_duration(pid, start_seqno, "load_chk"):
continuations: Continuations = chk_manager.load(start_chk_id) or [lambda _: entry_point(*args, **kwargs)]
async_caller: Optional[AsyncCaller] = None
try:
async_caller = AsyncCaller(rpc_addr, chk_manager, pid)
except AsyncCallsNotSupported:
pass
cc_backlog = _CoordinatorCallBacklog()
for _seqno in itertools.count(start=start_seqno):
seqno = Seqno(_seqno) # Wrap in Seqno for stricter type checking.
i = 0
try:
with log_duration(pid, seqno, "compute"):
for cont in continuations:
return_value = cont(return_value)
i += 1
exit_process(return_value)
except Exit as cc: # Special-case the "exit" coordinator call.
finalized = cc.finalize(chk_manager, pid, seqno)
return Request(pid=pid, seqno=seqno, chk_id=NULL_CHK_ID, calls=[finalized])
except CoordinatorCall as cc: # Other coordinator calls.
if cc.is_async:
log_type = "async coordinator call"
else:
log_type = "coordinator call"
log_begin(pid, seqno, log_type, timestamp=cc.start_time)
with log_at_end(pid, seqno, log_type):
# The saved continuations include the ones generated during this execution, and the ones left unrun from
# the previous execution.
continuations = cc.continuations + continuations[i+1:]
if async_caller:
cc_backlog.prune(async_caller.get_next_seqno(terminate_worker=True))
cc_backlog.append(cc.finalize(chk_manager, pid, seqno))
if cc.is_async and async_caller and async_caller.call(cc_backlog, continuations, seqno):
continue
# If we're here, we're doing the call synchronously.
chk_id = chk_manager.save(continuations, pid, seqno)
req = Request(pid=pid, seqno=seqno, chk_id=chk_id, calls=cc_backlog)
log(pid, seqno, f"sending request with {len(cc_backlog)} coordinator calls")
if rpc_addr:
try:
return_value = rpc(rpc_addr, req, pid, seqno)
cc_backlog.clear()
continue
except WouldBlock:
log(pid, seqno, "rpc blocked")
return Request.make_blocked(pid, seqno)
except Exception as e:
log(pid, seqno, f"rpc: {e}; falling back to synchronous")
# RPC failed; fall back to quitting lambda with coordinator call.
req = req._replace(err=f"RPC: {e}, falling back to synchronous (is your coordinator machine "
"publicly accessible?)")
return req
finally:
pause_ctrl.record_pause()
assert False # Unreachable.
def select_chk_manager(platform: str) -> CheckpointManager:
"""Returns a checkpoint manager corresponding to the platform. Raises ValueError if platform is not recognized."""
# Import locally so that the irrelevant checkpoint manager classes don't need to be importable.
if platform == "local":
from .chk_manager import LocalCheckpointManager
return LocalCheckpointManager(Path(os.environ["CHECKPOINT_DIR"]))
elif platform == "aws":
from .chk_manager import S3CheckpointManager
return S3CheckpointManager(bucket_name=os.environ["CHECKPOINT_BUCKET"])
# TODO(zhangwen): signal that this error is fatal?
raise ValueError("No checkpoint manager for platform: {}".format(platform))
def lambda_handler(handler):
"""
Decorator to apply to a lambda handler.
This decorator extracts Kappa runtime parameters from the `event` structure and passes any user-defined
parameters to the handler.
"""
@functools.wraps(handler)
def decorated_handler(event: Dict[str, object], context) -> str:
# TODO(zhangwen): add a way to induce artificial failures.
# FIXME(zhangwen): maybe just don't pass context to user handler... e.g., we don't want the context pickled.
context = None
_pid, _seqno, _chk_id, = event["pid"], event["seqno"], event["chk_id"]
assert isinstance(_pid, int)
pid = Pid(_pid)
assert isinstance(_seqno, int)
seqno = Seqno(_seqno)
log(pid, seqno, f"lambda started!")
assert isinstance(_chk_id, str)
chk_id = CheckpointID(_chk_id)
platform = os.environ["PLATFORM"]
rpc_addr = None
rpc_ip = os.environ.get("RPC_IP")
if rpc_ip is not None:
if os.environ["WHERE"] == "coordinator":
rpc_ip = "127.0.0.1" # If task is running on the coordinator machine, issue RPCs to localhost.
rpc_port = os.environ["RPC_PORT"] # RPC_PORT should be present in the environment iff RPC_IP is.
rpc_addr = f"{rpc_ip}:{rpc_port}"
app_event = event["app_event"]
chk_manager = select_chk_manager(platform)
last_return_value = event["coord_call_result"]
def entry_point():
return spawn(handler, (app_event, context), blocking=True)
return str(_run(chk_manager, pid, seqno, chk_id, last_return_value, rpc_addr, entry_point))
return decorated_handler | 0.703957 | 0.085251 |
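A minimal sketch of applying the decorator above to a hypothetical handler; at invocation time the Kappa runtime supplies the pid/seqno/chk_id fields in `event` and the PLATFORM (and optional RPC_*) environment variables:

@lambda_handler
def my_handler(app_event, context):
    # Ordinary application code; it runs in a Kappa-spawned process and can be
    # checkpointed and resumed across lambda invocations.
    return {'echo': app_event}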
import numpy as np
import logging
class RecurrentNeuralNetwork(object):
"""Recurrent Neural Network.
Attributes:
dimension: 3-tuple of input, hidden, and output nodes
    act_func: Network's activation function
"""
def __init__(self, dimension, act_func=None):
self.dim = dimension
self.wi = np.zeros((dimension[1], dimension[0]))
self.wh = np.zeros((dimension[1], dimension[1]))
self.wo = np.zeros((dimension[2], sum(dimension[:2])))
self.wb = np.zeros((dimension[1], dimension[2]))
self.dh = np.zeros((dimension[1], 1))
self.do = np.zeros((dimension[2], 1))
self.act_func = None
if act_func:
self.act_func = np.vectorize(act_func)
self.log = logging.getLogger('rnn')
def __input(self, node):
return node >= 0 and node < self.dim[0]
def __hidden(self, node):
return node >= self.dim[0] and node < sum(self.dim[:2])
def __output(self, node):
return node >= sum(self.dim[:2]) and node < sum(self.dim)
def __inode(self, node):
return node
def __hnode(self, node):
return node - self.dim[0]
def __onode(self, node):
return node - sum(self.dim[:2])
def add_link(self, inode, onode, weight=1.0):
"""Adds link to network.
Inserts link from inode to onode with weight. The nodes are indexed
from 0 to n where n-1 is the sum of input, hidden, and output. For
example, a link from the first input to the second output of a network
in the form of (2, 2, 2) would look like add_link(0, 5).
Args:
inode: input node index
onode: output node index
weight: weight of the link
"""
if self.__input(inode) and self.__hidden(onode):
self.wi[self.__hnode(onode), self.__inode(inode)] = weight
elif self.__input(inode) and self.__output(onode):
self.wo[self.__onode(onode), self.__inode(inode)] = weight
elif self.__hidden(inode) and self.__hidden(onode):
            self.wh[self.__hnode(onode), self.__hnode(inode)] = weight  # row = target, column = source, matching np.dot(self.wh, self.dh) in activate()
elif self.__hidden(inode) and self.__output(onode):
self.wo[self.__onode(onode), self.dim[0]+self.__hnode(inode)] = weight
elif self.__output(inode) and self.__hidden(onode):
self.wb[self.__hnode(onode), self.__onode(inode)] = weight
else:
self.log.error('Cannot create link from %d to %d',
inode, onode)
return False
return True
def activate(self, data):
"""Activates the network.
        Performs one timestep of the network.
Args:
data: input data of len(dimension[0])
Returns:
A list of output values of len(dimension[2])
"""
di = np.array(data).reshape((len(data), 1))
dtemp = np.dot(self.wi, di)+np.dot(self.wh, self.dh)+np.dot(self.wb, self.do)
if self.act_func and dtemp.shape[0] > 0:
self.dh = self.act_func(dtemp)
else:
self.dh = dtemp
dconcat = np.concatenate((di, self.dh))
self.do = np.dot(self.wo, dconcat)
if self.act_func:
self.do = self.act_func(self.do)
return self.do.squeeze().tolist() | pyneat/ann/rnn.py | import numpy as np
import logging
class RecurrentNeuralNetwork(object):
"""Recurrent Neural Network.
Attributes:
dimension: 3-tuple of input, hidden, and output nodes
    act_func: Network's activation function
"""
def __init__(self, dimension, act_func=None):
self.dim = dimension
self.wi = np.zeros((dimension[1], dimension[0]))
self.wh = np.zeros((dimension[1], dimension[1]))
self.wo = np.zeros((dimension[2], sum(dimension[:2])))
self.wb = np.zeros((dimension[1], dimension[2]))
self.dh = np.zeros((dimension[1], 1))
self.do = np.zeros((dimension[2], 1))
self.act_func = None
if act_func:
self.act_func = np.vectorize(act_func)
self.log = logging.getLogger('rnn')
def __input(self, node):
return node >= 0 and node < self.dim[0]
def __hidden(self, node):
return node >= self.dim[0] and node < sum(self.dim[:2])
def __output(self, node):
return node >= sum(self.dim[:2]) and node < sum(self.dim)
def __inode(self, node):
return node
def __hnode(self, node):
return node - self.dim[0]
def __onode(self, node):
return node - sum(self.dim[:2])
def add_link(self, inode, onode, weight=1.0):
"""Adds link to network.
Inserts link from inode to onode with weight. The nodes are indexed
from 0 to n where n-1 is the sum of input, hidden, and output. For
example, a link from the first input to the second output of a network
in the form of (2, 2, 2) would look like add_link(0, 5).
Args:
inode: input node index
onode: output node index
weight: weight of the link
"""
if self.__input(inode) and self.__hidden(onode):
self.wi[self.__hnode(onode), self.__inode(inode)] = weight
elif self.__input(inode) and self.__output(onode):
self.wo[self.__onode(onode), self.__inode(inode)] = weight
elif self.__hidden(inode) and self.__hidden(onode):
            self.wh[self.__hnode(onode), self.__hnode(inode)] = weight  # row = target, column = source, matching np.dot(self.wh, self.dh) in activate()
elif self.__hidden(inode) and self.__output(onode):
self.wo[self.__onode(onode), self.dim[0]+self.__hnode(inode)] = weight
elif self.__output(inode) and self.__hidden(onode):
self.wb[self.__hnode(onode), self.__onode(inode)] = weight
else:
self.log.error('Cannot create link from %d to %d',
inode, onode)
return False
return True
def activate(self, data):
"""Activates the network.
        Performs one timestep of the network.
Args:
data: input data of len(dimension[0])
Returns:
A list of output values of len(dimension[2])
"""
di = np.array(data).reshape((len(data), 1))
dtemp = np.dot(self.wi, di)+np.dot(self.wh, self.dh)+np.dot(self.wb, self.do)
if self.act_func and dtemp.shape[0] > 0:
self.dh = self.act_func(dtemp)
else:
self.dh = dtemp
dconcat = np.concatenate((di, self.dh))
self.do = np.dot(self.wo, dconcat)
if self.act_func:
self.do = self.act_func(self.do)
return self.do.squeeze().tolist() | 0.779616 | 0.600481 |
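A minimal usage sketch of the class above: a (2, 2, 2) network with a tanh activation and one self-recurrent hidden link, stepped twice so the hidden state carries over. Node indices follow the add_link docstring (inputs 0-1, hidden 2-3, outputs 4-5).

import math

net = RecurrentNeuralNetwork((2, 2, 2), act_func=math.tanh)
net.add_link(0, 2, weight=0.5)   # input 0 -> hidden 0
net.add_link(1, 3, weight=0.5)   # input 1 -> hidden 1
net.add_link(2, 2, weight=0.3)   # hidden 0 -> hidden 0 (recurrent)
net.add_link(2, 4, weight=1.0)   # hidden 0 -> output 0
net.add_link(3, 5, weight=1.0)   # hidden 1 -> output 1
print(net.activate([1.0, 0.0]))  # first timestep
print(net.activate([1.0, 0.0]))  # second timestep differs via the recurrent link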
import glob
import os
import numpy as np
import hydra
import torch
from omegaconf import DictConfig, OmegaConf
from pose_est_nets.utils.plotting_utils import (
predict_videos,
)
from pose_est_nets.utils.io import (
get_absolute_hydra_path_from_hydra_str,
ckpt_path_from_base_path,
verify_absolute_path,
)
import argparse
from pathlib import Path
from itertools import product
<EMAIL>(config_path="configs", config_name="config")
def make_predictions(extraction_method, dataset, run, model, seed):
"""this script will work with a path to a trained model's hydra folder
from that folder it'll read the info about the model, get the checkpoint, and predict on a new vid"""
"""note, by decorating with hydra, the current working directory will be become the new folder os.path.join(os.getcwd(), "/outputs/YYYY-MM-DD/hour-info")"""
# TODO: supporting only the zeroth index of cfg.eval.path_to_test_videos[0]
# go to folders up to the "outputs" folder, and search for hydra_path from cfg
absolute_cfg_path = f'outputs/{extraction_method}_{dataset}_{run}_{model}_{seed}/'
model_cfg = OmegaConf.load(
os.path.join(absolute_cfg_path, ".hydra/config.yaml")
)
ckpt_file = ckpt_path_from_base_path(
base_path=absolute_cfg_path, model_name=model_cfg.model.model_name
)
datasets_path = '/home/eivinas/dev/dlc-frame-selection/datasets/'
test_video_dir = f'{datasets_path}/{dataset}/test_video'
#test_video_dir = f'{datasets_path}/{dataset}'
predictions_csv_dir = f'/home/eivinas/dev/dlc-frame-selection/predictions/csv_{dataset}'
save_dir = f'{predictions_csv_dir}/{extraction_method}_{dataset}_{run}_{model}_{seed}'
Path(save_dir).mkdir(parents=True, exist_ok=True)
predict_videos(
video_dir=test_video_dir,
ckpt_file=ckpt_file,
cfg_file=model_cfg,
save_dir=save_dir,
sequence_length=64,
)
if __name__ == "__main__":
#extraction_methods = ['uniform', 'kmeans', 'umap']
extraction_methods = ['uniform']
dataset = 'mouse_wheel'
n_runs = 1
n_seeds = 1
runs = range(1, n_runs+1)
seeds = range(1, n_seeds+1)
models = [18] # 152
runs = [1]
seeds = [1]
rmses = {}
combs = product(extraction_methods, runs, models, seeds)
for comb in combs:
(e, r, m, s) = comb
make_predictions(e, dataset, r, m, s) | scripts/predict_new_vids.py | import glob
import os
import numpy as np
import hydra
import torch
from omegaconf import DictConfig, OmegaConf
from pose_est_nets.utils.plotting_utils import (
predict_videos,
)
from pose_est_nets.utils.io import (
get_absolute_hydra_path_from_hydra_str,
ckpt_path_from_base_path,
verify_absolute_path,
)
import argparse
from pathlib import Path
from itertools import product
<EMAIL>(config_path="configs", config_name="config")
def make_predictions(extraction_method, dataset, run, model, seed):
"""this script will work with a path to a trained model's hydra folder
from that folder it'll read the info about the model, get the checkpoint, and predict on a new vid"""
"""note, by decorating with hydra, the current working directory will be become the new folder os.path.join(os.getcwd(), "/outputs/YYYY-MM-DD/hour-info")"""
# TODO: supporting only the zeroth index of cfg.eval.path_to_test_videos[0]
# go to folders up to the "outputs" folder, and search for hydra_path from cfg
absolute_cfg_path = f'outputs/{extraction_method}_{dataset}_{run}_{model}_{seed}/'
model_cfg = OmegaConf.load(
os.path.join(absolute_cfg_path, ".hydra/config.yaml")
)
ckpt_file = ckpt_path_from_base_path(
base_path=absolute_cfg_path, model_name=model_cfg.model.model_name
)
datasets_path = '/home/eivinas/dev/dlc-frame-selection/datasets/'
test_video_dir = f'{datasets_path}/{dataset}/test_video'
#test_video_dir = f'{datasets_path}/{dataset}'
predictions_csv_dir = f'/home/eivinas/dev/dlc-frame-selection/predictions/csv_{dataset}'
save_dir = f'{predictions_csv_dir}/{extraction_method}_{dataset}_{run}_{model}_{seed}'
Path(save_dir).mkdir(parents=True, exist_ok=True)
predict_videos(
video_dir=test_video_dir,
ckpt_file=ckpt_file,
cfg_file=model_cfg,
save_dir=save_dir,
sequence_length=64,
)
if __name__ == "__main__":
#extraction_methods = ['uniform', 'kmeans', 'umap']
extraction_methods = ['uniform']
dataset = 'mouse_wheel'
n_runs = 1
n_seeds = 1
runs = range(1, n_runs+1)
seeds = range(1, n_seeds+1)
models = [18] # 152
runs = [1]
seeds = [1]
rmses = {}
combs = product(extraction_methods, runs, models, seeds)
for comb in combs:
(e, r, m, s) = comb
make_predictions(e, dataset, r, m, s) | 0.304042 | 0.221477 |
import json
import nethysdb
# I don't know if this is useful, but for now I am leaving it here just in case
class Ancestry(nethysdb.NethysDB):
def __init__(self, link, SourceBook, Page, name, description, YouMight, OthersProbably, PhysicalDescription, Society, AlignmentAndReligion, Adventurers, Names, Hitpoints, Size, Speed, AbilityBoost1, AbilityBoost2, AbilityFlaw):
super().__init__(link, SourceBook, Page)
self.name = name
self.description = description
self.YouMight = YouMight
self.OthersProbably = OthersProbably
self.PhysicalDescription = PhysicalDescription
self.Society = Society
self.AlignmentAndReligion = AlignmentAndReligion
self.Adventurers = Adventurers
self.Names = Names
self.Hitpoints = Hitpoints
self.Size = Size
self.Speed = Speed
self.AbilityBoost1 = AbilityBoost1
self.AbilityBoost2 = AbilityBoost2
self.AbilityFlaw = AbilityFlaw
with open('data/ancestries.json') as f:
data = json.load(f)
list_of_ancestries = []
# grab data from the JSON, create Ancestry instances, and append ancestry names to the list
for ancestry in data['ancestries']:
name = ancestry['race']
link = ancestry['NethysUrl']
SourceBook = ancestry['Source']
Page = ancestry['Page']
description = ancestry['Description']
YouMight = ancestry['YouMight']
OthersProbably = ancestry['OthersProbably']
PhysicalDescription = ancestry['Physical Description']
Society = ancestry['Society']
AlignmentAndReligion = ancestry['Alignment and Religion']
Adventurers = ancestry['Adventurers']
Names = ancestry['Names']
Hitpoints = ancestry['Hit Points']
Size = ancestry['Size']
Speed = ancestry['Speed']
AbilityBoost1 = ancestry['Ability Boosts'][0]
AbilityBoost2 = ancestry['Ability Boosts'][1]
AbilityFlaw = ancestry['Ability Flaw'][0]
    ancestry_obj = Ancestry(link, SourceBook, Page, name, description, YouMight, OthersProbably, PhysicalDescription, Society, AlignmentAndReligion, Adventurers, Names, Hitpoints, Size, Speed, AbilityBoost1, AbilityBoost2, AbilityFlaw)  # keep the instance in its own name instead of clobbering the `name` string
list_of_ancestries.append(ancestry['race'])
def get_boosts(ancestry):
if ancestry in list_of_ancestries:
index = list_of_ancestries.index(ancestry)
boosts = []
boosts.append(data['ancestries'][index]['Ability Boosts'][0])
boosts.append(data['ancestries'][index]['Ability Boosts'][1])
return boosts
else:
raise Exception("Ancestry not found in the list_of_ancestries")
def get_flaw(ancestry, n):
if ancestry in list_of_ancestries:
index = list_of_ancestries.index(ancestry)
return data['ancestries'][index]['Ability Flaw'][n]
else:
raise Exception("Ancestry not found in the list_of_ancestries")
def main():
print(list_of_ancestries)
if __name__ == '__main__':
main() | ancestries.py | import json
import nethysdb
# I don't know if this is useful, but for now I am leaving it here just in case
class Ancestry(nethysdb.NethysDB):
def __init__(self, link, SourceBook, Page, name, description, YouMight, OthersProbably, PhysicalDescription, Society, AlignmentAndReligion, Adventurers, Names, Hitpoints, Size, Speed, AbilityBoost1, AbilityBoost2, AbilityFlaw):
super().__init__(link, SourceBook, Page)
self.name = name
self.description = description
self.YouMight = YouMight
self.OthersProbably = OthersProbably
self.PhysicalDescription = PhysicalDescription
self.Society = Society
self.AlignmentAndReligion = AlignmentAndReligion
self.Adventurers = Adventurers
self.Names = Names
self.Hitpoints = Hitpoints
self.Size = Size
self.Speed = Speed
self.AbilityBoost1 = AbilityBoost1
self.AbilityBoost2 = AbilityBoost2
self.AbilityFlaw = AbilityFlaw
with open('data/ancestries.json') as f:
data = json.load(f)
list_of_ancestries = []
# grab data from the JSON, create Ancestry instances, and append ancestry names to the list
for ancestry in data['ancestries']:
name = ancestry['race']
link = ancestry['NethysUrl']
SourceBook = ancestry['Source']
Page = ancestry['Page']
description = ancestry['Description']
YouMight = ancestry['YouMight']
OthersProbably = ancestry['OthersProbably']
PhysicalDescription = ancestry['Physical Description']
Society = ancestry['Society']
AlignmentAndReligion = ancestry['Alignment and Religion']
Adventurers = ancestry['Adventurers']
Names = ancestry['Names']
Hitpoints = ancestry['Hit Points']
Size = ancestry['Size']
Speed = ancestry['Speed']
AbilityBoost1 = ancestry['Ability Boosts'][0]
AbilityBoost2 = ancestry['Ability Boosts'][1]
AbilityFlaw = ancestry['Ability Flaw'][0]
    ancestry_obj = Ancestry(link, SourceBook, Page, name, description, YouMight, OthersProbably, PhysicalDescription, Society, AlignmentAndReligion, Adventurers, Names, Hitpoints, Size, Speed, AbilityBoost1, AbilityBoost2, AbilityFlaw)  # keep the instance in its own name instead of clobbering the `name` string
list_of_ancestries.append(ancestry['race'])
def get_boosts(ancestry):
if ancestry in list_of_ancestries:
index = list_of_ancestries.index(ancestry)
boosts = []
boosts.append(data['ancestries'][index]['Ability Boosts'][0])
boosts.append(data['ancestries'][index]['Ability Boosts'][1])
return boosts
else:
raise Exception("Ancestry not found in the list_of_ancestries")
def get_flaw(ancestry, n):
if ancestry in list_of_ancestries:
index = list_of_ancestries.index(ancestry)
return data['ancestries'][index]['Ability Flaw'][n]
else:
raise Exception("Ancestry not found in the list_of_ancestries")
def main():
print(list_of_ancestries)
if __name__ == '__main__':
main() | 0.306942 | 0.118564 |
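A minimal usage sketch of the lookup helpers above; it assumes data/ancestries.json contains a 'Dwarf' entry, and the returned values depend entirely on that data file:

boosts = get_boosts('Dwarf')  # e.g. ['Constitution', 'Wisdom']
flaw = get_flaw('Dwarf', 0)   # e.g. 'Charisma'
print(boosts, flaw)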
import pytest
import torch
import pyro.distributions as dist
from pyro.contrib.epidemiology import infection_dist
from tests.common import assert_close
def assert_dist_close(d1, d2):
x = torch.arange(float(200))
p1 = d1.log_prob(x).exp()
p2 = d2.log_prob(x).exp()
assert (p1.sum() - 1).abs() < 1e-3, "incomplete mass"
assert (p2.sum() - 1).abs() < 1e-3, "incomplete mass"
mean1 = (p1 * x).sum()
mean2 = (p2 * x).sum()
assert_close(mean1, mean2, rtol=0.05)
max_prob = torch.max(p1.max(), p2.max())
assert (p1 - p2).abs().max() / max_prob < 0.05
@pytest.mark.parametrize("R0,I", [
(1., 1),
(1., 10),
(10., 1),
(5., 5),
])
def test_binomial_vs_poisson(R0, I):
R0 = torch.tensor(R0)
I = torch.tensor(I)
d1 = infection_dist(individual_rate=R0, num_infectious=I)
d2 = infection_dist(individual_rate=R0, num_infectious=I,
num_susceptible=1000., population=1000.)
assert isinstance(d1, dist.Poisson)
assert isinstance(d2, dist.Binomial)
assert_dist_close(d1, d2)
@pytest.mark.parametrize("R0,I,k", [
(1., 1., 0.5),
(1., 1., 1.),
(1., 1., 2.),
(1., 10., 0.5),
(1., 10., 1.),
(1., 10., 2.),
(10., 1., 0.5),
(10., 1., 1.),
(10., 1., 2.),
(5., 5, 0.5),
(5., 5, 1.),
(5., 5, 2.),
])
def test_beta_binomial_vs_negative_binomial(R0, I, k):
R0 = torch.tensor(R0)
I = torch.tensor(I)
d1 = infection_dist(individual_rate=R0, num_infectious=I, concentration=k)
d2 = infection_dist(individual_rate=R0, num_infectious=I, concentration=k,
num_susceptible=1000., population=1000.)
assert isinstance(d1, dist.NegativeBinomial)
assert isinstance(d2, dist.BetaBinomial)
assert_dist_close(d1, d2)
@pytest.mark.parametrize("R0,I", [
(1., 1.),
(1., 10.),
(10., 1.),
(5., 5.),
])
def test_beta_binomial_vs_binomial(R0, I):
R0 = torch.tensor(R0)
I = torch.tensor(I)
d1 = infection_dist(individual_rate=R0, num_infectious=I,
num_susceptible=20., population=30.)
d2 = infection_dist(individual_rate=R0, num_infectious=I,
num_susceptible=20., population=30.,
concentration=200.)
assert isinstance(d1, dist.Binomial)
assert isinstance(d2, dist.BetaBinomial)
assert_dist_close(d1, d2)
@pytest.mark.parametrize("R0,I", [
(1., 1.),
(1., 10.),
(10., 1.),
(5., 5.),
])
def test_negative_binomial_vs_poisson(R0, I):
R0 = torch.tensor(R0)
I = torch.tensor(I)
d1 = infection_dist(individual_rate=R0, num_infectious=I)
d2 = infection_dist(individual_rate=R0, num_infectious=I,
concentration=200.)
assert isinstance(d1, dist.Poisson)
assert isinstance(d2, dist.NegativeBinomial)
    assert_dist_close(d1, d2) | tests/contrib/epidemiology/test_distributions.py | 0.800575 | 0.777088 |
import typing
import jax._src.util as util
import numpy as np
from jax import lax, numpy as jnp, random
from .constants import ParallelAxes
from .context import Context
INT_OR_TUPLE = typing.Union[int, typing.Sequence[int]]
def pos_dim(inp: jnp.ndarray, dims: typing.Sequence[int]) -> typing.Sequence[int]:
return tuple([d % inp.ndim for d in dims])
def tuple_int(obj: INT_OR_TUPLE) -> typing.Sequence[int]:
if isinstance(obj, (tuple, list)):
return tuple(obj)
if isinstance(obj, int):
return obj,
    raise ValueError(f"expected int or sequence of ints, got {type(obj)!r}")
def sum_pool(inputs: jnp.ndarray, window_shape: typing.List[int],
padding: typing.List[typing.Tuple[int, int]]) -> jnp.ndarray:
strides = (1,) * (len(window_shape) + 2)
dims = (1,) + tuple(window_shape) + (1,)
padding = ((0, 0),) + tuple(padding) + ((0, 0),)
return lax.reduce_window(inputs, 0, lax.add, dims, strides, padding)
def conv(inp: jnp.ndarray, weight: jnp.ndarray, padding: typing.List[typing.Tuple[int, int]], groups: int):
ndim = weight.ndim
dimension_numbers = (0, ndim - 1) + tuple(range(1, ndim - 1))
dimension_numbers = lax.ConvDimensionNumbers(dimension_numbers, tuple(range(ndim)), dimension_numbers)
return lax.conv_general_dilated(inp, weight, (1,) * (ndim - 2), padding=padding, feature_group_count=groups,
dimension_numbers=dimension_numbers, precision='fastest')
def device_id(ctx: Context):
return (lax.psum_scatter(jnp.arange(ctx.dims.heads), ParallelAxes.model) / ctx.dims.heads).astype(jnp.int32)
def dot(left: jnp.ndarray, right: jnp.ndarray, left_contract_dims: INT_OR_TUPLE, right_contract_dims: INT_OR_TUPLE,
left_batch_dims: INT_OR_TUPLE = tuple(), right_batch_dims: INT_OR_TUPLE = tuple()) -> jnp.ndarray:
dims = ((pos_dim(left, tuple_int(left_contract_dims)), pos_dim(right, tuple_int(right_contract_dims))),
(pos_dim(left, tuple_int(left_batch_dims)), pos_dim(right, tuple_int(right_batch_dims))))
return lax.dot_general(left, right, dims, "fastest")
def matmul(left: jnp.ndarray, right: jnp.ndarray, reduced_dims=1):
return dot(left, right, tuple(range(-reduced_dims, 0)), tuple(range(reduced_dims)))
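# Added usage note (illustrative): `matmul` contracts the trailing `reduced_dims`
# axes of `left` against the leading axes of `right`, e.g.:
#   matmul(jnp.ones((4, 8)), jnp.ones((8, 16))).shape           # (4, 16)
#   matmul(jnp.ones((2, 4, 8)), jnp.ones((4, 8, 16)), 2).shape  # (2, 16)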
def prefixed_name(ctx: Context, name: str):
return ctx.add_to_prefix(name, count=False).global_prefix
def assign(ctx: Context, name: str, inp: jnp.ndarray):
name = prefixed_name(ctx, name)
ctx.parameters[name] = inp
def normal(ctx: Context, shape: typing.Sequence[int]):
ctx.prng_key, key = random.split(ctx.prng_key)
return random.normal(key, shape, ctx.model.storage_dtype)
def orthogonal_init(ctx: Context, shape: typing.List[int], column_axes=(-1,)) -> jnp.ndarray:
axes = tuple([shape[c] for c in column_axes])
n_rows, n_cols = util.prod(shape) // util.prod(axes), util.prod(axes)
matrix_shape = (n_rows, n_cols) if n_rows > n_cols else (n_cols, n_rows)
out, r = jnp.linalg.qr(normal(ctx, matrix_shape))
out *= lax.broadcast_to_rank(jnp.sign(jnp.diag(r)), rank=out.ndim)
if n_rows < n_cols:
out = out.T
return jnp.reshape(out, tuple(np.delete(shape, column_axes)) + axes).astype(ctx.model.storage_dtype)
def get_param(ctx: Context, name: str, shape: typing.Optional[typing.List[int]] = None,
std: typing.Optional[float] = None, mean: typing.Optional[float] = None, column_axes: int = 1,
scale: float = 1., post_variance_scale: float = 1,
              lr_scale: float = 1, dtype: typing.Optional[jnp.dtype] = None,
init_val: typing.Optional[jnp.ndarray] = None) -> jnp.ndarray:
prefix_name = prefixed_name(ctx, name)
if dtype is None:
computation_dtype = ctx.model.computation_dtype
storage_dtype = ctx.model.storage_dtype
else:
computation_dtype = dtype
storage_dtype = dtype
if prefix_name not in ctx.parameters:
if init_val is not None:
param = init_val * scale * post_variance_scale
elif std is None and mean is None:
param = orthogonal_init(ctx, shape, range(len(shape) - column_axes, len(shape)))
param *= scale * post_variance_scale
else:
param = normal(ctx, shape) * scale
if std is not None:
param *= std
if mean is not None:
param += mean
ctx.parameter_variance[prefix_name] = lr_scale * scale
param = param.astype(storage_dtype)
assign(ctx, name, param)
param = ctx.parameters[prefix_name]
return param.astype(computation_dtype)
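# Added sketch (illustrative) of how the branches above are selected:
#   get_param(ctx, "w", [128, 256])                # orthogonal init (std/mean unset)
#   get_param(ctx, "b", [256], std=0.0, mean=0.0)  # zeros; zero_param below does this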
def zero_param(ctx: Context, name: str, shape: typing.List[int], dtype: typing.Optional[jnp.dtype]) -> jnp.ndarray:
return get_param(ctx, name, shape, 0, 0, dtype=dtype)
def loop(fn: typing.Callable, fn_input: typing.Any, steps: int, unroll: int = 1):
    return lax.scan(lambda *x: (fn(*x[:-1]), None), fn_input, None, steps, unroll=unroll)[0] | src/backend.py | 0.777849 | 0.486149 |
import numpy as np
# print_order controls display of frame in complex_frame_dict_to_string
print_order = ['upper_left', 'top_center', 'upper_right',
'left_center', 'center_point', 'right_center',
'bottom_left', 'bottom_center', 'bottom_right']
ODD_FRAME = {'center_point': 0.0+0.0j, 'zoom': 1, 'theta': 0.0, 'n_rows': 11, 'n_cols': 11}
EVN_FRAME = {'center_point': 0.0+0.0j, 'zoom': 1, 'theta': 0.0, 'n_rows': 12, 'n_cols': 12}
def get_frame_from_dict(def_dict=ODD_FRAME):
""" complex_frame, def_dict = get_frame_from_dict(def_dict)
legacy wrapper function.
Args:
def_dict: definition dictionary with keys:
'center_point', 'zoom', 'theta', 'n_rows', 'n_cols'
Returns:
complex_frame:
def_dict:
"""
complex_frame = get_complex_frame(
def_dict['center_point'],
def_dict['zoom'],
def_dict['theta'],
def_dict['n_rows'],
def_dict['n_cols'])
return complex_frame, def_dict
def get_complex_frame(CP, ZM, theta, h=1, w=1):
""" get the complex numbers at ends and centers of a frame defined by the input parameters
Args:
CP Center Point of frame
ZM ZooM factor - scaling -
ZM > 1 ZooM in (frame point numbers shrink)
                    0 < ZM < 1 Zoom out (zero is not allowed; negatives are negated)
theta radians of rotation of the whole frame
h height of frame (in pixels or height aspect)
        w width of frame (in pixels or width aspect)
Returns:
frame_dict: dictionary of frame points:
center_point CP
top_center middle of top edge of frame
right_center midpoint of right edge
bottom_center, left_center, upper_right, bottom_right, upper_left, bottom_left
"""
# Create the frame around the origin
frame_dict = {'center_point':CP}
# Rotate the origin by theta
if w >= h:
frame_dict['top_center'] = np.exp(1j*(np.pi/2 + theta))/ZM
frame_dict['right_center'] = (w/h) * np.exp(1j * theta) / ZM
else:
frame_dict['top_center'] = (h/w) * np.exp(1j*(np.pi/2 + theta)) / ZM
frame_dict['right_center'] = np.exp(1j * theta) / ZM
# Calculate the remaining points from the origin
frame_dict['bottom_center'] = frame_dict['top_center'] * -1
frame_dict['left_center'] = frame_dict['right_center'] * -1
frame_dict['upper_right'] = frame_dict['right_center'] + frame_dict['top_center']
frame_dict['bottom_right'] = frame_dict['right_center'] + frame_dict['bottom_center']
frame_dict['upper_left'] = frame_dict['left_center'] + frame_dict['top_center']
frame_dict['bottom_left'] = frame_dict['left_center'] + frame_dict['bottom_center']
# Shift the whole frame by the Center Point vector
for k in frame_dict.keys():
        # efficiency weakness allowed for possible future inclusion of unitized pixels
frame_dict[k] = frame_dict[k] + CP
return frame_dict
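# Added example (illustrative): rotating a unit frame by pi/2 sends the
# unrotated top-center to -1+0j, since exp(1j*(pi/2 + pi/2)) == -1:
#   f = get_complex_frame(0 + 0j, 1.0, np.pi / 2)
#   np.allclose(f['top_center'], -1 + 0j)  # True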
def complex_to_string(z, N_DEC=6):
""" format single complex number to string with n decimal places
Args:
z a complex number
N_DEC number of decimal places
"""
MAX_DEC = 17
MIN_DEC = 1
# error-guard the number of decimal places
n = max(min(MAX_DEC, round(N_DEC)), MIN_DEC)
# construct the format string to the number of decimal places
fs = '%%0.%df'%n
# separate real and imaginary as floats
zr = np.real(z)
zi = np.imag(z)
# balance alignment by including the plus sign before the imaginary part
if np.sign(zi) < 0:
s1 = ' ' + fs % zi + 'j'
else:
s1 = ' +' + fs % zi + 'j'
# balance the spacing of the real part by adding a space if positive (including zero)
if np.sign(zr) < 0:
z_str = fs % zr + s1
else:
z_str = ' ' + fs % zr + s1
return z_str
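# Added example: complex_to_string(1.5 - 2j, 3) returns ' 1.500 -2.000j';
# the leading spaces balance the sign column against negative values.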
def get_aligned_dict_string(d, N_DEC=3):
""" print-format a dictionary of (possibly complex) numbers
pretty_string = z_plane.get_aligned_dict_string(d, N_DEC=3)
"""
INDENT = 16
out_string = ''
for k in sorted(list(d.keys())):
v = d[k]
if type(v) == str:
s = v
elif v == 0:
s = '0'
elif np.iscomplex(v):
s = complex_to_string(v, N_DEC)
elif np.round(v) == v:
s = '%d'%(v)
else:
f_str = '%s%s%d%s'%('%','0.',N_DEC,'f')
s = f_str%(v)
if len(out_string) == 0:
out_string = ' ' * max(0,(INDENT - len(k))) + k + ': ' + s
else:
out_string = out_string + '\n' + ' ' * max(0,(INDENT - len(k))) + k + ': ' + s
return out_string + '\n'
def complex_frame_dict_to_string(frame_dict, N_DEC=4):
""" get a formatted list of strings
"""
STR_L = 14
frame_string = ''
row = 0
for k in print_order:
z_str = complex_to_string(frame_dict[k], N_DEC)
PAD = ' ' * (STR_L - len(z_str))
frame_string += k + ':' + PAD + z_str
row += 1
if np.mod(row,3) == 0:
frame_string += '\n'
else:
frame_string += '\t'
return frame_string
def show_complex_matrix(Z0,N_DEC=3):
""" command line display a complex matrix or array
"""
SPC = ' ' * 2
if Z0.shape[0] == Z0.size:
row_str = ''
for col in range(0, Z0.shape[0]):
row_str += complex_to_string(Z0[col], N_DEC) + SPC + '\n'
print(row_str)
else:
for row in range(0,Z0.shape[0]):
row_str = ''
for col in range(0, Z0.shape[1]):
row_str += complex_to_string(Z0[row, col], N_DEC) + SPC
print(row_str)
def rnd_lambda(s=1):
""" random parameters s.t. a*d - b*c = 1
special case random parameter generator
"""
b = np.random.random()
c = np.random.random()
ad = b*c + 1
a = np.random.random()
d = ad / a
    # scaling by s makes the determinant s**2 rather than 1
    lamb0 = np.array([a, b, c, d]) * s
return lamb0
class ComplexPlane:
""" parameterized grid of complex numbers
Args:
CP: self._center_point -- complex vector from origin to center of grid
ZM: self._zoom_factor -- Magnify (Zoom IN as ZM increases)
theta: self._theta -- Counter Clockwise rotation of the plane
h: self._n_rows -- number of rows in the grid
w: self._n_cols -- number of columns in the grid
methods:
display_self: command line printout of the self definition parameters (Args)
get_complex_axes: grid center arrays of complex vectors
get_complex_col: column array of complex vectors
get_complex_row: row array of complex vectors
get_complex_pixels: matrix of complex numbers == the grid
        get_escape_bound: get an escape distance based on the grid's corner-to-corner vector length
get_parameters_dict: get the self definition parameters (Args) as a python dict
get_rails: top and bottom arrays of complex vectors
get_styles: left and right arrays of complex vectors
load_dict: re-initialize the object with new set of definition parameters (Args)
"""
def __init__(self, CP=0.0+0.0*1j, ZM=1.0, theta=0.0, h=5, w=5):
self._center_point = CP
self._zoom_factor = max(ZM, 1e-15)
self._theta = theta
self._n_rows = max(round(h), 1)
self._n_cols = max(round(w), 1)
def display_self(self):
pd = self.get_parameters_dict()
s = get_aligned_dict_string(pd)
print(s)
def load_dict(self, parameters_dict):
""" self.load_dict(parameters_dict) """
if 'center_point' in parameters_dict:
self._center_point = parameters_dict['center_point']
if 'zoom_factor' in parameters_dict:
self._zoom_factor = parameters_dict['zoom_factor']
if 'theta' in parameters_dict:
self._theta = parameters_dict['theta']
if 'n_rows' in parameters_dict:
self._n_rows = parameters_dict['n_rows']
if 'n_cols' in parameters_dict:
self._n_cols = parameters_dict['n_cols']
def get_parameters_dict(self):
""" parameters_dict = self.get_parameters_dict() """
parameters_dict = {}
parameters_dict['center_point'] = self._center_point
parameters_dict['zoom_factor'] = self._zoom_factor
parameters_dict['theta'] = self._theta
parameters_dict['n_rows'] = self._n_rows
parameters_dict['n_cols'] = self._n_cols
return parameters_dict
def get_escape_bound(self, boundry_scale=12):
""" escape time algorithm best infinity safe iteration distance """
corner_scale = max(self._n_rows/self._n_cols, self._n_cols/self._n_rows)
return corner_scale * boundry_scale / self._zoom_factor
def get_complex_axes(self):
""" horiz_axis, vert_axis = self.get_complex_axes() """
frame_dict = get_complex_frame(self._center_point,
self._zoom_factor, self._theta, self._n_rows, self._n_cols)
        vert_axis = np.linspace(frame_dict['top_center'],
                                frame_dict['bottom_center'], self._n_rows) + 0.0j  # rows span the vertical axis
horiz_axis = np.linspace(frame_dict['left_center'],
frame_dict['right_center'], self._n_cols) + 0.0j
return horiz_axis, vert_axis
def get_rails(self):
""" top_rail, bottom_rail = self.get_styles() """
frame_dict = get_complex_frame(self._center_point,
self._zoom_factor, self._theta, self._n_rows, self._n_cols)
top_rail = np.linspace(frame_dict['upper_left'],
frame_dict['upper_right'], self._n_cols) + 0.0j
bottom_rail = np.linspace(frame_dict['bottom_left'],
frame_dict['bottom_right'], self._n_cols) + 0.0j
return top_rail, bottom_rail
def get_styles(self):
""" left_style, right_style = self.get_styles() """
frame_dict = get_complex_frame(self._center_point,
self._zoom_factor, self._theta, self._n_rows, self._n_cols)
left_style = np.linspace(frame_dict['upper_left'],
frame_dict['bottom_left'], self._n_rows) + 0.0j
right_style = np.linspace(frame_dict['upper_right'],
frame_dict['bottom_right'], self._n_rows) + 0.0j
return left_style, right_style
def get_complex_row(self, row_number):
""" row_vectors = self.get_complex_row() """
left_style, right_style = self.get_styles()
return np.linspace(left_style[row_number], right_style[row_number], self._n_cols) + 0.0j
def get_complex_col(self, col_number):
""" col_vectors = self.get_complex_col() """
top_rail, bottom_rail = self.get_styles()
return np.linspace(top_rail[col_number], bottom_rail[col_number], self._n_rows) + 0.0j
def get_complex_pixels(self):
""" complex_pixels = self.get_complex_pixels() """
left_style, right_style = self.get_styles()
complex_pixels = np.zeros((self._n_rows,
self._n_cols)) + np.zeros((self._n_rows, self._n_cols)) * 1j
for k in range(0, self._n_rows):
complex_pixels[k, :] = np.linspace(left_style[k], right_style[k], self._n_cols)
return complex_pixels
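# Added usage sketch (illustrative):
#   zp = ComplexPlane(CP=-0.5 + 0.0j, ZM=2.0, theta=0.0, h=3, w=3)
#   Z0 = zp.get_complex_pixels()  # 3x3 grid of complex samples
#   show_complex_matrix(Z0, N_DEC=3)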
""" class ComplexPlane: bottom line - wuf wuf """ | src/z_plane.py | from collections import OrderedDict
import numpy as np
# print_order controls display of frame in complex_frame_dict_to_string
print_order = ['upper_left', 'top_center', 'upper_right',
'left_center', 'center_point', 'right_center',
'bottom_left', 'bottom_center', 'bottom_right']
ODD_FRAME = {'center_point': 0.0+0.0j, 'zoom': 1, 'theta': 0.0, 'n_rows': 11, 'n_cols': 11}
EVN_FRAME = {'center_point': 0.0+0.0j, 'zoom': 1, 'theta': 0.0, 'n_rows': 12, 'n_cols': 12}
def get_frame_from_dict(def_dict=ODD_FRAME):
""" complex_frame, def_dict = get_frame_from_dict(def_dict)
legacy wrapper function.
Args:
def_dict: definition dictionary with keys:
'center_point', 'zoom', 'theta', 'n_rows', 'n_cols'
Returns:
complex_frame:
def_dict:
"""
complex_frame = get_complex_frame(
def_dict['center_point'],
def_dict['zoom'],
def_dict['theta'],
def_dict['n_rows'],
def_dict['n_cols'])
return complex_frame, def_dict
def get_complex_frame(CP, ZM, theta, h=1, w=1):
""" get the complex numbers at ends and centers of a frame defined by the input parameters
Args:
CP Center Point of frame
ZM ZooM factor - scaling -
ZM > 1 ZooM in (frame point numbers shrink)
0 > ZM < 1 Zoom out (Zero is not allowed, negatives are negated)
theta radians of rotation of the whole frame
h height of frame (in pixels or height aspect)
w width of frame (in pixels or width aspect
Returns:
frame_dict: dictionary of frame points:
center_point CP
top_center middle of top edge of frame
right_center midpoint of right edge
bottom_center, left_center, upper_right, bottom_right, upper_left, bottom_left
"""
# Create the frame around the origin
frame_dict = {'center_point':CP}
# Rotate the origin by theta
if w >= h:
frame_dict['top_center'] = np.exp(1j*(np.pi/2 + theta))/ZM
frame_dict['right_center'] = (w/h) * np.exp(1j * theta) / ZM
else:
frame_dict['top_center'] = (h/w) * np.exp(1j*(np.pi/2 + theta)) / ZM
frame_dict['right_center'] = np.exp(1j * theta) / ZM
# Calculate the remaining points from the origin
frame_dict['bottom_center'] = frame_dict['top_center'] * -1
frame_dict['left_center'] = frame_dict['right_center'] * -1
frame_dict['upper_right'] = frame_dict['right_center'] + frame_dict['top_center']
frame_dict['bottom_right'] = frame_dict['right_center'] + frame_dict['bottom_center']
frame_dict['upper_left'] = frame_dict['left_center'] + frame_dict['top_center']
frame_dict['bottom_left'] = frame_dict['left_center'] + frame_dict['bottom_center']
# Shift the whole frame by the Center Point vector
for k in frame_dict.keys():
# effecicency weakness allowed for possible future version inclusion of unitized pixels
frame_dict[k] = frame_dict[k] + CP
return frame_dict
def complex_to_string(z, N_DEC=6):
""" format single complex number to string with n decimal places
Args:
z a complex number
N_DEC number of decimal places
"""
MAX_DEC = 17
MIN_DEC = 1
# error-guard the number of decimal places
n = max(min(MAX_DEC, round(N_DEC)), MIN_DEC)
# construct the format string to the number of decimal places
fs = '%%0.%df'%n
# separate real and imaginary as floats
zr = np.real(z)
zi = np.imag(z)
# balance alignment by including the plus sign before the imaginary part
if np.sign(zi) < 0:
s1 = ' ' + fs % zi + 'j'
else:
s1 = ' +' + fs % zi + 'j'
# balance the spacing of the real part by adding a space if positive (including zero)
if np.sign(zr) < 0:
z_str = fs % zr + s1
else:
z_str = ' ' + fs % zr + s1
return z_str
def get_aligned_dict_string(d, N_DEC=3):
""" print-format a dictionary of (possibly complex) numbers
pretty_string = z_plane.get_aligned_dict_string(d, N_DEC=3)
"""
INDENT = 16
out_string = ''
for k in sorted(list(d.keys())):
v = d[k]
if type(v) == str:
s = v
elif v == 0:
s = '0'
elif np.iscomplex(v):
s = complex_to_string(v, N_DEC)
elif np.round(v) == v:
s = '%d'%(v)
else:
f_str = '%s%s%d%s'%('%','0.',N_DEC,'f')
s = f_str%(v)
if len(out_string) == 0:
out_string = ' ' * max(0,(INDENT - len(k))) + k + ': ' + s
else:
out_string = out_string + '\n' + ' ' * max(0,(INDENT - len(k))) + k + ': ' + s
return out_string + '\n'
def complex_frame_dict_to_string(frame_dict, N_DEC=4):
""" get a formatted list of strings
"""
STR_L = 14
frame_string = ''
row = 0
for k in print_order:
z_str = complex_to_string(frame_dict[k], N_DEC)
PAD = ' ' * (STR_L - len(z_str))
frame_string += k + ':' + PAD + z_str
row += 1
if np.mod(row,3) == 0:
frame_string += '\n'
else:
frame_string += '\t'
return frame_string
def show_complex_matrix(Z0,N_DEC=3):
""" command line display a complex matrix or array
"""
SPC = ' ' * 2
if Z0.shape[0] == Z0.size:
row_str = ''
for col in range(0, Z0.shape[0]):
row_str += complex_to_string(Z0[col], N_DEC) + SPC + '\n'
print(row_str)
else:
for row in range(0,Z0.shape[0]):
row_str = ''
for col in range(0, Z0.shape[1]):
row_str += complex_to_string(Z0[row, col], N_DEC) + SPC
print(row_str)
def rnd_lambda(s=1):
""" random parameters s.t. a*d - b*c = 1
special case random parameter generator
"""
b = np.random.random()
c = np.random.random()
ad = b*c + 1
a = np.random.random()
d = ad / a
lamb0 = {'a': a, 'b': b, 'c': c, 'd': d}
lamb0 = np.array([a, b, c, d]) * s
return lamb0
class ComplexPlane:
""" parameterized grid of complex numbers
Args:
CP: self._center_point -- complex vector from origin to center of grid
ZM: self._zoom_factor -- Magnify (Zoom IN as ZM increases)
theta: self._theta -- Counter Clockwise rotation of the plane
h: self._n_rows -- number of rows in the grid
w: self._n_cols -- number of columns in the grid
methods:
display_self: command line printout of the self definition parameters (Args)
get_complex_axes: grid center arrays of complex vectors
get_complex_col: column array of complex vectors
get_complex_row: row array of complex vectors
get_complex_pixels: matrix of complex numbers == the grid
get_escape_bound: get an escpe distance based on the grids corner to corner vector length
get_parameters_dict: get the self definition parameters (Args) as a python dict
get_rails: top and bottom arrays of complex vectors
get_styles: left and right arrays of complex vectors
load_dict: re-initialize the object with new set of definition parameters (Args)
"""
def __init__(self, CP=0.0+0.0*1j, ZM=1.0, theta=0.0, h=5, w=5):
self._center_point = CP
self._zoom_factor = max(ZM, 1e-15)
self._theta = theta
self._n_rows = max(round(h), 1)
self._n_cols = max(round(w), 1)
def display_self(self):
pd = self.get_parameters_dict()
s = get_aligned_dict_string(pd)
print(s)
def load_dict(self, parameters_dict):
""" self.load_dict(parameters_dict) """
if 'center_point' in parameters_dict:
self._center_point = parameters_dict['center_point']
if 'zoom_factor' in parameters_dict:
self._zoom_factor = parameters_dict['zoom_factor']
if 'theta' in parameters_dict:
self._theta = parameters_dict['theta']
if 'n_rows' in parameters_dict:
self._n_rows = parameters_dict['n_rows']
if 'n_cols' in parameters_dict:
self._n_cols = parameters_dict['n_cols']
def get_parameters_dict(self):
""" parameters_dict = self.get_parameters_dict() """
parameters_dict = {}
parameters_dict['center_point'] = self._center_point
parameters_dict['zoom_factor'] = self._zoom_factor
parameters_dict['theta'] = self._theta
parameters_dict['n_rows'] = self._n_rows
parameters_dict['n_cols'] = self._n_cols
return parameters_dict
def get_escape_bound(self, boundry_scale=12):
""" escape time algorithm best infinity safe iteration distance """
corner_scale = max(self._n_rows/self._n_cols, self._n_cols/self._n_rows)
return corner_scale * boundry_scale / self._zoom_factor
def get_complex_axes(self):
""" horiz_axis, vert_axis = self.get_complex_axes() """
frame_dict = get_complex_frame(self._center_point,
self._zoom_factor, self._theta, self._n_rows, self._n_cols)
vert_axis = np.linspace(frame_dict['top_center'],
frame_dict['bottom_center'], self._n_cols) + 0.0j
horiz_axis = np.linspace(frame_dict['left_center'],
frame_dict['right_center'], self._n_cols) + 0.0j
return horiz_axis, vert_axis
def get_rails(self):
""" top_rail, bottom_rail = self.get_styles() """
frame_dict = get_complex_frame(self._center_point,
self._zoom_factor, self._theta, self._n_rows, self._n_cols)
top_rail = np.linspace(frame_dict['upper_left'],
frame_dict['upper_right'], self._n_cols) + 0.0j
bottom_rail = np.linspace(frame_dict['bottom_left'],
frame_dict['bottom_right'], self._n_cols) + 0.0j
return top_rail, bottom_rail
def get_styles(self):
""" left_style, right_style = self.get_styles() """
frame_dict = get_complex_frame(self._center_point,
self._zoom_factor, self._theta, self._n_rows, self._n_cols)
left_style = np.linspace(frame_dict['upper_left'],
frame_dict['bottom_left'], self._n_rows) + 0.0j
right_style = np.linspace(frame_dict['upper_right'],
frame_dict['bottom_right'], self._n_rows) + 0.0j
return left_style, right_style
def get_complex_row(self, row_number):
""" row_vectors = self.get_complex_row() """
left_style, right_style = self.get_styles()
return np.linspace(left_style[row_number], right_style[row_number], self._n_cols) + 0.0j
def get_complex_col(self, col_number):
""" col_vectors = self.get_complex_col() """
top_rail, bottom_rail = self.get_styles()
return np.linspace(top_rail[col_number], bottom_rail[col_number], self._n_rows) + 0.0j
def get_complex_pixels(self):
""" complex_pixels = self.get_complex_pixels() """
left_style, right_style = self.get_styles()
complex_pixels = np.zeros((self._n_rows,
self._n_cols)) + np.zeros((self._n_rows, self._n_cols)) * 1j
for k in range(0, self._n_rows):
complex_pixels[k, :] = np.linspace(left_style[k], right_style[k], self._n_cols)
return complex_pixels
""" class ComplexPlane: bottom line - wuf wuf """ | 0.735831 | 0.457197 |
from colorConv import *
from colBase import ColBase as cB
import colCIELCHuv as cCIELCHuv
import colXYZ as cXYZ
class ColCIELuv(cB):
""" CIE-L*uv color class
Inherits from ColBase """
lfields = ['L', 'u', 'v']
def __init__(self, L=0.0, u=0.0, v=0.0, *args, **kwargs): # default: Black
""" Init with L*, u, v values """
cB.__init__(self, *args, **kwargs)
self.type = 'CIELuv' # can be used instead of isinstance on an object
self.L, self.u, self.v = L, u, v
# TO COLOR SPACE (DIRECT)
def toCIELuv(self):
""" :return: CIE-L*uv class from self """
return self
def toXYZ(self):
""" :return: XYZ class from self """
        # pass L, u, v plus self as the observer reference (mirrors fromXYZ);
        # the original packed refs() and self into a 2-tuple, which unpacked
        # as only two positional arguments
        return cXYZ.ColXYZ(*CIELuvtoXYZ(*self.refs(), self))
def toCIELCHuv(self):
""" :return: CIE-L*CH°uv class from self """
return cCIELCHuv.ColCIELCHuv(*CIELxxtoCIELCHxx(*self.refs()))
# FROM COLOR SPACE (DIRECT)
@cB.cancel_on(TypeError)
def fromCIELuv(self, *col):
""" CIE-L*uv -> CIE-L*uv conversion
:param col: either CIE-L*uv tuple or ColCIELuv class
:return: CIE-L*uv class """
self.L, self.u, self.v = self._parse_input(ColCIELuv, *col)
return self
@cB.cancel_on(TypeError)
def fromXYZ(self, *col):
""" XYZ -> CIE-L*uv conversion
:param col: either XYZ tuple or ColXYZ class
:return: CIE-L*uv class """
tmp = self._parse_input(cXYZ.ColXYZ, *col)
tmp.append(self) # append self for observer reference
self.L, self.u, self.v = XYZtoCIELuv(*tmp)
return self
@cB.cancel_on(TypeError)
def fromCIELCHuv(self, *col):
""" CIE-L*CH°uv -> CIE-L*uv conversion
:param col: either CIE-L*CH°uv tuple or ColCIELCHuv class
:return: CIE-L*uv class """
self.L, self.u, self.v = CIELCHxxtoCIELxx(*self._parse_input(cCIELCHuv.ColCIELCHuv, *col))
return self
# CLASS ADDITIONAL METHODS
def getHue(self):
""" get Hue angle (in degrees)
:param self: ColCIELuv object
:return: Hue in degrees """
return CIELxxtoHUE(self.u, self.v)
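# Added round-trip sketch (illustrative; assumes colorConv's LCH conversions invert):
#   lch = ColCIELuv(L=50.0, u=10.0, v=10.0).toCIELCHuv()
#   luv = ColCIELuv().fromCIELCHuv(lch)  # luv.L, luv.u, luv.v ~ (50.0, 10.0, 10.0)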
if __name__ == "__main__":
col_Luv = ColCIELuv()
print(col_Luv)
    print(str(col_Luv)) | SMFSWcolor/colCIELuv.py | 0.740456 | 0.227244 |
import os
import torch
from torch.utils.cpp_extension import load
VERBOSE = False
def _resolve(name):
return os.path.join(os.path.dirname(os.path.realpath(__file__)), name)
try:
import torch_discounted_cumsum_cpu
except ImportError:
if VERBOSE:
print('Falling back to JIT compiling torch_discounted_cumsum_cpu')
torch_discounted_cumsum_cpu = load(
name='torch_discounted_cumsum_cpu',
sources=[
_resolve('discounted_cumsum_cpu.cpp'),
],
verbose=VERBOSE,
)
try:
import torch_discounted_cumsum_cuda
except ImportError:
if VERBOSE:
print('Falling back to JIT compiling torch_discounted_cumsum_cuda')
torch_discounted_cumsum_cuda = None
if torch.cuda.is_available():
torch_discounted_cumsum_cuda = load(
name='torch_discounted_cumsum_cuda',
sources=[
_resolve('discounted_cumsum_cuda.cpp'),
_resolve('discounted_cumsum_cuda_kernel.cu'),
],
verbose=VERBOSE,
)
def _discounted_cumsum_left_dispatcher(input, gamma):
if not torch.is_tensor(input):
raise ValueError('Input must be a torch.Tensor')
if not torch.is_tensor(gamma):
raise ValueError('Gamma must be a torch.Tensor')
if input.is_cuda:
if torch_discounted_cumsum_cuda is None:
            raise EnvironmentError('Failed to load native CUDA module')
return torch_discounted_cumsum_cuda.discounted_cumsum_left_cuda(input.contiguous(), gamma.contiguous())
else:
return torch_discounted_cumsum_cpu.discounted_cumsum_left_cpu(input, gamma)
def _discounted_cumsum_right_dispatcher(input, gamma):
if not torch.is_tensor(input):
raise ValueError('Input must be a torch.Tensor')
if not torch.is_tensor(gamma):
raise ValueError('Gamma must be a torch.Tensor')
if input.is_cuda:
if torch_discounted_cumsum_cuda is None:
            raise EnvironmentError('Failed to load native CUDA module')
return torch_discounted_cumsum_cuda.discounted_cumsum_right_cuda(input.contiguous(), gamma.contiguous())
else:
return torch_discounted_cumsum_cpu.discounted_cumsum_right_cpu(input, gamma)
class DiscountedCumSumLeftFunction(torch.autograd.Function):
@staticmethod
def forward(ctx, input, gamma, gamma_requires_grad):
output = _discounted_cumsum_left_dispatcher(input, gamma)
ctx.save_for_backward(output if gamma_requires_grad else None, gamma)
return output
@staticmethod
def backward(ctx, grad_output):
output, gamma = ctx.saved_tensors
grad_input = _discounted_cumsum_right_dispatcher(grad_output, gamma)
grad_gamma = None
if output is not None:
z = _discounted_cumsum_left_dispatcher(output, gamma)
z = z[:, :-1]
dLdy = grad_output[:, 1:]
grad_gamma = (z * dLdy).sum(dim=1)
return grad_input, grad_gamma, None
class DiscountedCumSumRightFunction(torch.autograd.Function):
@staticmethod
def forward(ctx, input, gamma, gamma_requires_grad):
output = _discounted_cumsum_right_dispatcher(input, gamma)
ctx.save_for_backward(output if gamma_requires_grad else None, gamma)
return output
@staticmethod
def backward(ctx, grad_output):
output, gamma = ctx.saved_tensors
grad_input = _discounted_cumsum_left_dispatcher(grad_output, gamma)
grad_gamma = None
if output is not None:
z = _discounted_cumsum_right_dispatcher(output, gamma)
z = z[:, 1:]
dLdy = grad_output[:, :-1]
grad_gamma = (z * dLdy).sum(dim=1)
return grad_input, grad_gamma, None
def discounted_cumsum_left(input, gamma):
if not torch.is_tensor(gamma):
gamma = torch.tensor(gamma).to(input)
if gamma.dim() == 0:
gamma = gamma.reshape(-1)
return DiscountedCumSumLeftFunction.apply(input, gamma, gamma.requires_grad)
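# Added usage note (illustrative): the left scan computes y[t] = x[t] + gamma * y[t-1]:
#   discounted_cumsum_left(torch.ones(1, 4), 0.5)
#   # -> tensor([[1.0000, 1.5000, 1.7500, 1.8750]])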
def discounted_cumsum_right(input, gamma):
if not torch.is_tensor(gamma):
gamma = torch.tensor(gamma).to(input)
if gamma.dim() == 0:
gamma = gamma.reshape(-1)
    return DiscountedCumSumRightFunction.apply(input, gamma, gamma.requires_grad) | torch_discounted_cumsum/discounted_cumsum.py | 0.684475 | 0.417271 |
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', django_extensions.db.fields.CreationDateTimeField(default=django.utils.timezone.now, verbose_name='created', editable=False, blank=True)),
('modified', django_extensions.db.fields.ModificationDateTimeField(default=django.utils.timezone.now, verbose_name='modified', editable=False, blank=True)),
('contact_name', models.CharField(max_length=50, verbose_name='contact name', blank=True)),
('address_one', models.CharField(max_length=50, verbose_name='address one', blank=True)),
('address_two', models.CharField(max_length=50, verbose_name='address two', blank=True)),
('town', models.CharField(max_length=50, verbose_name='town', blank=True)),
('county', models.CharField(max_length=50, verbose_name='county', blank=True)),
('postcode', models.CharField(max_length=50, verbose_name='postcode', blank=True)),
('status', models.IntegerField(default=0, verbose_name='status', choices=[(0, b'Active'), (1, b'Display only'), (2, b'Deleted')])),
],
options={
'ordering': ['created'],
'get_latest_by': 'created',
'verbose_name': 'address',
'verbose_name_plural': 'addresses',
},
),
migrations.CreateModel(
name='Country',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('iso_code', models.CharField(unique=True, max_length=2, verbose_name='ISO code')),
('name', models.CharField(max_length=60, verbose_name='name')),
],
options={
'ordering': ['name'],
'verbose_name': 'country',
'verbose_name_plural': 'countries',
},
),
migrations.AddField(
model_name='address',
name='country',
field=models.ForeignKey(verbose_name='country', to='addressbook.Country'),
),
    ] | addressbook/migrations/0001_initial.py | 0.630571 | 0.115212 |
from geolite2 import geolite2
import requests
class bColors:
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BLUE = '\033[94m'
def banner():
    print(bColors.GREEN + '<<< IP-TRACKER v2.0 >>>')
print(bColors.RED + r'''
_
| |
| |___
| _ \ _ _
| |_) | | (_) |
\____/ \__, |
__/ |
|___/
_ _
| | (_)
____ ____ ___| | ___ _ ______ ______ ___ _ ______ ______ _ _ ____
/ ___\ / \ / _ | / _ | | / ____| / ____| / _ | | / ____| / ____| | | | | | \
| |____ | () | | (_| | | (_|| | \_____ \ \_____ \ | (_|| | \_____ \ \_____ \ | | | | | |
\____/ \____/ \____/ \___|_| |______/ |______/ \___|_| |______/ |______/ |_| |_| |_|
''')
class IpTracker:
def __init__(self):
self.docReader = geolite2.reader()
banner()
self.r = str(bColors.RED)
self.g = str(bColors.GREEN)
self.b = str(bColors.BLUE)
self.y = str(bColors.YELLOW)
def ownTracker(self):
IpTrack = requests.get('https://api.ipify.org').text
trackLocation = self.docReader.get(IpTrack)
trackLocList = []
if 'city' in trackLocation:
city = (trackLocation['city']['names']['en'])
trackLocList.append(city)
else:
trackLocList.append("NA")
if 'continent' in trackLocation:
continent = (trackLocation['continent']['names']['en'])
trackLocList.append(continent)
else:
trackLocList.append("NA")
if 'country' in trackLocation:
country = (trackLocation['country']['names']['en'])
trackLocList.append(country)
else:
trackLocList.append("NA")
locationAccuracy = str(trackLocation['location']['accuracy_radius'])
trackLocList.append(locationAccuracy)
locationLatitude = str(trackLocation['location']['latitude'])
trackLocList.append(locationLatitude)
locationLongitude = str(trackLocation['location']['longitude'])
trackLocList.append(locationLongitude)
if 'time_zone' in trackLocation:
locationTimeZone = (trackLocation['location']['time_zone'])
trackLocList.append(locationTimeZone)
else:
trackLocList.append("NA")
        if 'postal' in trackLocation:  # the code is nested under the 'postal' key
postalCode = (trackLocation['postal']['code'])
trackLocList.append(postalCode)
else:
trackLocList.append("NA")
registeredCountry = (trackLocation['registered_country']['names']['en'])
trackLocList.append(registeredCountry)
if 'subdivisions' in trackLocation:
subdivisions = (trackLocation['subdivisions'][0]['names']['en'])
trackLocList.append(subdivisions)
else:
trackLocList.append("NA")
# print(trackLocation)
# print(trackLocList)
print('\n\n' + self.b + "<<< NATIVE MACHINE IP TRACK REPORT >>>")
print(self.r + '* ' + self.b + 'public_ip: ' + self.g + IpTrack)
if 'city' in trackLocation:
print(self.r + '* ' + self.b + 'city: ' + self.g + trackLocList[0])
if 'continent' in trackLocation:
print(self.r + '* ' + self.b + 'continent: ' + self.g + trackLocList[1])
if 'country' in trackLocation:
print(self.r + '* ' + self.b + 'country: ' + self.g + trackLocList[2])
print(self.r + '* ' + self.b + 'location: ')
print('\t' + self.r + '↪ ' + self.y + 'accuracy_radius: ' + self.g + trackLocList[3])
print('\t' + self.r + '↪ ' + self.y + 'latitude: ' + self.g + trackLocList[4])
print('\t' + self.r + '↪ ' + self.y + 'longitude: ' + self.g + trackLocList[5])
print('\t' + self.r + '↪ ' + self.y + 'time_zone: ' + self.g + trackLocList[6])
print('\t' + self.r + '↪ ' + self.y + 'map: ' + self.g +
f'https://www.google.co.in/maps/@{trackLocList[4]},{trackLocList[5]},15z?hl=en')
print(self.r + '* ' + self.b + 'postal_code: ' + self.g + trackLocList[7])
print(self.r + '* ' + self.b + 'registered_country: ' + self.g + trackLocList[8])
if 'subdivisions' in trackLocation:
print(self.r + '* ' + self.b + 'subdivisions: ' + self.g + trackLocList[9])
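    # Added note (illustrative): GeoLite2 records are nested dicts; the postal
    # code lives at record['postal']['code'], hence the 'postal' key guard above:
    #   rec = geolite2.reader().get('8.8.8.8')
    #   code = rec['postal']['code'] if rec and 'postal' in rec else 'NA'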
def multiTracker(self, ipList):
        ips = ipList.split(',')  # renamed from `list` to avoid shadowing the builtin
        # print(ips)
        lenList = len(ips)
        i = 0
        while i < lenList:
            ips[i] = str(ips[i]).strip()
            trackLocation = self.docReader.get(ips[i])
            if trackLocation is None:  # private/unroutable IPs have no GeoLite2 record
                print('\n' + self.r + 'No record found for: ' + ips[i])
                i += 1
                continue
trackLocList = []
if 'city' in trackLocation:
city = (trackLocation['city']['names']['en'])
trackLocList.append(city)
else:
trackLocList.append("NA")
if 'continent' in trackLocation:
continent = (trackLocation['continent']['names']['en'])
trackLocList.append(continent)
else:
trackLocList.append("NA")
if 'country' in trackLocation:
country = (trackLocation['country']['names']['en'])
trackLocList.append(country)
else:
trackLocList.append("NA")
locationAccuracy = str(trackLocation['location']['accuracy_radius'])
trackLocList.append(locationAccuracy)
locationLatitude = str(trackLocation['location']['latitude'])
trackLocList.append(locationLatitude)
locationLongitude = str(trackLocation['location']['longitude'])
trackLocList.append(locationLongitude)
if 'time_zone' in trackLocation:
locationTimeZone = (trackLocation['location']['time_zone'])
trackLocList.append(locationTimeZone)
else:
trackLocList.append("NA")
            if 'postal' in trackLocation:  # the code is nested under the 'postal' key
postalCode = (trackLocation['postal']['code'])
trackLocList.append(postalCode)
else:
trackLocList.append("NA")
registeredCountry = (trackLocation['registered_country']['names']['en'])
trackLocList.append(registeredCountry)
if 'subdivisions' in trackLocation:
subdivisions = (trackLocation['subdivisions'][0]['names']['en'])
trackLocList.append(subdivisions)
else:
trackLocList.append("NA")
# print(trackLocation)
# print(trackLocList)
            print('\n\n' + self.b + '<<< ' + str(ips[i]) + ' TRACK REPORT' + ' >>>')
            print(self.r + '* ' + self.b + 'public_ip: ' + self.g + ips[i])
if 'city' in trackLocation:
print(self.r + '* ' + self.b + 'city: ' + self.g + trackLocList[0])
if 'continent' in trackLocation:
print(self.r + '* ' + self.b + 'continent: ' + self.g + trackLocList[1])
if 'country' in trackLocation:
print(self.r + '* ' + self.b + 'country: ' + self.g + trackLocList[2])
print(self.r + '* ' + self.b + 'location: ')
print('\t' + self.r + '↪ ' + self.y + 'accuracy_radius: ' + self.g + trackLocList[3])
print('\t' + self.r + '↪ ' + self.y + 'latitude: ' + self.g + trackLocList[4])
print('\t' + self.r + '↪ ' + self.y + 'longitude: ' + self.g + trackLocList[5])
print('\t' + self.r + '↪ ' + self.y + 'time_zone: ' + self.g + trackLocList[6])
print('\t' + self.r + '↪ ' + self.y + 'map: ' + self.g +
f'https://www.google.co.in/maps/@{trackLocList[4]},{trackLocList[5]},15z?hl=en')
print(self.r + '* ' + self.b + 'postal_code: ' + self.g + trackLocList[7])
print(self.r + '* ' + self.b + 'registered_country: ' + self.g + trackLocList[8])
if 'subdivisions' in trackLocation:
print(self.r + '* ' + self.b + 'subdivisions: ' + self.g + trackLocList[9])
i += 1
if __name__ == '__main__':
tracker = IpTracker()
print('''
\nEnter the mode of IP tracking:
1. Track IP of the native machine
2. Track custom IP(s)''')
mode = input("\t\t:> ")
if mode == '1':
tracker.ownTracker()
elif mode == '2':
        ipList = str(input("\nEnter the IP address(es) (e.g. '192.168.127.12, 172.16.31.10, 172.16.31.10'): "))
tracker.multiTracker(ipList) | IpTrackerv2.0.py | from geolite2 import geolite2
import requests
class bColors:
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BLUE = '\033[94m'
def banner():
print(bColors.GREEN + '<<< IP-TRACKER v2.0>>>')
print(bColors.RED + r'''
_
| |
| |___
| _ \ _ _
| |_) | | (_) |
\____/ \__, |
__/ |
|___/
_ _
| | (_)
____ ____ ___| | ___ _ ______ ______ ___ _ ______ ______ _ _ ____
/ ___\ / \ / _ | / _ | | / ____| / ____| / _ | | / ____| / ____| | | | | | \
| |____ | () | | (_| | | (_|| | \_____ \ \_____ \ | (_|| | \_____ \ \_____ \ | | | | | |
\____/ \____/ \____/ \___|_| |______/ |______/ \___|_| |______/ |______/ |_| |_| |_|
''')
class IpTracker:
def __init__(self):
self.docReader = geolite2.reader()
banner()
self.r = str(bColors.RED)
self.g = str(bColors.GREEN)
self.b = str(bColors.BLUE)
self.y = str(bColors.YELLOW)
def ownTracker(self):
IpTrack = requests.get('https://api.ipify.org').text
trackLocation = self.docReader.get(IpTrack)
trackLocList = []
if 'city' in trackLocation:
city = (trackLocation['city']['names']['en'])
trackLocList.append(city)
else:
trackLocList.append("NA")
if 'continent' in trackLocation:
continent = (trackLocation['continent']['names']['en'])
trackLocList.append(continent)
else:
trackLocList.append("NA")
if 'country' in trackLocation:
country = (trackLocation['country']['names']['en'])
trackLocList.append(country)
else:
trackLocList.append("NA")
locationAccuracy = str(trackLocation['location']['accuracy_radius'])
trackLocList.append(locationAccuracy)
locationLatitude = str(trackLocation['location']['latitude'])
trackLocList.append(locationLatitude)
locationLongitude = str(trackLocation['location']['longitude'])
trackLocList.append(locationLongitude)
if 'time_zone' in trackLocation:
locationTimeZone = (trackLocation['location']['time_zone'])
trackLocList.append(locationTimeZone)
else:
trackLocList.append("NA")
if 'code' in trackLocation:
postalCode = (trackLocation['postal']['code'])
trackLocList.append(postalCode)
else:
trackLocList.append("NA")
registeredCountry = (trackLocation['registered_country']['names']['en'])
trackLocList.append(registeredCountry)
if 'subdivisions' in trackLocation:
subdivisions = (trackLocation['subdivisions'][0]['names']['en'])
trackLocList.append(subdivisions)
else:
trackLocList.append("NA")
# print(trackLocation)
# print(trackLocList)
print('\n\n' + self.b + "<<< NATIVE MACHINE IP TRACK REPORT >>>")
print(self.r + '* ' + self.b + 'public_ip: ' + self.g + IpTrack)
if 'city' in trackLocation:
print(self.r + '* ' + self.b + 'city: ' + self.g + trackLocList[0])
if 'continent' in trackLocation:
print(self.r + '* ' + self.b + 'continent: ' + self.g + trackLocList[1])
if 'country' in trackLocation:
print(self.r + '* ' + self.b + 'country: ' + self.g + trackLocList[2])
print(self.r + '* ' + self.b + 'location: ')
print('\t' + self.r + '↪ ' + self.y + 'accuracy_radius: ' + self.g + trackLocList[3])
print('\t' + self.r + '↪ ' + self.y + 'latitude: ' + self.g + trackLocList[4])
print('\t' + self.r + '↪ ' + self.y + 'longitude: ' + self.g + trackLocList[5])
print('\t' + self.r + '↪ ' + self.y + 'time_zone: ' + self.g + trackLocList[6])
print('\t' + self.r + '↪ ' + self.y + 'map: ' + self.g +
f'https://www.google.co.in/maps/@{trackLocList[4]},{trackLocList[5]},15z?hl=en')
print(self.r + '* ' + self.b + 'postal_code: ' + self.g + trackLocList[7])
print(self.r + '* ' + self.b + 'registered_country: ' + self.g + trackLocList[8])
if 'subdivisions' in trackLocation:
print(self.r + '* ' + self.b + 'subdivisions: ' + self.g + trackLocList[9])
    def multiTracker(self, ipList):
        ips = [ip.strip() for ip in ipList.split(',')]  # avoid shadowing the built-in 'list'
        lenList = len(ips)
        i = 0
        while i < lenList:
            trackLocation = self.docReader.get(ips[i])
            if trackLocation is None:  # IP not present in the GeoLite2 database
                print('\n\n' + self.r + '<<< ' + ips[i] + ' NOT FOUND IN GEOIP DATABASE >>>')
                i += 1
                continue
trackLocList = []
if 'city' in trackLocation:
city = (trackLocation['city']['names']['en'])
trackLocList.append(city)
else:
trackLocList.append("NA")
if 'continent' in trackLocation:
continent = (trackLocation['continent']['names']['en'])
trackLocList.append(continent)
else:
trackLocList.append("NA")
if 'country' in trackLocation:
country = (trackLocation['country']['names']['en'])
trackLocList.append(country)
else:
trackLocList.append("NA")
locationAccuracy = str(trackLocation['location']['accuracy_radius'])
trackLocList.append(locationAccuracy)
locationLatitude = str(trackLocation['location']['latitude'])
trackLocList.append(locationLatitude)
locationLongitude = str(trackLocation['location']['longitude'])
trackLocList.append(locationLongitude)
            if 'time_zone' in trackLocation['location']:
                locationTimeZone = trackLocation['location']['time_zone']
trackLocList.append(locationTimeZone)
else:
trackLocList.append("NA")
            if 'postal' in trackLocation:
                postalCode = trackLocation['postal']['code']
trackLocList.append(postalCode)
else:
trackLocList.append("NA")
registeredCountry = (trackLocation['registered_country']['names']['en'])
trackLocList.append(registeredCountry)
if 'subdivisions' in trackLocation:
subdivisions = (trackLocation['subdivisions'][0]['names']['en'])
trackLocList.append(subdivisions)
else:
trackLocList.append("NA")
# print(trackLocation)
# print(trackLocList)
            print('\n\n' + self.b + '<<< ' + ips[i] + ' TRACK REPORT' + ' >>>')
            print(self.r + '* ' + self.b + 'public_ip: ' + self.g + ips[i])
if 'city' in trackLocation:
print(self.r + '* ' + self.b + 'city: ' + self.g + trackLocList[0])
if 'continent' in trackLocation:
print(self.r + '* ' + self.b + 'continent: ' + self.g + trackLocList[1])
if 'country' in trackLocation:
print(self.r + '* ' + self.b + 'country: ' + self.g + trackLocList[2])
print(self.r + '* ' + self.b + 'location: ')
print('\t' + self.r + '↪ ' + self.y + 'accuracy_radius: ' + self.g + trackLocList[3])
print('\t' + self.r + '↪ ' + self.y + 'latitude: ' + self.g + trackLocList[4])
print('\t' + self.r + '↪ ' + self.y + 'longitude: ' + self.g + trackLocList[5])
print('\t' + self.r + '↪ ' + self.y + 'time_zone: ' + self.g + trackLocList[6])
print('\t' + self.r + '↪ ' + self.y + 'map: ' + self.g +
f'https://www.google.co.in/maps/@{trackLocList[4]},{trackLocList[5]},15z?hl=en')
print(self.r + '* ' + self.b + 'postal_code: ' + self.g + trackLocList[7])
print(self.r + '* ' + self.b + 'registered_country: ' + self.g + trackLocList[8])
if 'subdivisions' in trackLocation:
print(self.r + '* ' + self.b + 'subdivisions: ' + self.g + trackLocList[9])
i += 1
if __name__ == '__main__':
tracker = IpTracker()
print('''
\nEnter the mode of IP tracking:
1. Track IP of the native machine
2. Track custom IP(s)''')
mode = input("\t\t:> ")
if mode == '1':
tracker.ownTracker()
elif mode == '2':
ipList = str(input("\nEnter the IP address(s) (e.g. '192.168.127.12, 172.16.31.10, 172.16.31.10'): "))
tracker.multiTracker(ipList) | 0.157655 | 0.122497 |
import os
from typing import List, Union
from werkzeug.wrappers import Request, Response
from werkzeug.serving import run_simple
from jsonrpc import Dispatcher, JSONRPCResponseManager as JRPCRespMgr
from jsonrpc.exceptions import JSONRPCDispatchException, JSONRPCServerError, JSONRPCInternalError
import log
from APIservice import proto
from pymrtd import ef
from settings import Config
from base64 import b64decode
# Before starting, you need to install the json-rpc library (pip install json-rpc)
def try_deser(f):
    try:
        return f()
    except Exception:
        raise proto.ProtoError("Bad parameter")
def _b64csigs_to_bcsigs(str_csigs: List[str]) -> List[bytes]:
""" Convert list of base64 encoded signatures to list of byte signatures """
csigs = []
for scsig in str_csigs:
csigs.append(try_deser(lambda: b64decode(scsig)))
return csigs
class PassIdApiServer:
""" PassID Api server """
api_method_prefix = "passID"
def __init__(self, db: proto.StorageAPI, config: Config):
self._conf = config.api_server
self._proto = proto.PassIdProto(db, config.challenge_ttl)
self._log = log.getLogger("passid.api")
# Register rpc api methods
self.__init_api()
def start(self):
run_simple(self._conf.host, self._conf.port, self.__create_calls, ssl_context=self._conf.ssl_ctx, threaded=True)
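    # Decorator that logs each API call and its response.
    # Intentionally defined without @staticmethod: __init_api() later discovers
    # the decorated methods by their "wrapped_api_f" name and registers them.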
def passidapi(api_f):
def wrapped_api_f(self, *args, **kwargs):
self.__log_api_call(api_f, **kwargs)
ret=api_f(self, *args, **kwargs)
self.__log_api_response(api_f, ret)
return ret
return wrapped_api_f
# RPC API methods
# API: passID.ping
@passidapi
def ping(self, ping: int) -> dict:
"""
Function returns challenge that passport needs to sign.
Challenge is base64 encoded.
"""
try:
pong = int.from_bytes(os.urandom(4), 'big')
return { "pong": pong }
except Exception as e:
return self.__handle_exception(e)
# API: passID.getChallenge
@passidapi
def getChallenge(self) -> dict:
"""
Function returns challenge that passport needs to sign.
Challenge is base64 encoded.
"""
try:
c = self._proto.createNewChallenge()
return { "challenge": c.toBase64() }
except Exception as e:
return self.__handle_exception(e)
# API: passID.cancelChallenge
@passidapi
def cancelChallenge(self, challenge: str) -> Union[None, dict]:
"""
Function erases challenge from server.
:param challenge: base64 encoded string
:return:
Nothing if success, else error
"""
try:
challenge = try_deser(lambda: proto.Challenge.fromBase64(challenge))
self._proto.cancelChallenge(challenge.id)
return None
except Exception as e:
return self.__handle_exception(e)
# API: passID.register
@passidapi
def register(self, dg15: str, sod: str, cid: str, csigs: List[str], dg14: str = None) -> dict:
"""
Register new user. It returns back to the client userId which is publicKey address,
session key and session expiration time.
:param dg15: eMRTD DG15 file
:param sod: eMRTD SOD file
:param cid: Challenge id
:param csigs: Challenge signatures
:param dg14: eMRTD DG14 file (optional)
:return:
'uid' - base64 encoded user id
'session_key' - base64 encoded session key
'expires' - unix timestamp of time when session will expire
"""
try:
dg15 = try_deser(lambda: ef.DG15.load(b64decode(dg15)))
sod = try_deser(lambda: ef.SOD.load(b64decode(sod)))
cid = try_deser(lambda: proto.CID.fromhex(cid))
csigs = _b64csigs_to_bcsigs(csigs)
if dg14 is not None:
dg14 = try_deser(lambda: ef.DG14.load(b64decode(dg14)))
            uid, sk, ses_exp = self._proto.register(dg15, sod, cid, csigs, dg14)  # avoid shadowing built-in 'set'
            return { "uid": uid.toBase64(), "session_key": sk.toBase64(), "expires": int(ses_exp.timestamp()) }
except Exception as e:
return self.__handle_exception(e)
# API: passID.login
@passidapi
def login(self, uid: str, cid: str, csigs: List[str], dg1: str = None) -> dict:
"""
It returns back session key and session expiration time.
:param uid: User id
:param cid: Challenge id
        :param csigs: Challenge signatures
        :param dg1: eMRTD DG1 file (optional)
        :return:
'session_key' - base64 encoded session key
'expires' - unix timestamp of time when session will expire
"""
try:
uid = try_deser(lambda: proto.UserId.fromBase64(uid))
cid = try_deser(lambda: proto.CID.fromhex(cid))
csigs = _b64csigs_to_bcsigs(csigs)
if dg1 is not None:
dg1 = try_deser(lambda: ef.DG1.load(b64decode(dg1)))
            sk, ses_exp = self._proto.login(uid, cid, csigs, dg1)  # avoid shadowing built-in 'set'
            return { "session_key": sk.toBase64(), "expires": int(ses_exp.timestamp()) }
except Exception as e:
return self.__handle_exception(e)
# API: passID.sayHello
@passidapi
def sayHello(self, uid: str, mac: str) -> dict:
"""
It returns back greeting message based on whether user is anonymous or not.
:param uid: User id
:param mac: session mac over api name and uid
:return:
'msg' - greeting message
"""
try:
uid = try_deser(lambda: proto.UserId.fromBase64(uid))
mac = try_deser(lambda: b64decode(mac))
msg = self._proto.sayHello(uid, mac)
return { "msg": msg }
except Exception as e:
return self.__handle_exception(e)
# Request handler
@Request.application
def __create_calls(self, request):
"""Create API calls"""
response = JRPCRespMgr.handle(
request.data,
self._req_disp
)
if response is not None:
return Response(response.json, mimetype='application/json')
return Response()
    def __handle_exception(self, e: Exception) -> dict:
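        # Maps known protocol/storage errors onto JSON-RPC errors; always raises rather than returning.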
if isinstance(e, proto.ProtoError):
self._log.debug("Request proto error: {}".format(e))
raise JSONRPCDispatchException(e.code, str(e))
if isinstance(e, proto.SeEntryNotFound):
self._log.debug("Request storage error: {}".format(e))
raise JSONRPCDispatchException(404, str(e))
self._log.error("Unhandled exception encountered, e={}".format(e))
raise JSONRPCDispatchException(500, "Internal Server Error")
def __init_api(self):
self._req_disp = Dispatcher()
def add_api_meth(api_f, name):
# method format: <api_prefix>.<methodName>
passid_api_f = lambda *args, **kwargs: api_f(self, *args, **kwargs)
self._req_disp.add_method(passid_api_f, "{}.{}".format(PassIdApiServer.api_method_prefix, name))
# register methods with @passidapi decorator as rpc api handler
import inspect
meths = inspect.getmembers(PassIdApiServer, predicate=inspect.isfunction)
for m in meths:
if m[1].__name__ == "wrapped_api_f":
add_api_meth(m[1], m[0])
def __log_api_call(self, f, **kwargs):
if self._log.level <= log.VERBOSE:
self._log.debug(":{}() ==>".format(f.__name__))
for a, v in kwargs.items():
self._log.verbose(" {}: {}".format(a, v))
def __log_api_response(self, f, resp: dict):
if self._log.level <= log.VERBOSE:
self._log.debug(":{}() <==".format(f.__name__))
if(resp is not None):
for a, v in resp.items():
                self._log.verbose(" {}: {}".format(a, v)) | tools/libs/PassID-Server/src/APIservice/api.py | 0.656548 | 0.095771 |
import textwrap
import grafanalib.core as GCore
import grafanacommon as GCommon
def dash(myuid, agginfo, nodesel, **kwargs):
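    # Single-row dashboard with one horizontal bar chart; the three ClickHouse
    # targets below each supply one bar (unique IPv6/64, IPv6 and IPv4 source counts).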
return GCommon.Dashboard(
title = "RSSAC sources",
tags = [
agginfo['graph_tag']
],
uid = myuid,
rows = [
GCore.Row(
panels = [
GCommon.BarChart(
title = 'Unique source addresses',
orientation = GCommon.BAR_CHART_ORIENTATION_HORIZONTAL,
layout = GCommon.BarChartLayout(
xaxis = GCommon.BarChartAxis(
title = 'Number of unique sources',
),
yaxis = GCommon.BarChartAxis(
tickmargin = 55,
title = 'IP Version/Aggregation',
),
),
traces = [
GCommon.BarChartTrace(
name = 'IPv6/64',
color = '#33B5E5',
x = 'IPv664Cnt',
y = 'IPv664Proto',
text = 'IPv664Cnt',
),
GCommon.BarChartTrace(
name = 'IPv6',
color = '#1F60C4',
x = 'IPv6Cnt',
y = 'IPv6Proto',
text = 'IPv6Cnt',
),
GCommon.BarChartTrace(
name = 'IPv4',
color = '#8877D9',
x = 'IPv4Cnt',
y = 'IPv4Proto',
text = 'IPv4Cnt',
),
],
targets = [
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'UniqueIPv6Addr' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
'IPv6/64' AS IPv664Proto,
uniqMerge(IPv664Addr) AS IPv664Cnt
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}""".format(
nodesel=nodesel)),
refId = 'A'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'UniqueIPv6Addr' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
'IPv6' AS IPv6Proto,
uniqMerge(IPv6Addr) AS IPv6Cnt
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}""".format(
nodesel=nodesel)),
refId = 'B'
),
GCommon.ClickHouseTableTarget(
database = agginfo['database'],
table = 'UniqueIPv4Addr' + agginfo['table_suffix'],
round = agginfo['round'],
query = textwrap.dedent("""\
SELECT
'IPv4' AS IPv4Proto,
uniqMerge(IPv4Addr) AS IPv4Cnt
FROM $table
WHERE $timeFilter
AND NodeID IN {nodesel}""".format(
nodesel=nodesel)),
refId = 'C'
),
],
),
],
),
]
) | grafana/common/dashboards/aggregated/rssac_sources.py | 0.355775 | 0.096706 |
import numpy as np
import matplotlib.pyplot as plt
import seaborn.apionly as sns
def display_aia_response_control_flow():
"""Show the control flow of the IDL programs used to compute AIA response functions"""
def plot_aia_response_functions(raw_response_file,fix_response_file):
"""Plot AIA temperature response functions as computed by SSW"""
#Load data
raw_tresp,fix_tresp = np.loadtxt(raw_response_file),np.loadtxt(fix_response_file)
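    # Assumed file layout: column 0 is log10(T); columns 1-6 are the six AIA channel responses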
#set labels
aia_labs = [r'$94\,\,\AA$',r'$131\,\,\AA$',r'$171\,\,\AA$',r'$193\,\,\AA$',r'$211\,\,\AA$',r'$335\,\,\AA$']
#Create figure
fig,ax = plt.subplots(1,2,figsize=(16,8))
for i in range(1,7):
#unnormalized
ax[0].plot(10**raw_tresp[:,0],raw_tresp[:,i],linewidth=2,linestyle='-',color=sns.color_palette('deep')[i-1],label=aia_labs[i-1])
ax[0].plot(10**fix_tresp[:,0],fix_tresp[:,i],linewidth=2,linestyle='--',color=sns.color_palette('deep')[i-1])
#normalized
ax[1].plot(raw_tresp[:,0],raw_tresp[:,i]/np.max(raw_tresp[:,i]),linewidth=2,linestyle='-',color=sns.color_palette('deep')[i-1])
ax[1].plot(fix_tresp[:,0],fix_tresp[:,i]/np.max(fix_tresp[:,i]),linewidth=2,linestyle='--',color=sns.color_palette('deep')[i-1])
#set plot options
ax[0].set_xscale('log')
ax[0].set_yscale('log')
ax[0].set_xlim([10**5.,10**8.])
ax[0].set_ylim([1e-28,1e-23])
ax[1].set_xlim([5,8])
ax[1].set_ylim([0,1])
#labels
ax[0].set_xlabel(r'$T\,\,\mathrm{(K)}$',fontsize=22)
ax[0].set_ylabel(r'Temperature Response $(\mathrm{DN}\,\mathrm{cm}^{-5}\,\mathrm{s}^{-1}\,\mathrm{pix}^{-1})$',fontsize=22)
ax[1].set_xlabel(r'$\log{T}\,\,\mathrm{(K)}$',fontsize=22)
ax[1].set_ylabel(r'Normalized Temperature Response',fontsize=22)
#legend
ax[0].legend(loc='best',fontsize=14)
plt.tight_layout()
plt.savefig('figures/aia_response_functions.eps',format='eps')
if __name__=='__main__':
    plot_aia_response_functions('aia_sample_data/aia_tresponse_raw.dat','aia_sample_data/aia_tresponse_fix.dat') | make_figures.py | 0.559771 | 0.548371 |
from .flash import Flash
class SWM320(object):
CHIP_CORE = 'Cortex-M4'
PAGE_SIZE = 4096
SECT_SIZE = 4096
CHIP_SIZE = 0x80000 # 512K
def __init__(self, jlink):
super(SWM320, self).__init__()
self.jlink = jlink
self.jlink.reset()
self.flash = Flash(self.jlink, SWM320_flash_algo)
self.jlink.write_U32(0x40031000, 1) # HRC select 20MHz
self.jlink.write_U32(0x40000000, 4) # Core Clock select HRC
def sect_erase(self, addr, size):
self.flash.Init(0, 0, 1)
for i in range(0, (size + self.SECT_SIZE - 1)//self.SECT_SIZE):
self.flash.EraseSector(addr + self.SECT_SIZE * i)
self.flash.UnInit(1)
def chip_write(self, addr, data):
self.sect_erase(addr, len(data))
self.flash.Init(0, 0, 2)
        for i in range(0, (len(data) + self.PAGE_SIZE - 1)//self.PAGE_SIZE):  # round up so a partial final page is still programmed
self.flash.ProgramPage(addr + self.PAGE_SIZE * i, data[self.PAGE_SIZE*i : self.PAGE_SIZE*(i+1)])
self.flash.UnInit(2)
def chip_read(self, addr, size, buff):
c_char_Array = self.jlink.read_mem(addr, size)
buff.extend(list(bytes(c_char_Array)))
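# Illustrative usage sketch (assumes a J-Link wrapper object exposing reset/write_U32/read_mem):
#   dev = SWM320(jlink)
#   dev.chip_write(0x00000000, firmware_bytes)
#   buf = []; dev.chip_read(0x00000000, len(firmware_bytes), buf)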
SWM320_flash_algo = {
'load_address' : 0x20000000,
'instructions' : [
0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
0x4770BA40, 0x4770BAC0, 0x47702000, 0x47702000, 0x482DB508, 0x492B2200, 0x60819200, 0x07C96A01,
0x6A01D0FC, 0xD1FC07C9, 0x20006082, 0xF040BD08, 0x48254100, 0x6A016081, 0xD0FC07C9, 0x07C96A01,
0x6081D1FC, 0x47704608, 0x2300B51F, 0x3102E9CD, 0x2404491D, 0x604860CC, 0xE01C9803, 0x93019300,
0x9C019302, 0x0B01F812, 0x40A000E4, 0x43209C02, 0x98019002, 0x90011C40, 0xDBF22804, 0x60089802,
0x1D009800, 0x28109000, 0x6A08DBE9, 0xD5FC0700, 0x38109803, 0x28009003, 0x60CBDCE0, 0x60C82008,
0xB00460CB, 0xBD102000, 0x4604B570, 0xE0052300, 0x78265CD5, 0xD10342B5, 0x1C641C5B, 0xD3F7428B,
0xBD704418, 0x8007FFFF, 0x4001F000, 0x00000000
],
'pc_Init' : 0x20000029,
'pc_UnInit' : 0x2000002D,
'pc_EraseSector' : 0x2000004F,
'pc_ProgramPage' : 0x20000069,
'pc_Verify' : 0x200000C9,
'pc_EraseChip' : 0x20000031,
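    # NOTE: the value below does not fit in 32 bits; presumably a typo for 0x2000001F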
'pc_BlankCheck' : 0x12000001F,
'static_base' : 0x20000600,
'begin_data' : 0x20000800,
'begin_stack' : 0x20001C00,
'analyzer_supported' : False,
# Relative region addresses and sizes
'ro_start' : 0x00000000,
'ro_size' : 0x000000CC,
'rw_start' : 0x000000CC,
'rw_size' : 0x00000004,
'zi_start' : 0x000000D0,
'zi_size' : 0x00000000,
# Flash information
'flash_start' : 0x00000000,
'flash_size' : 0x00080000,
'flash_page_size' : 0x00001000,
} | device/SWM320.py | 0.345216 | 0.163079 |
import pytest
from sanic_routing import BaseRouter
from sanic_routing.exceptions import NotFound
@pytest.fixture
def handler():
def handler(**kwargs):
return list(kwargs.values())[0]
return handler
class Router(BaseRouter):
def get(self, path, method, extra=None):
return self.resolve(path=path, method=method, extra=extra)
def test_alpha_does_cast(handler):
router = Router()
router.add("/<alphaonly:alpha>", handler)
router.finalize()
_, handler, params = router.get("/foobar", "BASE")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foobar"
def test_alpha_does_not_cast(handler):
router = Router()
router.add("/<alphaonly:alpha>", handler)
router.finalize()
with pytest.raises(NotFound):
router.get("/notfound123", "BASE")
def test_correct_alpha_v_string(handler):
router = Router()
router.add("/<alphaonly:alpha>", handler, methods=["alpha"])
router.add("/<anystring:str>", handler, methods=["str"])
router.finalize()
_, handler, params = router.get("/foobar", "alpha")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foobar"
_, handler, params = router.get("/foobar123", "str")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foobar123"
def test_use_string_raises_deprecation_warning(handler):
router = Router()
with pytest.warns(DeprecationWarning) as record:
router.add("/<foo:string>", handler)
assert len(record) == 1
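    # "deprected" (sic) is asserted verbatim below: it matches the misspelled message emitted by sanic-routing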
assert record[0].message.args[0] == (
"Use of 'string' as a path parameter type is deprected, and will be "
"removed in Sanic v21.12. Instead, use <foo:str>."
)
def test_use_number_raises_deprecation_warning(handler):
router = Router()
with pytest.warns(DeprecationWarning) as record:
router.add("/<foo:number>", handler)
assert len(record) == 1
assert record[0].message.args[0] == (
"Use of 'number' as a path parameter type is deprected, and will be "
"removed in Sanic v21.12. Instead, use <foo:float>."
)
@pytest.mark.parametrize(
"value", ("foo-bar", "foobar", "foo-bar-thing123", "foobar123", "123")
)
def test_slug_does_cast(handler, value):
router = Router()
router.add("/<slug:slug>", handler)
router.finalize()
_, handler, params = router.get(f"/{value}", "BASE")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == value
@pytest.mark.parametrize("value", ("-aaa", "FooBar", "Foo-Bar"))
def test_slug_does_not_cast(handler, value):
router = Router()
router.add("/<slug:slug>", handler)
router.finalize()
with pytest.raises(NotFound):
router.get(f"/{value}", "BASE")
def test_correct_slug_v_string(handler):
router = Router()
router.add("/<slug:slug>", handler, methods=["slug"])
router.add("/<anystring:str>", handler, methods=["str"])
router.finalize()
_, handler, params = router.get("/foo-bar", "slug")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foo-bar"
_, handler, params = router.get("/FooBar", "str")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "FooBar" | tests/test_builtin_param_types.py | import pytest
from sanic_routing import BaseRouter
from sanic_routing.exceptions import NotFound
@pytest.fixture
def handler():
def handler(**kwargs):
return list(kwargs.values())[0]
return handler
class Router(BaseRouter):
def get(self, path, method, extra=None):
return self.resolve(path=path, method=method, extra=extra)
def test_alpha_does_cast(handler):
router = Router()
router.add("/<alphaonly:alpha>", handler)
router.finalize()
_, handler, params = router.get("/foobar", "BASE")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foobar"
def test_alpha_does_not_cast(handler):
router = Router()
router.add("/<alphaonly:alpha>", handler)
router.finalize()
with pytest.raises(NotFound):
router.get("/notfound123", "BASE")
def test_correct_alpha_v_string(handler):
router = Router()
router.add("/<alphaonly:alpha>", handler, methods=["alpha"])
router.add("/<anystring:str>", handler, methods=["str"])
router.finalize()
_, handler, params = router.get("/foobar", "alpha")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foobar"
_, handler, params = router.get("/foobar123", "str")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foobar123"
def test_use_string_raises_deprecation_warning(handler):
router = Router()
with pytest.warns(DeprecationWarning) as record:
router.add("/<foo:string>", handler)
assert len(record) == 1
assert record[0].message.args[0] == (
"Use of 'string' as a path parameter type is deprected, and will be "
"removed in Sanic v21.12. Instead, use <foo:str>."
)
def test_use_number_raises_deprecation_warning(handler):
router = Router()
with pytest.warns(DeprecationWarning) as record:
router.add("/<foo:number>", handler)
assert len(record) == 1
assert record[0].message.args[0] == (
"Use of 'number' as a path parameter type is deprected, and will be "
"removed in Sanic v21.12. Instead, use <foo:float>."
)
@pytest.mark.parametrize(
"value", ("foo-bar", "foobar", "foo-bar-thing123", "foobar123", "123")
)
def test_slug_does_cast(handler, value):
router = Router()
router.add("/<slug:slug>", handler)
router.finalize()
_, handler, params = router.get(f"/{value}", "BASE")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == value
@pytest.mark.parametrize("value", ("-aaa", "FooBar", "Foo-Bar"))
def test_slug_does_not_cast(handler, value):
router = Router()
router.add("/<slug:slug>", handler)
router.finalize()
with pytest.raises(NotFound):
router.get(f"/{value}", "BASE")
def test_correct_slug_v_string(handler):
router = Router()
router.add("/<slug:slug>", handler, methods=["slug"])
router.add("/<anystring:str>", handler, methods=["str"])
router.finalize()
_, handler, params = router.get("/foo-bar", "slug")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "foo-bar"
_, handler, params = router.get("/FooBar", "str")
retval = handler(**params)
assert isinstance(retval, str)
assert retval == "FooBar" | 0.658198 | 0.433562 |
import sys
from collections.abc import Iterable
import numpy as np
import pandas as pd
from pysgrs import errors
from pysgrs.settings import settings
class Shaper:
@staticmethod
def get_shapes(n, shape=None, score=None):
def _score(x):
return (((1/2 + x["padding"])/n)**3)*(1 + x["shape_diff"]**4)
score = score or _score
# Explore:
m = np.sqrt(n)
mmin = int(np.floor(m))
mmax = int(np.ceil(m))
shapes = [
{"id": "min-square", "shape": (mmin, mmin)},
{"id": "opt-rect-1", "shape": (mmin, mmax)},
{"id": "opt-rect-2", "shape": (mmax, mmin)},
{"id": "max-square", "shape": (mmax, mmax)},
]
if shape:
modekey = "user"
shapes.append({"id": modekey, "shape": shape})
else:
modekey = "auto"
for i in range(2, n):
shapes.append({"id": "rect-{:d}".format(i), "shape": (i, int(np.ceil(n/i)))})
df = pd.DataFrame(shapes)
# Arrange:
df["size"] = df["shape"].apply(np.prod)
df["padding"] = df["size"] - n
df["shape_diff"] = df["shape"].apply(lambda x: np.abs(x[0] - x[1]))
df["score"] = df.apply(score, axis=1)
df = df.set_index("id")
df = df.sort_values(["score", "padding", "shape_diff"])
df.loc["auto", :] = df.loc[(df["score"] > 0) & (df.index.str.contains("-square|-rect")), :].iloc[0, :]
df = df.sort_values(["score", "padding", "shape_diff"])
settings.logger.debug("Shaper: {}={}".format(modekey, df.loc[modekey].to_dict()))
return df
@staticmethod
def pad(s, n, padding=" "):
m = n - len(s)
if m >= 0:
settings.logger.debug("Shaper: {}-pad string of length {} with '{}'".format(m, len(s), padding))
return s + padding*m
else:
            raise errors.IllegalParameter(
                "Final size ({}) must be greater or equal to string length ({})".format(n, len(s)))
@staticmethod
def to_matrix(s, shape=None, mode="auto", padding=" ", row_separator="\n"):
if isinstance(s, str):
if shape:
s = s.replace(row_separator, "")
if row_separator in s:
x = s.split(row_separator)
x[-1] = Shaper.pad(x[-1], len(x[0]), padding=padding)
x = [list(t) for t in x]
if not all([len(s) == len(x[0]) for s in x]):
raise errors.IllegalParameter(
"All rows must have the same length unless the last which may be padded")
else:
shape = shape or Shaper.get_shapes(len(s), shape=shape).loc[mode, "shape"]
n = np.prod(shape)
s = Shaper.pad(s, n, padding=padding)
x = list(s)
x = np.array(x)
elif isinstance(s, Iterable):
x = np.array(s)
else:
raise errors.IllegalParameter("String or array expected, received {} instead".format(type(s)))
        if shape is not None:  # reshape only when a target shape exists; a bare 1-D iterable stays 1-D
            x = x.reshape(shape)
settings.logger.debug("Shaper: {} of size {} shaped to {}-matrix.".format(type(s), len(s), x.shape))
return x
@staticmethod
def to_vector(x):
return np.array(x).flatten()
@staticmethod
def to_str(x):
return "".join(Shaper.to_vector(x))
def main():
sys.exit(0)
if __name__ == "__main__":
    main() | pysgrs/toolbox/shaper.py | 0.371023 | 0.360517 |
import igraph
import numpy as np
def community_ecg(self, weights=None, ens_size = 16, min_weight = 0.05):
"""
Runs an ensemble of single-level randomized Louvain;
each member of the ensemble gets a "vote" to determine if the edges
are intra-community or not;
the votes are aggregated into an "ECG edge weights" in range [0,1];
a final (full depth) Louvain (using the louvain package) is run
using those edge weights;
Parameters
----------
self : :class:`igraph.Graph`
Graph to define the partition on.
weights: list of double, optional
the edge weights
ens_size: int
the size of the ensemble of single-level Louvain.
min_weight: double in range [0,1]
the ECG edge weight for edges with zero votes from the ensemble.
Returns
-------
partition
The optimised partition, of class `igraph.clustering.VertexClustering`.
partition.W
The ECG edge weights
partition.CSI
The community strength index
Notes
-----
The ECG edge weight function is defined as:
min_weight + ( 1 - min_weight ) x (#votes_in_ensemble) / ens_size
The weights are linear in terms of the #votes, in the range [min_weight,1].
Examples
--------
>>> g = igraph.Graph.Famous('Zachary')
>>> part = g.community_ecg(ens_size=25, min_weight = .1)
"""
W = [0]*self.ecount()
## Ensemble of level-1 Louvain
for i in range(ens_size):
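        # Random vertex permutation randomizes Louvain's processing order, diversifying the ensemble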
p = np.random.permutation(self.vcount()).tolist()
g = self.permute_vertices(p)
l1 = g.community_multilevel(weights=weights, return_levels=True)[0].membership
b = [l1[p[x.tuple[0]]]==l1[p[x.tuple[1]]] for x in self.es]
W = [W[i]+b[i] for i in range(len(W))]
W = [min_weight + (1-min_weight)*W[i]/ens_size for i in range(len(W))]
## Force min_weight outside 2-core
core = self.shell_index()
ecore = [min(core[x.tuple[0]],core[x.tuple[1]]) for x in self.es]
w = [W[i] if ecore[i]>1 else min_weight for i in range(len(ecore))]
part = self.community_multilevel(weights=w)
part.W = w
part.CSI = 1-2*np.sum([min(1-i,i) for i in w])/len(w)
return part
igraph.Graph.community_ecg = community_ecg | ecg.py | 0.736021 | 0.569673 |
from __future__ import division, print_function, absolute_import
import unittest
from tempfile import NamedTemporaryFile
import os
import numpy as np
import deepdish as dd
from contextlib import contextmanager
class TestCore(unittest.TestCase):
def test_multi_range(self):
x0 = [(0, 0), (0, 1), (0, 2), (1, 0), (1, 1), (1, 2)]
x1 = list(dd.multi_range(2, 3))
assert x0 == x1
def test_bytesize(self):
assert dd.humanize_bytesize(1) == '1 B'
assert dd.humanize_bytesize(2 * 1024) == '2 KB'
assert dd.humanize_bytesize(3 * 1024**2) == '3 MB'
assert dd.humanize_bytesize(4 * 1024**3) == '4 GB'
assert dd.humanize_bytesize(5 * 1024**4) == '5 TB'
assert dd.bytesize(np.ones((5, 2), dtype=np.int16)) == 20
assert dd.memsize(np.ones((5, 2), dtype=np.int16)) == '20 B'
def test_span(self):
assert dd.span(np.array([0, -10, 20])) == (-10, 20)
def test_apply_once(self):
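        # apply_once applies the function over the given axes, keeping those dims (size 1) by default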
x = np.arange(3 * 4 * 5).reshape((3, 4, 5))
np.testing.assert_array_almost_equal(dd.apply_once(np.std, x, [0, -1]),
16.39105447 * np.ones((1, 4, 1)))
x = np.arange(2 * 3).reshape((2, 3))
np.testing.assert_array_equal(dd.apply_once(np.sum, x, 1, keepdims=False),
np.array([3, 12]))
def test_tupled_argmax(self):
x = np.zeros((3, 4, 5))
x[1, 2, 3] = 10
assert dd.tupled_argmax(x) == (1, 2, 3)
def test_slice(self):
s = [slice(None, 3), slice(None), slice(2, None), slice(3, 4), Ellipsis, [1, 2, 3]]
        assert list(dd.aslice[:3, :, 2:, 3:4, ..., [1, 2, 3]]) == s  # compare against the expected slices
def test_timed(self):
# These tests only make sure it does not cause errors
with dd.timed():
pass
times = []
with dd.timed(callback=times.append):
pass
assert len(times) == 1
x = np.zeros(1)
x[:] = np.nan
with dd.timed(file=x):
pass
assert not np.isnan(x[0])
if __name__ == '__main__':
    unittest.main() | deepdish/tests/test_core.py | 0.692122 | 0.636833 |
import json
import re
import scrapy
from scrapy import Request
from MinkchSpider.items import MinkchItemLoader
from MinkchSpider.items import MinkchItem
from MinkchSpider.utils import common
from MinkchSpider import settings
class MinkchSpider(scrapy.Spider):
name = 'minkch'
allowed_domains = ['minkch.com']
start_urls = ['https://minkch.com/page/{}'.format(settings.START_PAGE)]
    archive_crawl_count = 0  # counter of archives crawled so far
    # Crawl start_url (the first list page) for the link of its first archive and yield it to parse_archives
    def parse(self, response):
first_archive_url = response.xpath('//a[@rel="bookmark" and not(@target="_blank")][1]/@href').extract_first('')
yield Request(url=first_archive_url, callback=self.parse_archives)
    # Crawl the detail (archive) page
def parse_archives(self, response):
        # Update the counter; ARCHIVE_CRAWL_LIMIT in settings.py caps how many archives are crawled
MinkchSpider.archive_crawl_count += 1
        # Log the current page number and archive index
current_page = settings.START_PAGE + MinkchSpider.archive_crawl_count // settings.APP
current_archive = MinkchSpider.archive_crawl_count % settings.APP
print('[My Log] Crawling page {}, archive {}'.format(current_page, current_archive))
        # Two XPaths for images: 'original' resolves the source images, 'displaying' the images shown on the page
img_xpath = {
'original': '//div[@class="entry-content clearfix"][last()]//img[@class="pict"]/parent::a/attribute::href',
'displaying': '//div[@class="entry-content clearfix"][last()]//img[@class="pict"]/attribute::src'}
        # Parse the fields into the item via the custom ItemLoader
item_loader = MinkchItemLoader(item=MinkchItem(), response=response)
item_loader.add_value('id', response.url)
item_loader.add_xpath('title', '//h2[@class="h2 entry-title "]/span/text()')
item_loader.add_xpath('date_time', '//*[@class="entry-meta-default"]//time/attribute::datetime')
item_loader.add_xpath('tags', '//div[@class="entry-utility entry-meta"]/a/text()')
item_loader.add_xpath('comments', '//*[@class="entry-meta-default"]//em/text()')
item_loader.add_xpath('pre_url', '//*[@id="nav-below"]//a[@rel="prev"]/attribute::href')
item_loader.add_xpath('next_url', '//*[@id="nav-below"]//a[@rel="next"]/attribute::href')
item_loader.add_xpath('img_scalar', '//div[@class="entry-content clearfix"][last()]//strong/text()')
item_loader.add_xpath('video_scalar', '//div[@class="entry-content clearfix"][last()]//strong/text()')
item_loader.add_xpath('img_urls', img_xpath['original'])
item_loader.add_xpath('video_urls', '//video//source/attribute::src')
archive_item = item_loader.load_item()
        # Translate the title via the Baidu translation API
yield Request(url=common.get_trans_url(archive_item['title']),
meta={'archive_item': archive_item},
callback=self.parse_rest,
dont_filter=True)
        # Keep crawling older archives until the limit is reached
        if MinkchSpider.archive_crawl_count < settings.ARCHIVE_CRAWL_LIMIT and archive_item.get('pre_url'):
            yield Request(url=archive_item['pre_url'], callback=self.parse_archives)
    # Parse the rest: (1) translated title (2) aggregated media URLs (3) acquisition rates (4) relative source folder path; finally yield the item
def parse_rest(self, response):
        # Chinese-translated title
archive_item = response.meta.get('archive_item', '')
json_data = json.loads(response.text)
if 'trans_result' in json_data:
archive_item['title_zh'] = json_data['trans_result'][0]['dst']
else:
            print('[My Log] Failed to obtain translation result, in .\\MinkchSpider\\minkch.py')
        # Aggregate the media resources
archive_item['media_urls'] = []
if settings.DL_IMG:
archive_item['media_urls'] += archive_item.get('img_urls', [])
if settings.DL_VIDEO:
archive_item['media_urls'] += archive_item.get('video_urls', [])
        # Resource acquisition rates (URLs found / count advertised on the page)
if int(archive_item.get('img_scalar', 0)) != 0:
archive_item['img_acquisition_rate'] = \
len(archive_item.get('img_urls', [])) / int(archive_item.get('img_scalar', 0))
else:
archive_item['img_acquisition_rate'] = 1.0
if int(archive_item.get('video_scalar', 0)) != 0:
archive_item['video_acquisition_rate'] = \
len(archive_item.get('video_urls', [])) / int(archive_item.get('video_scalar', 0))
else:
archive_item['video_acquisition_rate'] = 1.0
        # Relative folder path of the resources under MinkchSpider\medias\ (effectively the folder name)
        # The source path is for reference only; due to the site's layout it is not always accurate
archive_item['src_path'] = ''
for media_url in archive_item.get('media_urls', ['']):
match = re.match(r'.*/(\d+)/\d+.+', media_url)
if match:
archive_item['src_path'] = \
match.group(1)[:4] + '\\' + match.group(1)[4:6] + '\\' + match.group(1)[6:] + '\\'
break
if archive_item['src_path'] == '':
for media_url in archive_item.get('media_urls', ['']):
match = re.match(r'https://.*com/(.*)', media_url)
if match:
archive_item['src_path'] += match.group(1) + ', '
        yield archive_item | minkch_spider/MinkchSpider/spiders/minkch.py | 0.144269 | 0.096408 |