import cv2
import numpy as np
from PIL import Image, ImageDraw
from scipy.spatial import ConvexHull
from skimage import filters
import tensorflow as tf
from monopsr.core import evaluation
from monopsr.datasets.kitti import instance_utils, calib_utils
from monopsr.visualization import vis_utils
def np_proj_error(points_uv, points_mask, exp_grid_uv):
"""Calculates projection error of instance points with a 2D box
Args:
points_uv: (2, N) Points in u, v coordinates
points_mask: (N,) Mask of valid points
exp_grid_uv: expected [u, v] grid projection
Returns:
proj_err_norm: projection error normalized by the number of valid pixels
"""
    # Calculate projection error
pred_grid_uv = points_uv.reshape(2, *exp_grid_uv[0].shape)
points_mask = points_mask.reshape(1, *exp_grid_uv[0].shape)
pred_proj_err_uv = pred_grid_uv - exp_grid_uv
pred_proj_err = np.sum(np.abs(pred_proj_err_uv) * points_mask)
proj_err_norm = pred_proj_err / np.count_nonzero(points_mask)
return proj_err_norm
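# Illustrative example for np_proj_error (synthetic shapes, not from the
# original API): a prediction offset by one pixel in both u and v over a
# fully valid 4x4 grid gives |du| + |dv| = 2.0 per pixel.
#
#     exp_grid_uv = np.stack(np.meshgrid(np.arange(4), np.arange(4)))
#     points_uv = (exp_grid_uv + 1.0).reshape(2, -1)
#     np_proj_error(points_uv, np.ones(16), exp_grid_uv)  # -> 2.0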
def scipy_proj_error(x, args):
"""Calculates projection error of instance points with a 2D box.
Used for minimizing projection error when varying xz_dist and centroid_y.
Args:
x: array of inputs
xz_dist: distance along viewing angle
centroid_y: box centroid y
args: dict with additional data
'viewing_angle': viewing angle
'inst_points' = (N, 3) instance points
'cam_p' = (3, 4) camera projection matrix
'exp_grid_uv' = expected [u, v] grid projection
'rotate_view' = bool of whether to rotate by viewing angle
Returns:
proj_err_norm: projection error normalized by the number of valid pixels
"""
# Parse inputs from x
xz_dist = x[0]
centroid_y = x[1]
# Parse inputs from args
viewing_angle = args['viewing_angle']
inst_points = args['inst_points']
cam_p = args['cam_p']
exp_grid_uv = args['exp_grid_uv']
rotate_view = args['rotate_view']
pred_points_in_img, valid_points_mask = instance_utils.proj_points(
xz_dist, centroid_y, viewing_angle, inst_points, cam_p, rotate_view=rotate_view)
proj_err_norm = np_proj_error(pred_points_in_img, valid_points_mask, exp_grid_uv)
return proj_err_norm
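# Sketch of the intended use with scipy.optimize (hypothetical initial values;
# `opt_args` is the dict described in the docstring above):
#
#     from scipy.optimize import minimize
#     x0 = np.array([10.0, 1.5])  # [xz_dist, centroid_y]
#     res = minimize(scipy_proj_error, x0, args=(opt_args,), method='Nelder-Mead')
#     best_xz_dist, best_centroid_y = res.x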
def scipy_proj_error_with_viewing_angle(x, args):
"""Calculates projection error of instance points with a 2D box.
Used for minimizing projection error when varying xz_dist, centroid_y, and viewing_angle.
Args:
x: array of inputs
xz_dist: distance along viewing angle
centroid_y: box centroid y
viewing_angle: viewing angle
args: dict with additional data
'inst_points' = (N, 3) instance points
'cam_p' = (3, 4) camera projection matrix
'exp_grid_uv' = expected [u, v] grid projection
'rotate_view' = bool of whether to rotate by viewing angle
Returns:
proj_err_norm: projection error normalized by the number of valid pixels
"""
# Parse inputs from x
xz_dist = x[0]
centroid_y = x[1]
viewing_angle = x[2]
# Parse inputs from args
inst_points = args['inst_points']
cam_p = args['cam_p']
exp_grid_uv = args['exp_grid_uv']
rotate_view = args['rotate_view']
pred_points_in_img, valid_points_mask = instance_utils.proj_points(
xz_dist, centroid_y, viewing_angle, inst_points, cam_p, rotate_view=rotate_view)
proj_err_norm = np_proj_error(pred_points_in_img, valid_points_mask, exp_grid_uv)
return proj_err_norm
def tf_proj_error(points_uv, points_mask, exp_grid_uv):
    """(Not implemented) TensorFlow version of np_proj_error.
    Args:
        points_uv: (2, N) Points in u, v coordinates
        points_mask: (N,) Mask of valid points
        exp_grid_uv: expected [u, v] grid projection
    Returns:
        proj_err_norm: projection error normalized by the number of valid pixels
    """
    # return tf.zeros(32)
    raise NotImplementedError('Not implemented yet')
def np_proj_err_rgb_images(xz_dist, centroid_y, viewing_angle,
cam2_inst_points_local, cam_p,
inst_rgb, inst_mask, image, valid_mask_map, box_2d,
guess_row_col, show_images=False):
"""(Work in progress) Calculates the projection error based on RGB similarity and shows
images for comparison.
Args:
xz_dist: Distance along viewing angle
centroid_y: Object centroid y
viewing_angle: Viewing angle
cam2_inst_points_local: (N, 3) Instance points in local frame
cam_p: (3, 4) Camera projection matrix
        inst_rgb: List of instance RGB values
        inst_mask: (H, W) Instance mask
        image: Image of sample
        valid_mask_map: (H, W) Map mask of valid values
        box_2d: 2D box [y1, x1, y2, x2]
        guess_row_col: Guess index, used for numbering images
        show_images: (optional) Whether to show comparison images
    Returns:
        image_diff_total: Total image difference between the instance crop and
            the estimated re-projected crop
"""
# Get projection into image
proj_uv, valid_points_mask = instance_utils.proj_points(
xz_dist, centroid_y, viewing_angle, cam2_inst_points_local, cam_p)
# Get RGB values of projected pixels
proj_uv_int = np.round(proj_uv).astype(np.int32)
guess_rgb = image[proj_uv_int[1], proj_uv_int[0]]
guess_rgb_map = guess_rgb.reshape(48, 48, 3) * np.expand_dims(valid_mask_map, 2)
# Estimated image
est_image = np.copy(image) * np.expand_dims(~inst_mask, 2)
est_image[proj_uv_int[1], proj_uv_int[0]] = inst_rgb
est_image[proj_uv_int[1]-1, proj_uv_int[0]] = inst_rgb
est_image[proj_uv_int[1]+1, proj_uv_int[0]] = inst_rgb
est_image[proj_uv_int[1], proj_uv_int[0]-1] = inst_rgb
est_image[proj_uv_int[1], proj_uv_int[0]+1] = inst_rgb
box_2d_int = np.round(box_2d).astype(np.int32)
est_inst_rgb = est_image[box_2d_int[0]:box_2d_int[2], box_2d_int[1]:box_2d_int[3]]
est_inst_rgb_resized = cv2.resize(est_inst_rgb, (48, 48))
# Check image similarity
inst_rgb_map = inst_rgb.reshape(48, 48, 3)
# image_diff_map = abs(inst_rgb_map - guess_rgb_map)
image_diff_map = abs(inst_rgb_map - est_inst_rgb_resized)
image_diff_map_norm = np.sum(image_diff_map, axis=2) / 255.0
image_diff_total = np.sum(image_diff_map_norm)
if show_images:
        # cv2_size = (160, 160)
        # cv2_size = (90, 90)
        cv2_size = (120, 120)
# # Show instance RGB for comparison
# inst_rgb_map_resized = cv2.resize(inst_rgb_map, cv2_size)
# vis_utils.cv2_imshow('inst_rgb_map_resized {}'.format(guess_row_col),
# inst_rgb_map_resized,
# size_wh=cv2_size, row_col=guess_row_col)
#
# # Show guess
# guess_rgb_map_resized = cv2.resize(guess_rgb_map, (200, 200))
# vis_utils.cv2_imshow('guess_rgb_map_resized {}'.format(guess_row_col),
# guess_rgb_map_resized,
# size_wh=cv2_size, row_col=guess_row_col)
vis_utils.cv2_imshow('est_inst_rgb_resized {}'.format(guess_row_col),
est_inst_rgb_resized,
size_wh=cv2_size, row_col=guess_row_col)
# combined = cv2.addWeighted(inst_rgb_map, 0.5, est_inst_rgb_resized, 0.5, 0.0)
# vis_utils.cv2_imshow('combined {}'.format(guess_row_col),
# combined,
# size_wh=cv2_size, row_col=guess_row_col)
# vis_utils.cv2_imshow('image_diff_map_norm {}'.format(guess_row_col),
# image_diff_map_norm,
# size_wh=cv2_size, row_col=guess_row_col)
# vis_utils.cv2_imshow('valid_mask {}'.format(centroid_y),
# (valid_mask_map * 255).astype(np.uint8),
# size_wh=cv2_size, row_col=guess_row_col)
return image_diff_total
def np_proj_err_rgb(xz_dist, centroid_y, viewing_angle, cam2_inst_points_local, cam_p,
                    inst_rgb, image, valid_mask_map):
    """Calculates a normalized projection error based on RGB similarity."""
    # Get instance RGB
inst_rgb_map = inst_rgb.reshape(48, 48, 3)
# Project points to image
proj_uv, _ = instance_utils.proj_points(
xz_dist, centroid_y, viewing_angle, cam2_inst_points_local, cam_p)
# Get RGB values of projected pixels
proj_uv_int = np.round(proj_uv).astype(np.int32)
guess_rgb = image[proj_uv_int[1], proj_uv_int[0]]
guess_rgb_map = guess_rgb.reshape(48, 48, 3) * np.expand_dims(valid_mask_map, 2)
# Check image similarity
image_diff_map = abs(inst_rgb_map - guess_rgb_map)
image_diff_map_norm = np.sum(image_diff_map, axis=2) / 255.0
image_diff_total = np.sum(image_diff_map_norm) / np.count_nonzero(valid_mask_map)
return image_diff_total
def scipy_proj_err_rgb(x, args):
"""Calculates projection error based on RGB similarity.
(Minimization with this doesn't seem to work since
large patches will be matched at incorrect positions)
"""
# Parse inputs from x
xz_dist = x[0]
centroid_y = x[1]
if len(x) == 3:
viewing_angle = x[2]
else:
viewing_angle = args['viewing_angle']
# Parse inputs from args
inst_points = args['inst_points']
cam_p = args['cam_p']
inst_rgb = args['inst_rgb']
image = args['image']
valid_mask_map = args['valid_mask_map']
proj_err_rgb = np_proj_err_rgb(
xz_dist=xz_dist,
centroid_y=centroid_y,
viewing_angle=viewing_angle,
cam2_inst_points_local=inst_points,
cam_p=cam_p,
inst_rgb=inst_rgb,
image=image,
valid_mask_map=valid_mask_map,
)
return proj_err_rgb
def convex_hull_mask_iou(points_uv, im_shape, gt_hull_mask):
"""Computes masks by calculating a convex hull from points. Creates two masks (if possible),
one for the estimated foreground pixels and one for the estimated background pixels.
Args:
points_uv: (2, N) Points in u, v coordinates
        im_shape: image shape [im_height, im_width]
        gt_hull_mask: ground truth mask created by calculating a convex hull
    Returns:
        best_iou: best mask IoU between the computed hull masks and the ground
            truth hull mask
"""
im_height, im_width = im_shape
# Segment the points into background and foreground
if len(set(points_uv[0])) > 1:
thresh = filters.threshold_li(points_uv[0])
pred_seg_1 = points_uv[0] > thresh
pred_seg_2 = points_uv[0] < thresh
segs = [pred_seg_1, pred_seg_2]
else:
# There is only one unique point so a threshold cannot be made
segs = [np.full(points_uv[0].shape, True, dtype=bool)]
mask_list = []
# Loop over both segments since it is uncertain which segment is foreground or background
for seg in segs:
# Obtain the coordinates of the pixels
pred_u = np.int32(points_uv[0][seg])
pred_v = np.int32(points_uv[1][seg])
# Remove duplicate coordinates by forming a set
coords = set(zip(pred_u, pred_v))
# Convex hull calculation requires a numpy array
coords = np.array(list(coords))
# Need at least 3 points to create convex hull
if len(coords) < 3:
continue
        # Hull is degenerate if all points share the same u or v coordinate
        # (i.e. they lie along an axis-aligned line)
        elif any(np.all(coords == coords[0, :], axis=0)):
            continue
else:
hull = ConvexHull(coords)
img = Image.new('L', (im_width, im_height), 0)
vertices = list(zip(coords[hull.vertices, 0], coords[hull.vertices, 1]))
ImageDraw.Draw(img).polygon(vertices, outline=1, fill=1)
mask = np.array(img)
mask_list.append(mask)
best_iou = 0
for mask in mask_list:
iou = evaluation.mask_iou(mask, gt_hull_mask)
if iou > best_iou:
best_iou = iou
return best_iou
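# Illustrative sketch (synthetic points; each u-threshold segment needs at
# least 3 non-collinear points for a hull to be built):
#
#     pts = np.array([[2., 2., 8., 20., 20., 26.],
#                     [2., 8., 5., 2., 8., 5.]])  # (2, N) u, v
#     gt_hull_mask = np.zeros((32, 32), dtype=np.uint8)
#     gt_hull_mask[2:9, 18:27] = 1
#     convex_hull_mask_iou(pts, [32, 32], gt_hull_mask)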
def scipy_convex_hull_mask_inv_iou(x, args):
"""Computes masks by calculating a convex hull from points. Creates two masks (if possible),
one for the estimated foreground pixels and one for the estimated background pixels.
Minimizes inverted IoU by varying xz_dist and centroid_y.
Args:
x: array of inputs
xz_dist: distance along viewing angle
centroid_y: box centroid y
args: dict with additional data
'viewing_angle': viewing angle
'inst_points' = (N, 3) instance points
'cam_p' = (3, 4) camera projection matrix
'im_shape' = image shape [im_height, im_width]
'gt_hull_mask' = expected mask created from instance mask
Returns:
inverted_iou: 1.0 - IoU of the mask computed from the convex hull and the gt hull mask
"""
# Parse inputs from x
xz_dist = x[0]
centroid_y = x[1]
# Parse inputs from args
viewing_angle = args['viewing_angle']
inst_points = args['inst_points']
cam_p = args['cam_p']
im_shape = args['im_shape']
gt_hull_mask = args['gt_hull_mask']
pred_points_in_img, valid_points_mask = instance_utils.proj_points(
xz_dist, centroid_y, viewing_angle, inst_points, cam_p)
iou = convex_hull_mask_iou(pred_points_in_img, im_shape, gt_hull_mask)
# Invert IoU so it can be minimized
inverted_iou = 1.0 - iou
return inverted_iou
def scipy_convex_hull_mask_inv_iou_with_viewing_angle(x, args):
"""Computes masks by calculating a convex hull from points. Creates two masks (if possible),
one for the estimated foreground pixels and one for the estimated background pixels.
Minimizes inverted IoU by varying xz_dist, centroid_y, and viewing angle.
Args:
x: array of inputs
xz_dist: distance along viewing angle
centroid_y: box centroid y
viewing_angle: viewing angle
args: dict with additional data
'inst_points' = (N, 3) instance points
'cam_p' = (3, 4) camera projection matrix
'im_shape' = image shape [im_height, im_width]
'gt_hull_mask' = expected mask created from instance mask
Returns:
inverted_iou: 1.0 - IoU of the mask computed from the convex hull and the gt hull mask
"""
# Parse inputs from x
xz_dist = x[0]
centroid_y = x[1]
viewing_angle = x[2]
# Parse inputs from args
inst_points = args['inst_points']
cam_p = args['cam_p']
im_shape = args['im_shape']
gt_hull_mask = args['gt_hull_mask']
pred_points_in_img, valid_points_mask = instance_utils.proj_points(
xz_dist, centroid_y, viewing_angle, inst_points, cam_p)
iou = convex_hull_mask_iou(pred_points_in_img, im_shape, gt_hull_mask)
# Invert IoU so it can be minimized
inverted_iou = 1.0 - iou
return inverted_iou
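# As with scipy_proj_error, these objectives are meant to be passed to a scipy
# minimizer (sketch with hypothetical values):
#
#     from scipy.optimize import minimize
#     opt_args = {'viewing_angle': 0.2, 'inst_points': inst_points, 'cam_p': cam_p,
#                 'im_shape': [im_height, im_width], 'gt_hull_mask': gt_hull_mask}
#     res = minimize(scipy_convex_hull_mask_inv_iou, [10.0, 1.5], args=(opt_args,),
#                    method='Powell')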
#
# Copyright 2021 Red Hat Inc.
# SPDX-License-Identifier: Apache-2.0
#
"""Test the ExpiredDataRemover object."""
import logging
import re
from datetime import datetime
from unittest.mock import patch
from uuid import uuid4
import pytz
from dateutil import relativedelta
from api.provider.models import Provider
from masu.external.date_accessor import DateAccessor
from masu.processor.expired_data_remover import ExpiredDataRemover
from masu.processor.expired_data_remover import ExpiredDataRemoverError
from masu.test import MasuTestCase
from masu.test.database.helpers import ManifestCreationHelper
from reporting_common.models import CostUsageReportManifest
class ExpiredDataRemoverTest(MasuTestCase):
"""Test Cases for the ExpiredDataRemover object."""
def test_initializer(self):
"""Test to init."""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
self.assertEqual(remover._months_to_keep, 3)
self.assertEqual(remover._line_items_months, 1)
remover2 = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS, 2, 2)
self.assertEqual(remover2._months_to_keep, 2)
self.assertEqual(remover2._line_items_months, 2)
def test_initializer_ocp(self):
"""Test to init for OCP."""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_OCP)
self.assertEqual(remover._months_to_keep, 3)
self.assertEqual(remover._line_items_months, 1)
def test_initializer_azure(self):
"""Test to init for Azure."""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AZURE)
self.assertEqual(remover._months_to_keep, 3)
self.assertEqual(remover._line_items_months, 1)
def test_initializer_invalid_provider(self):
"""Test to init with unknown provider."""
with self.assertRaises(ExpiredDataRemoverError):
ExpiredDataRemover(self.schema, "BAD")
@patch("masu.processor.aws.aws_report_db_cleaner.AWSReportDBCleaner.__init__", side_effect=Exception)
def test_initializer_provider_exception(self, mock_aws_cleaner):
"""Test to init."""
with self.assertRaises(ExpiredDataRemoverError):
ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
def test_calculate_expiration_date(self):
"""Test that the expiration date is correctly calculated."""
date_matrix = [
{
"current_date": datetime(year=2018, month=7, day=1),
"expected_expire": datetime(year=2018, month=4, day=1, tzinfo=pytz.UTC),
"months_to_keep": None,
},
{
"current_date": datetime(year=2018, month=7, day=31),
"expected_expire": datetime(year=2018, month=4, day=1, tzinfo=pytz.UTC),
"months_to_keep": None,
},
{
"current_date": datetime(year=2018, month=3, day=20),
"expected_expire": datetime(year=2017, month=12, day=1, tzinfo=pytz.UTC),
"months_to_keep": None,
},
{
"current_date": datetime(year=2018, month=7, day=1),
"expected_expire": datetime(year=2017, month=7, day=1, tzinfo=pytz.UTC),
"months_to_keep": 12,
},
{
"current_date": datetime(year=2018, month=7, day=31),
"expected_expire": datetime(year=2017, month=7, day=1, tzinfo=pytz.UTC),
"months_to_keep": 12,
},
{
"current_date": datetime(year=2018, month=3, day=20),
"expected_expire": datetime(year=2016, month=3, day=1, tzinfo=pytz.UTC),
"months_to_keep": 24,
},
]
for test_case in date_matrix:
with patch.object(DateAccessor, "today", return_value=test_case.get("current_date")):
retention_policy = test_case.get("months_to_keep")
if retention_policy:
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS, retention_policy)
else:
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
expire_date = remover._calculate_expiration_date()
self.assertEqual(expire_date, test_case.get("expected_expire"))
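    # For reference, the expiration date appears to be the first day of the
    # month, `months_to_keep` months before the current date (a sketch of the
    # assumed calculation, not necessarily the actual implementation):
    #
    #     expire = today.replace(day=1, hour=0, minute=0, second=0,
    #                            microsecond=0) - relativedelta.relativedelta(months=3)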
def test_remove(self):
"""Test that removes the expired data based on the retention policy."""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
removed_data = remover.remove()
self.assertEqual(len(removed_data), 0)
@patch("masu.processor.expired_data_remover.AWSReportDBCleaner.purge_expired_report_data")
def test_remove_provider(self, mock_purge):
"""Test that remove is called with provider_uuid."""
provider_uuid = self.aws_provider_uuid
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
remover.remove(provider_uuid=provider_uuid)
mock_purge.assert_called_with(simulate=False, provider_uuid=provider_uuid)
@patch("masu.processor.expired_data_remover.AWSReportDBCleaner.purge_expired_line_item")
def test_remove_provider_items_only(self, mock_purge):
"""Test that remove is called with provider_uuid items only."""
provider_uuid = self.aws_provider_uuid
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
date = remover._calculate_expiration_date(line_items_only=True)
remover.remove(provider_uuid=provider_uuid, line_items_only=True)
mock_purge.assert_called_with(expired_date=date, simulate=False, provider_uuid=provider_uuid)
@patch("masu.processor.expired_data_remover.AWSReportDBCleaner.purge_expired_line_item")
def test_remove_items_only(self, mock_purge):
"""Test that remove is called with provider_uuid items only."""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
date = remover._calculate_expiration_date(line_items_only=True)
remover.remove(line_items_only=True)
mock_purge.assert_called_with(expired_date=date, simulate=False)
def test_delete_expired_cost_usage_report_manifest(self):
"""
Test that expired CostUsageReportManifests are removed.
        This test inserts CostUsageReportManifest objects,
        and then deletes CostUsageReportManifest objects older than
        the calculated expiration_date.
"""
provider_type_dict = {
Provider.PROVIDER_AWS_LOCAL: self.aws_provider_uuid,
Provider.PROVIDER_AZURE_LOCAL: self.azure_provider_uuid,
Provider.PROVIDER_OCP: self.ocp_provider_uuid,
}
for provider_type in provider_type_dict:
remover = ExpiredDataRemover(self.schema, provider_type)
expiration_date = remover._calculate_expiration_date()
current_month = datetime.today().replace(day=1)
day_before_cutoff = expiration_date - relativedelta.relativedelta(days=1)
dates = [current_month, day_before_cutoff, expiration_date]
uuids = []
uuids_to_be_deleted = []
for date in dates:
manifest_creation_datetime = current_month
manifest_updated_datetime = manifest_creation_datetime + relativedelta.relativedelta(days=2)
uuid = uuid4()
data = {
"assembly_id": uuid,
"manifest_creation_datetime": manifest_creation_datetime,
"manifest_updated_datetime": manifest_updated_datetime,
"billing_period_start_datetime": date,
"num_total_files": 1,
"provider_id": provider_type_dict[provider_type],
}
uuids.append(uuid)
if date == day_before_cutoff:
uuids_to_be_deleted.append(uuid)
manifest_entry = CostUsageReportManifest(**data)
manifest_entry.save()
remover.remove()
for uuid in uuids:
record_count = CostUsageReportManifest.objects.filter(assembly_id=uuid).count()
if uuid in uuids_to_be_deleted:
self.assertEqual(0, record_count)
else:
self.assertEqual(1, record_count)
def test_simulate_delete_expired_cost_usage_report_manifest(self):
"""
Test that expired CostUsageReportManifest is not removed during simulation.
Test that the number of records that would have been deleted is logged.
"""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
expiration_date = remover._calculate_expiration_date()
day_before_cutoff = expiration_date - relativedelta.relativedelta(days=1)
day_before_cutoff_data = {
"assembly_id": uuid4(),
"manifest_creation_datetime": None,
"manifest_updated_datetime": None,
"billing_period_start_datetime": day_before_cutoff,
"num_total_files": 1,
"provider_id": self.aws_provider_uuid,
}
CostUsageReportManifest(**day_before_cutoff_data).save()
with self.assertLogs(logger="masu.processor.expired_data_remover", level="INFO") as cm:
logging.disable(logging.NOTSET)
remover.remove(simulate=True)
expected_log_message = "Removed CostUsageReportManifest"
# Check if the log message exists in the log output:
self.assertTrue(
any(match is not None for match in [re.search(expected_log_message, line) for line in cm.output]),
"Expected to see log message: "
+ expected_log_message
+ "in the list of log messages"
+ " but the list of log messages was instead : "
+ str(cm.output),
)
# Re-enable log suppression
logging.disable(logging.CRITICAL)
def test_remove_cost_usage_manifests_by_provider_uuid(self):
"""
Test that calling remove(provider_uuid) deletes CostUsageReportManifests.
CostUsageReportManifests that are associated with the provider_uuid
should be deleted.
"""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS_LOCAL)
expiration_date = remover._calculate_expiration_date()
current_month = datetime.today().replace(day=1)
day_before_cutoff = expiration_date - relativedelta.relativedelta(days=1)
        fixture_records = [
            (self.aws_provider_uuid, expiration_date),  # not expired, should not delete
            (self.aws_provider_uuid, day_before_cutoff),  # expired, should delete
            (self.azure_provider_uuid, day_before_cutoff),  # expired, but other provider, should not delete
        ]
manifest_uuids = []
manifest_uuids_to_be_deleted = []
manifest_creation_datetime = current_month
manifest_updated_datetime = manifest_creation_datetime + relativedelta.relativedelta(days=2)
for fixture_record in fixture_records:
manifest_uuid = uuid4()
data = {
"assembly_id": manifest_uuid,
"manifest_creation_datetime": manifest_creation_datetime,
"manifest_updated_datetime": manifest_updated_datetime,
"billing_period_start_datetime": fixture_record[1],
"num_total_files": 1,
"provider_id": fixture_record[0],
}
CostUsageReportManifest(**data).save()
manifest_uuids.append(manifest_uuid)
if fixture_record[1] == day_before_cutoff and fixture_record[0] == self.aws_provider_uuid:
manifest_uuids_to_be_deleted.append(manifest_uuid)
remover.remove(provider_uuid=self.aws_provider_uuid)
for manifest_uuid in manifest_uuids:
record_count = CostUsageReportManifest.objects.filter(assembly_id=manifest_uuid).count()
if manifest_uuid in manifest_uuids_to_be_deleted:
self.assertEqual(0, record_count)
else:
self.assertEqual(1, record_count)
def test_simulate_delete_expired_cost_usage_report_manifest_by_provider_uuid(self):
"""
        Test simulating the deletion of expired CostUsageReportManifests
        using remove(provider_uuid).
"""
remover = ExpiredDataRemover(self.schema, Provider.PROVIDER_AWS)
expiration_date = remover._calculate_expiration_date()
day_before_cutoff = expiration_date - relativedelta.relativedelta(days=1)
manifest_id = 7766
day_before_cutoff_data = {
"id": manifest_id,
"assembly_id": uuid4(),
"manifest_creation_datetime": None,
"manifest_updated_datetime": None,
"billing_period_start_datetime": day_before_cutoff,
"num_total_files": 1,
"provider_id": self.aws_provider_uuid,
}
manifest_entry = CostUsageReportManifest(**day_before_cutoff_data)
manifest_entry.save()
manifest_helper = ManifestCreationHelper(
manifest_id, manifest_entry.num_total_files, manifest_entry.assembly_id
)
manifest_helper.generate_test_report_files()
manifest_helper.process_all_files()
count_records = CostUsageReportManifest.objects.count()
with self.assertLogs(logger="masu.processor.expired_data_remover", level="INFO") as cm:
logging.disable(logging.NOTSET)
remover.remove(simulate=True, provider_uuid=self.aws_provider_uuid)
expected_log_message = "Removed CostUsageReportManifest"
# Check if the log message exists in the log output:
self.assertTrue(
any(match is not None for match in [re.search(expected_log_message, line) for line in cm.output]),
"Expected to see log message: "
+ expected_log_message
+ "in the list of log messages"
+ " but the list of log messages was instead : "
+ str(cm.output),
)
# Re-enable log suppression
logging.disable(logging.CRITICAL)
self.assertEqual(count_records, CostUsageReportManifest.objects.count())
def test_remove_items_only_azure(self):
"""Test that remove is called with provider_uuid items only."""
azure_types = [Provider.PROVIDER_AZURE, Provider.PROVIDER_AZURE_LOCAL]
for az_type in azure_types:
remover = ExpiredDataRemover(self.schema, az_type)
result_no_provider = remover.remove(line_items_only=True)
self.assertIsNone(result_no_provider)
result_with_provider = remover.remove(line_items_only=True, provider_uuid="1234")
self.assertIsNone(result_with_provider)
import threading
import time
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.figure import Figure
from matplotlib.widgets import Slider, Button
import logging
logging.basicConfig(level=logging.DEBUG,
format='(%(threadName)-9s) %(message)s',)
class MyFigure(Figure):
def __init__(self, *args, **kwargs):
"""
custom kwarg figtitle is a figure title
"""
figtitle = kwargs.pop('figtitle', 'hi mom')
Figure.__init__(self, *args, **kwargs)
self.text(0.5, 0.95, figtitle, ha='center')
self.y = np.zeros(100, dtype=float)
self.x = range(100)
self.subplots_adjust(left=0.25, bottom=0.25)
self.lock = threading.Lock()
    def init_gui(self):
        # Add axes to this figure directly instead of relying on the global 'fig'
        self.ax_top = self.add_subplot(111)
        self.ax_top.set_ylim([0, 1])
self.ax_btn_update = plt.axes([0.8, 0.025, 0.1, 0.04])
self.ax_sld_value = plt.axes([0.25, 0.1, 0.65, 0.03])
self.btn_update = Button(self.ax_btn_update, 'Update')
self.btn_update.on_clicked(self.rand)
self.sld_value = Slider(self.ax_sld_value, 'Value', 0., 1.0, valinit=0.5)
self.sld_value.on_changed(self.value)
def rand(self, event):
self.y = np.delete(self.y, 0)
self.y = np.append(self.y, [np.random.random_sample()])
self.ax_top.cla()
self.ax_top.set_ylim([0,1])
self.ax_top.plot(self.x, self.y, 'b')
self.canvas.draw_idle()
def value(self, val):
self.y = np.delete(self.y, 0)
self.y = np.append(self.y, [float(val)])
self.ax_top.cla()
self.ax_top.set_ylim([0,1])
self.ax_top.plot(self.x, self.y, 'b')
self.canvas.draw_idle()
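    # Note: most Matplotlib backends are not thread-safe; driving redraws from
    # a background thread (as async_event does below) happens to work with some
    # backends, but a main-thread timer or FuncAnimation is the safer pattern.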
    def async_event(self):
while True:
self.rand(0)
time.sleep(0.1)
if __name__ == '__main__':
fig = plt.figure(FigureClass=MyFigure, figtitle='my title')
fig.init_gui()
    t = threading.Thread(target=fig.async_event, args=())
    t.daemon = True  # let the process exit when the plot window is closed
    t.start()
plt.show()
# MIT License
#
# Copyright (C) 2021. Huawei Technologies Co., Ltd. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
from ultra.utils.ray import default_ray_kwargs
# Set environment to better support Ray
os.environ["MKL_NUM_THREADS"] = "1"
import time
import psutil
import pickle
import dill
import gym
import ray
import torch
import argparse
from smarts.zoo.registry import make
from ultra.utils.episode import episodes
from ultra.evaluate import evaluation_check
num_gpus = 1 if torch.cuda.is_available() else 0
# @ray.remote(num_gpus=num_gpus / 2, max_calls=1)
@ray.remote(num_gpus=num_gpus / 2)
def train(task, num_episodes, policy_class, eval_info, timestep_sec, headless, seed):
torch.set_num_threads(1)
total_step = 0
finished = False
# --------------------------------------------------------
# Initialize Agent and social_vehicle encoding method
# -------------------------------------------------------
AGENT_ID = "007"
spec = make(locator=policy_class)
env = gym.make(
"ultra.env:ultra-v0",
agent_specs={AGENT_ID: spec},
scenario_info=task,
headless=headless,
timestep_sec=timestep_sec,
seed=seed,
)
agent = spec.build_agent()
for episode in episodes(num_episodes, etag=policy_class):
observations = env.reset()
state = observations[AGENT_ID]
dones, infos = {"__all__": False}, None
episode.reset()
experiment_dir = episode.experiment_dir
# save entire spec [ policy_params, reward_adapter, observation_adapter]
if not os.path.exists(f"{experiment_dir}/spec.pkl"):
if not os.path.exists(experiment_dir):
os.makedirs(experiment_dir)
with open(f"{experiment_dir}/spec.pkl", "wb") as spec_output:
dill.dump(spec, spec_output, pickle.HIGHEST_PROTOCOL)
while not dones["__all__"]:
if episode.get_itr(AGENT_ID) >= 1000000: # 1M observation break
finished = True
break
evaluation_check(
agent=agent,
agent_id=AGENT_ID,
policy_class=policy_class,
episode=episode,
**eval_info,
**env.info,
)
action = agent.act(state, explore=True)
observations, rewards, dones, infos = env.step({AGENT_ID: action})
next_state = observations[AGENT_ID]
loss_output = agent.step(
state=state,
action=action,
reward=rewards[AGENT_ID],
next_state=next_state,
done=dones[AGENT_ID],
)
episode.record_step(
agent_id=AGENT_ID,
infos=infos,
rewards=rewards,
total_step=total_step,
loss_output=loss_output,
)
total_step += 1
state = next_state
episode.record_episode()
episode.record_tensorboard(agent_id=AGENT_ID)
if finished:
break
env.close()
if __name__ == "__main__":
parser = argparse.ArgumentParser("intersection-single-agent")
    parser.add_argument(
        "--task", help="Tasks available: [0, 1, 2, 3]", type=str, default="1"
    )
    parser.add_argument(
        "--level",
        help="Levels available: [easy, medium, hard, no-traffic]",
        type=str,
        default="easy",
    )
parser.add_argument(
"--episodes", help="number of training episodes", type=int, default=1000000
)
parser.add_argument(
"--timestep", help="environment timestep (sec)", type=float, default=0.1
)
    parser.add_argument(
        # argparse's type=bool treats any non-empty string as True, so a
        # store_true flag is used here instead.
        "--headless", help="run without envision", action="store_true"
    )
parser.add_argument(
"--eval-episodes", help="number of evaluation episodes", type=int, default=200
)
parser.add_argument(
"--eval-rate",
help="evaluation rate based on number of observations",
type=int,
default=10000,
)
parser.add_argument(
"--seed", help="environment seed", default=2, type=int,
)
args = parser.parse_args()
num_cpus = max(
1, psutil.cpu_count(logical=False) - 1
) # remove `logical=False` to use all cpus
policy_class = "ultra.baselines.sac:sac-v0"
# ray_kwargs = default_ray_kwargs(num_cpus=num_cpus, num_gpus=num_gpus)
ray.init() # **ray_kwargs)
# try:
ray.wait(
[
train.remote(
task=(args.task, args.level),
num_episodes=int(args.episodes),
eval_info={
"eval_rate": float(args.eval_rate),
"eval_episodes": int(args.eval_episodes),
},
timestep_sec=float(args.timestep),
headless=args.headless,
policy_class=policy_class,
seed=args.seed,
)
]
)
# finally:
# time.sleep(1)
# ray.shutdown()
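# Example invocation (hypothetical script path and values):
#
#     python ultra/train.py --task 1 --level easy --episodes 10000 --headless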
#!/usr/local/bin/python3.3
def echo(message):
print(message)
return
echo('Direct Call')
x = echo
x('Indirect Call')
def indirect(func, arg):
func(arg)
indirect(echo, "Argument Call")
schedule = [(echo, 'Spam'), (echo, 'Ham')]
for (func, arg) in schedule:
func(arg)
def make(label):
def echo(message):
print(label + ': ' + message)
return echo
F = make('Spam')
F('Eggs')
F('Ham')
def func(a):
b = 'spam'
return b * a
print(func(8))
print(dir(func))
func.handles = 'Button-Press'
func.count = 0
print(dir(func))
def func(a: 'spam', b: (1, 10), c: float) -> int:
return a+b+c
print(func.__annotations__)
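# Expected output of the annotations example above:
#     {'a': 'spam', 'b': (1, 10), 'c': <class 'float'>, 'return': <class 'int'>}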
# Copyright (c) 2015
#
# All rights reserved.
#
# This file is distributed under the Clear BSD license.
# The full text can be found in LICENSE in the root directory.
from boardfarm.devices import prompt
from boardfarm.tests import rootfs_boot
class NetperfRFC2544(rootfs_boot.RootFSBootTest):
"""Single test to simulate RFC2544."""
def runTest(self):
"""Single test to simulate RFC2544."""
board = self.dev.board
lan = self.dev.lan
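        # Frame sizes roughly follow the RFC2544 set; 74 presumably stands in
        # for the usual 64-byte frame to account for header overhead.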
for sz in ["74", "128", "256", "512", "1024", "1280", "1518"]:
print("running %s UDP test" % sz)
lan.sendline(
"netperf -H 192.168.0.1 -t UDP_STREAM -l 60 -- -m %s" % sz)
lan.expect_exact(
"netperf -H 192.168.0.1 -t UDP_STREAM -l 60 -- -m %s" % sz)
lan.expect("UDP UNIDIRECTIONAL")
lan.expect(prompt, timeout=90)
board.sendline()
board.expect(prompt)
import SimpleITK as sitk
import numpy as np
def reshape_by_padding_upper_coords(image, new_shape, pad_value=None):
    shape = tuple(image.shape)
new_shape = tuple(np.max(np.concatenate((shape, new_shape)).reshape((2,len(shape))), axis=0))
if pad_value is None:
if len(shape)==2:
pad_value = image[0,0]
elif len(shape)==3:
pad_value = image[0, 0, 0]
else:
raise ValueError("Image must be either 2 or 3 dimensional")
res = np.ones(list(new_shape), dtype=image.dtype) * pad_value
if len(shape) == 2:
res[0:0+int(shape[0]), 0:0+int(shape[1])] = image
elif len(shape) == 3:
res[0:0+int(shape[0]), 0:0+int(shape[1]), 0:0+int(shape[2])] = image
return res
def random_crop_3D_image_batched(img, crop_size):
if type(crop_size) not in (tuple, list):
crop_size = [crop_size] * (len(img.shape) - 2)
else:
assert len(crop_size) == (len(img.shape) - 2), "If you provide a list/tuple as center crop make sure it has the same len as your data has dims (3d)"
if crop_size[0] < img.shape[2]:
lb_x = np.random.randint(0, img.shape[2] - crop_size[0])
elif crop_size[0] == img.shape[2]:
lb_x = 0
else:
raise ValueError("crop_size[0] must be smaller or equal to the images x dimension")
if crop_size[1] < img.shape[3]:
lb_y = np.random.randint(0, img.shape[3] - crop_size[1])
elif crop_size[1] == img.shape[3]:
lb_y = 0
else:
raise ValueError("crop_size[1] must be smaller or equal to the images y dimension")
if crop_size[2] < img.shape[4]:
lb_z = np.random.randint(0, img.shape[4] - crop_size[2])
elif crop_size[2] == img.shape[4]:
lb_z = 0
else:
raise ValueError("crop_size[2] must be smaller or equal to the images z dimension")
return img[:, :, lb_x:lb_x + crop_size[0], lb_y:lb_y + crop_size[1], lb_z:lb_z + crop_size[2]]
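# Illustrative usage (synthetic data):
#
#     img = np.random.rand(5, 5)                              # 2D image
#     padded = reshape_by_padding_upper_coords(img, (8, 8))   # -> shape (8, 8)
#     batch = np.random.rand(2, 1, 16, 16, 16)                # (b, c, x, y, z)
#     crop = random_crop_3D_image_batched(batch, 8)           # -> (2, 1, 8, 8, 8)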
#
# PySNMP MIB module SUN-SNMP-NETRA-CT-RSC-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SUN-SNMP-NETRA-CT-RSC-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:12:10 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "SingleValueConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Counter64, ModuleIdentity, NotificationType, enterprises, iso, TimeTicks, MibIdentifier, Gauge32, IpAddress, Bits, Integer32, ObjectIdentity, Unsigned32, Counter32, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "Counter64", "ModuleIdentity", "NotificationType", "enterprises", "iso", "TimeTicks", "MibIdentifier", "Gauge32", "IpAddress", "Bits", "Integer32", "ObjectIdentity", "Unsigned32", "Counter32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn")
DisplayString, MacAddress, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "MacAddress", "TextualConvention")
netraCtRscMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 42, 2, 65, 2))
netraCtRscMIB.setRevisions(('1900-04-18 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: netraCtRscMIB.setRevisionsDescriptions(('First version of MIB module SUN-SNMP-NETRA-CT-RSC-MIB.',))
if mibBuilder.loadTexts: netraCtRscMIB.setLastUpdated('0004181200Z')
if mibBuilder.loadTexts: netraCtRscMIB.setOrganization('')
if mibBuilder.loadTexts: netraCtRscMIB.setContactInfo('')
if mibBuilder.loadTexts: netraCtRscMIB.setDescription('The MIB module for the Netra ct 400/800 Remote System Control Products')
netraCtRscObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1))
netraCtRscEvents = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 2))
netraCtRscExpmnt = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 3))
netraCtRscAdminObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 1))
netraCtRscConfigObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2))
netraCtRscSerial2Objs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3))
netraCtRscModemObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 4))
netraCtRscEnetObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5))
netraCtRscEnvObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6))
netraCtRscLogObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7))
netraCtRscRccConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 3, 1))
netraCtRscTrapPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 2, 0))
class DateAndTime(OctetString):
subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(11, 11)
fixedLength = 11
netraCtRscAdminRscReset = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("set", 1), ("clear", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscAdminRscReset.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAdminRscReset.setDescription('Setting this will soft-reset only the RSC (Remote System Controller) card. The clear(2) setting is read-only for this variable. This variable will always read as clear(2).')
netraCtRscAdminHostReset = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("set", 1), ("clear", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscAdminHostReset.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAdminHostReset.setDescription('Setting netraCtRscAdminHostReset will either send a break to the host or toggle a hard reset line. A break will be sent if netraCtRscPanicDump contains the value of on(1). Otherwise, a hard reset will occur. The clear(2) setting is read-only for this variable. This variable will always read as clear(2).')
netraCtRscAdminXir = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("set", 1), ("clear", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscAdminXir.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAdminXir.setDescription('Sends a hardware xir pulse to the host when set to true. This variable resets itself to clear(2) after the negation of the pulse.')
netraCtRscAdminNmi = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("set", 1), ("clear", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscAdminNmi.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAdminNmi.setDescription('Sends a hardware nmi pulse to the host when set to true. This variable resets itself to clear(2) after the negation of the pulse.')
netraCtRscAdminBreak = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("set", 1), ("clear", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscAdminBreak.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAdminBreak.setDescription('Sends a break to the Host when this is set. The clear(2) setting is read-only for this variable. This variable will always read as clear(2).')
netraCtRscGlobalPageFlag = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscGlobalPageFlag.setStatus('current')
if mibBuilder.loadTexts: netraCtRscGlobalPageFlag.setDescription('An on(1) to this variable will enable paging for RSC alerts. An off(2) will disable paging.')
netraCtRscGlobalEmailFlag = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscGlobalEmailFlag.setStatus('current')
if mibBuilder.loadTexts: netraCtRscGlobalEmailFlag.setDescription('An on(1) to this variable will enable email for RSC alerts. An off(2) will disable email.')
netraCtRscGlobalIPModeFlag = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("disabled", 1), ("config", 2), ("dhcp", 3), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscGlobalIPModeFlag.setStatus('current')
if mibBuilder.loadTexts: netraCtRscGlobalIPModeFlag.setDescription('IP mode global flag')
netraCtRscGlobalPPPFlag = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscGlobalPPPFlag.setStatus('current')
if mibBuilder.loadTexts: netraCtRscGlobalPPPFlag.setDescription('serial configuration flag indicating whether PPP should be the default.')
netraCtRscHostname = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 40))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscHostname.setStatus('current')
if mibBuilder.loadTexts: netraCtRscHostname.setDescription('name of Host connected to RSC')
netraCtRscCustomerInfo = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 40))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscCustomerInfo.setStatus('current')
if mibBuilder.loadTexts: netraCtRscCustomerInfo.setDescription('Customer information used in the message generated for a pager or email alert')
netraCtRscVersionBootMajor = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionBootMajor.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionBootMajor.setDescription('RSC Firmware BootMonitor Revision Major Number')
netraCtRscVersionBootMinor = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionBootMinor.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionBootMinor.setDescription('RSC Firmware BootMonitor Revision Minor Number')
netraCtRscVersionBootMicro = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionBootMicro.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionBootMicro.setDescription('RSC Firmware BootMonitor Revision Micro Number')
netraCtRscVersionMainMajor = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionMainMajor.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionMainMajor.setDescription('RSC Core RSC Revision Major Number')
netraCtRscVersionMainMinor = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionMainMinor.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionMainMinor.setDescription('RSC Core RSC Revision Minor Number')
netraCtRscVersionMainMicro = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionMainMicro.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionMainMicro.setDescription('RSC Core RSC Revision Micro Number')
netraCtRscVersionFirmwareMajor = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionFirmwareMajor.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionFirmwareMajor.setDescription('RSC Core Firmware Revision Major Number')
netraCtRscVersionFirmwareMinor = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionFirmwareMinor.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionFirmwareMinor.setDescription('RSC Core Firmware Revision Minor Number')
netraCtRscVersionFirmwareMicro = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscVersionFirmwareMicro.setStatus('current')
if mibBuilder.loadTexts: netraCtRscVersionFirmwareMicro.setDescription('RSC Core Firmware Revision Micro Number')
netraCtRscTOD = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 16), DateAndTime()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscTOD.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTOD.setDescription('RSC time of day')
netraCtRscEscape = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 17), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscEscape.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEscape.setDescription('set the first character of the 2-character escape sequence')
netraCtRscHostWatchDogReboot = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscHostWatchDogReboot.setStatus('current')
if mibBuilder.loadTexts: netraCtRscHostWatchDogReboot.setDescription('RSC monitors a heartbeat from the Solaris host. If this heartbeat is late, then a message will be logged, and an alarm will be set. If netraCtRscHostWatchDogReboot is on, then the Solaris host will also be rebooted.')
netraCtRscHostWatchDogTimeout = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscHostWatchDogTimeout.setStatus('current')
if mibBuilder.loadTexts: netraCtRscHostWatchDogTimeout.setDescription('RSC monitors a heartbeat from the Solaris host. This variable indicates the maximum tolerable number of seconds between heartbeats, before RSC will set alarm0. A setting of 0 indicates that the heartbeat should not be monitored.')
netraCtRscPanicDump = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 2, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscPanicDump.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPanicDump.setDescription('If set to true when netraCtRscAdminHostReset is set, then a break will be sent to the host, causing a core dump to be saved on the host. Otherwise, the setting of netraCtRscAdminHostReset will cause a hardware reset.')
netraCtRscSerial2Mode = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 2147483647))).clone(namedValues=NamedValues(("rcc", 1), ("modem", 2), ("tty", 3), ("disabled", 4), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2Mode.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2Mode.setDescription('serial port 2 configuration mode.')
netraCtRscSerial2Parity = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("none", 1), ("odd", 2), ("even", 3), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2Parity.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2Parity.setDescription('serial port 2 parity mode.')
netraCtRscSerial2Stop = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2Stop.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2Stop.setDescription('serial port 2 stop bits.')
netraCtRscSerial2Data = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("seven", 1), ("eight", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2Data.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2Data.setDescription('serial port 2 data bits.')
netraCtRscSerial2Baud = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 2147483647))).clone(namedValues=NamedValues(("b300", 1), ("b1200", 2), ("b1800", 3), ("b2400", 4), ("b4800", 5), ("b9600", 6), ("b19200", 7), ("b38400", 8), ("b57600", 9), ("b115200", 10), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2Baud.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2Baud.setDescription('serial port 2 baud rate.')
netraCtRscSerial2HwFlowcontrol = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2HwFlowcontrol.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2HwFlowcontrol.setDescription('serial port 2 Hardware Flowcontrol.')
netraCtRscSerial2Inactivity = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2Inactivity.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2Inactivity.setDescription('serial port 2 inactivity.')
netraCtRscSerial2PagerOneConfig = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 40))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneConfig.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneConfig.setDescription('primary pager number for RSC.')
netraCtRscSerial2PagerTwoConfig = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 40))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoConfig.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoConfig.setDescription('secondary pager number for RSC')
netraCtRscSerial2PagerOneBaud = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 2147483647))).clone(namedValues=NamedValues(("b300", 1), ("b1200", 2), ("b2400", 3), ("b4800", 4), ("b9600", 5), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneBaud.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneBaud.setDescription('primary pager baud rate.')
netraCtRscSerial2PagerTwoBaud = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 2147483647))).clone(namedValues=NamedValues(("b300", 1), ("b1200", 2), ("b2400", 3), ("b4800", 4), ("b9600", 5), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoBaud.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoBaud.setDescription('secondary pager baud rate.')
netraCtRscSerial2PagerOneParity = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("none", 1), ("odd", 2), ("even", 3), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneParity.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneParity.setDescription('primary pager parity mode.')
netraCtRscSerial2PagerTwoParity = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("none", 1), ("odd", 2), ("even", 3), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoParity.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoParity.setDescription('secondary pager parity mode.')
netraCtRscSerial2PagerOneStop = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneStop.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneStop.setDescription('primary pager stop bits.')
netraCtRscSerial2PagerTwoStop = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoStop.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoStop.setDescription('secondary pager stop bits.')
netraCtRscSerial2PagerOneData = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("seven", 1), ("eight", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneData.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneData.setDescription('primary pager data bits.')
netraCtRscSerial2PagerTwoData = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("seven", 1), ("eight", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoData.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoData.setDescription('secondary pager data bits.')
netraCtRscSerial2PagerOneInit = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 18), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneInit.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerOneInit.setDescription('primary pager modem init string.')
netraCtRscSerial2PagerTwoInit = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 19), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoInit.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoInit.setDescription('secondary pager modem init string.')
netraCtRscSerial2PagerOnePassword = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 20), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerOnePassword.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerOnePassword.setDescription('primary pager TAP password.')
netraCtRscSerial2PagerTwoPassword = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 3, 21), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoPassword.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSerial2PagerTwoPassword.setDescription('secondary pager TAP password.')
netraCtRscModemParity = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("none", 1), ("odd", 2), ("even", 3), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscModemParity.setStatus('current')
if mibBuilder.loadTexts: netraCtRscModemParity.setDescription('modem parity mode.')
netraCtRscModemStop = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("one", 1), ("two", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscModemStop.setStatus('current')
if mibBuilder.loadTexts: netraCtRscModemStop.setDescription('modem stop bits.')
netraCtRscModemData = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("seven", 1), ("eight", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscModemData.setStatus('current')
if mibBuilder.loadTexts: netraCtRscModemData.setDescription('modem data bits.')
netraCtRscCountryCode = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 4, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscCountryCode.setStatus('current')
if mibBuilder.loadTexts: netraCtRscCountryCode.setDescription('country specified using the CCITT international dial-plan number.')
netraCtRscModemModel = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 4, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscModemModel.setStatus('current')
if mibBuilder.loadTexts: netraCtRscModemModel.setDescription('the modem type used on the netraCtRsc card.')
netraCtRscMacAddress = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscMacAddress.setStatus('current')
if mibBuilder.loadTexts: netraCtRscMacAddress.setDescription('Ethernet address for RSC.')
netraCtRscEnetTpeLinkTest = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscEnetTpeLinkTest.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEnetTpeLinkTest.setDescription("Determines whether tpe-link-test should be set or cleared. This should be off when interfacing with older-style hubs that don't support heartbeat.")
netraCtRscIPAddress = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscIPAddress.setStatus('current')
if mibBuilder.loadTexts: netraCtRscIPAddress.setDescription('configured IP address for RSC.')
netraCtRscIpMask = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscIpMask.setStatus('current')
if mibBuilder.loadTexts: netraCtRscIpMask.setDescription('configured IP netmask for RSC.')
netraCtRscIpGateway = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscIpGateway.setStatus('current')
if mibBuilder.loadTexts: netraCtRscIpGateway.setDescription('configured IP gateway for RSC.')
netraCtRscSNMPHostAddress = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscSNMPHostAddress.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSNMPHostAddress.setDescription('configured SNMP server for RSC alerts.')
netraCtRscMailHostAddress = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 7), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscMailHostAddress.setStatus('current')
if mibBuilder.loadTexts: netraCtRscMailHostAddress.setDescription('Address of Mail Server.')
netraCtRscMailUser = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 40))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscMailUser.setStatus('current')
if mibBuilder.loadTexts: netraCtRscMailUser.setDescription('email address for RSC alerts.')
netraCtRscPPPLocalIP = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 9), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscPPPLocalIP.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPPPLocalIP.setDescription('PPP local IP address.')
netraCtRscPPPRemoteIP = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 10), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscPPPRemoteIP.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPPPRemoteIP.setDescription('PPP remote IP address.')
netraCtRscMailHostAddressBackup = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 5, 11), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscMailHostAddressBackup.setStatus('current')
if mibBuilder.loadTexts: netraCtRscMailHostAddressBackup.setDescription('Backup address of the mail server. When no IP address is configured for the mail server, the backup IP address becomes the first IP address of the mailhost.')
netraCtRscSystemType = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscSystemType.setStatus('current')
if mibBuilder.loadTexts: netraCtRscSystemType.setDescription('System type. This currently returns one of two strings: NetraCt400 or NetraCt800.')
netraCtRscPowerSupplyCount = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscPowerSupplyCount.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPowerSupplyCount.setDescription('The total number of Power Supply FRUs in the platform.')
netraCtRscPowerSupplyTable = MibTable((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 3), )
if mibBuilder.loadTexts: netraCtRscPowerSupplyTable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPowerSupplyTable.setDescription('A table listing the characteristics of the Power-Supply FRU.')
netraCtRscPowerSupplyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 3, 1), ).setIndexNames((0, "SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscPowerSupplyIndex"))
if mibBuilder.loadTexts: netraCtRscPowerSupplyEntry.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPowerSupplyEntry.setDescription('an entry (conceptual row) in the netraCtRscPowerSupplyTable')
netraCtRscPowerSupplyIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 3, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscPowerSupplyIndex.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPowerSupplyIndex.setDescription('Entry number for this Power-Supply FRU row.')
netraCtRscPowerSupplyPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("true", 1), ("false", 2), ("unknown", 3), ("notimpl", 2147483647)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscPowerSupplyPresent.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPowerSupplyPresent.setDescription('Power-Supply FRU is present.')
netraCtRscPowerSupplyOperState = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 2147483647))).clone(namedValues=NamedValues(("okay", 1), ("failed", 2), ("unknown", 3), ("offline", 4), ("notimpl", 2147483647)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscPowerSupplyOperState.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPowerSupplyOperState.setDescription('The current status of the Power-Supply FRU.')
netraCtRscPowerSupplyAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 3, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscPowerSupplyAdminState.setStatus('current')
if mibBuilder.loadTexts: netraCtRscPowerSupplyAdminState.setDescription('Setting this value to on(1) requests that the power supply be turned on. Setting this value to off(2) requests that the power supply be turned off.')
netraCtRscAlarmCount = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscAlarmCount.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmCount.setDescription('Number of Alarm Ports in this Platform')
netraCtRscAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 5), )
if mibBuilder.loadTexts: netraCtRscAlarmTable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmTable.setDescription('a table listing the available Alarm Ports.')
netraCtRscAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 5, 1), ).setIndexNames((0, "SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscAlarmIndex"))
if mibBuilder.loadTexts: netraCtRscAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmEntry.setDescription('an entry (conceptual row) in the netraCtRscAlarmTable.')
netraCtRscAlarmIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64)))
if mibBuilder.loadTexts: netraCtRscAlarmIndex.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmIndex.setDescription('row index into Alarm Port table')
netraCtRscAlarmID = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 5, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscAlarmID.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmID.setDescription('Alarm identifier. The current Netra ct 400/800 alarm cards support alarm IDs 0, 1, 2, and 3.')
netraCtRscAlarmOperState = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscAlarmOperState.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmOperState.setDescription('This returns the current value of the Alarm, which may have been set by either software due to environmental conditions, such as Fan failure, or manually, by the setting of netraCtRscAlarmAdminState.')
netraCtRscAlarmAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscAlarmAdminState.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmAdminState.setDescription('This reflects the requested state of an alarm port by the SNMP manager.')
netraCtRscAlarmPrefix = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 5, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscAlarmPrefix.setStatus('current')
if mibBuilder.loadTexts: netraCtRscAlarmPrefix.setDescription('The alarm prefix that associates an FRU type with an alarm. Currently supported definitions are: cpuunit(1), software(2), powersupply(3), temperature(4), fan(5). Values 6 through 9 are reserved for future expansion; 10 through 255 are undefined and can be configured for user-defined messages and alarming.')
netraCtRscFanCount = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscFanCount.setStatus('current')
if mibBuilder.loadTexts: netraCtRscFanCount.setDescription('Maximum number of Fan Tray FRUs in Platform')
netraCtRscFanTable = MibTable((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 7), )
if mibBuilder.loadTexts: netraCtRscFanTable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscFanTable.setDescription('a table listing the characteristics of the Fan Tray FRU.')
netraCtRscFanEntry = MibTableRow((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 7, 1), ).setIndexNames((0, "SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscFanIndex"))
if mibBuilder.loadTexts: netraCtRscFanEntry.setStatus('current')
if mibBuilder.loadTexts: netraCtRscFanEntry.setDescription('an entry (conceptual row) in the netraCtRscFanTable.')
netraCtRscFanIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscFanIndex.setStatus('current')
if mibBuilder.loadTexts: netraCtRscFanIndex.setDescription('row index into Fan FRU table')
netraCtRscFanPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 7, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("true", 1), ("false", 2), ("unknown", 3), ("notimpl", 2147483647)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscFanPresent.setStatus('current')
if mibBuilder.loadTexts: netraCtRscFanPresent.setDescription('Fan FRU is present')
netraCtRscFanStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 7, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 2147483647))).clone(namedValues=NamedValues(("okay", 1), ("failed", 2), ("unknown", 3), ("notimpl", 2147483647)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscFanStatus.setStatus('current')
if mibBuilder.loadTexts: netraCtRscFanStatus.setDescription('The current status of the Fan FRU. unknown will be returned if the status is unknown.')
netraCtRscTemperatureCount = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 63))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscTemperatureCount.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureCount.setDescription('Maximum number of temperature sensors on Platform')
netraCtRscTemperatureTable = MibTable((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9), )
if mibBuilder.loadTexts: netraCtRscTemperatureTable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureTable.setDescription('a table listing the characteristics of the temperature sensors.')
netraCtRscTemperatureEntry = MibTableRow((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9, 1), ).setIndexNames((0, "SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscTemperatureIndex"))
if mibBuilder.loadTexts: netraCtRscTemperatureEntry.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureEntry.setDescription('an entry (conceptual row) in the netraCtRscTemperatureTable.')
netraCtRscTemperatureIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscTemperatureIndex.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureIndex.setDescription('current row of the Temperature sensor')
netraCtRscTemperatureValid = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("true", 1), ("false", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscTemperatureValid.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureValid.setDescription('The current temperature at this sensor is known. If the temperature cannot be obtained, then false will be returned, and netraCtRscTemperatureValue should not be considered to be valid.')
netraCtRscTemperatureValue = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscTemperatureValue.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureValue.setDescription('current temperature at this sensor')
netraCtRscTemperatureLowWarn = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscTemperatureLowWarn.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureLowWarn.setDescription('low warning threshold for this temperature sensor')
netraCtRscTemperatureHighWarn = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscTemperatureHighWarn.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureHighWarn.setDescription('high warning threshold for this temperature sensor')
netraCtRscTemperatureDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 6, 9, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscTemperatureDesc.setStatus('current')
if mibBuilder.loadTexts: netraCtRscTemperatureDesc.setDescription('textual description of the Temperature sensor')
netraCtRscEventLogCount = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscEventLogCount.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEventLogCount.setDescription('current number of lines in the event log')
netraCtRscEventLogTable = MibTable((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 2), )
if mibBuilder.loadTexts: netraCtRscEventLogTable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEventLogTable.setDescription('a table listing the contents of the event log as an array of strings.')
netraCtRscEventLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 2, 1), ).setIndexNames((0, "SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscEventLogIndex"))
if mibBuilder.loadTexts: netraCtRscEventLogEntry.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEventLogEntry.setDescription('an entry (conceptual row) in the netraCtRscEventLogTable.')
netraCtRscEventLogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscEventLogIndex.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEventLogIndex.setDescription('The index of the entry. The oldest entry will start at index 0. The most recent entry will be found at the index equal to netraCtRscEventLogCount - 1.')
netraCtRscEventLogTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 2, 1, 2), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscEventLogTimeStamp.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEventLogTimeStamp.setDescription('event timestamp')
netraCtRscEventLogMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscEventLogMessage.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEventLogMessage.setDescription('textual description of an event.')
netraCtRscOrigConsoleLogCount = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogCount.setStatus('current')
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogCount.setDescription('current number of lines in the original console log.')
netraCtRscOrigConsoleLogTable = MibTable((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 4), )
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogTable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogTable.setDescription('a table listing the contents of the original console log as an array of strings.')
netraCtRscOrigConsoleLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 4, 1), ).setIndexNames((0, "SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscOrigConsoleLogIndex"))
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogEntry.setStatus('current')
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogEntry.setDescription('an entry (conceptual row) in the netraCtRscOrigConsoleLogTable.')
netraCtRscOrigConsoleLogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogIndex.setStatus('current')
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogIndex.setDescription('The index of the entry. The oldest entry will start at index 0. The most recent entry will be found at the index equal to netraCtRscOrigConsoleLogCount - 1.')
netraCtRscOrigConsoleLogTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 4, 1, 2), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogTimeStamp.setStatus('current')
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogTimeStamp.setDescription('Original Console Log timestamp')
netraCtRscOrigConsoleLogMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 4, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogMessage.setStatus('current')
if mibBuilder.loadTexts: netraCtRscOrigConsoleLogMessage.setDescription('textual description of an event.')
netraCtRscConsoleLogCount = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscConsoleLogCount.setStatus('current')
if mibBuilder.loadTexts: netraCtRscConsoleLogCount.setDescription('current number of lines in the console log.')
netraCtRscConsoleLogTable = MibTable((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 6), )
if mibBuilder.loadTexts: netraCtRscConsoleLogTable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscConsoleLogTable.setDescription('a table listing the contents of the console log as an array of strings.')
netraCtRscConsoleLogEntry = MibTableRow((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 6, 1), ).setIndexNames((0, "SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscConsoleLogIndex"))
if mibBuilder.loadTexts: netraCtRscConsoleLogEntry.setStatus('current')
if mibBuilder.loadTexts: netraCtRscConsoleLogEntry.setDescription('an entry (conceptual row) in the netraCtRscConsoleLogTable.')
netraCtRscConsoleLogIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscConsoleLogIndex.setStatus('current')
if mibBuilder.loadTexts: netraCtRscConsoleLogIndex.setDescription('The index of the entry. The oldest entry will start at index 0. The most recent entry will be found at the index equal to netraCtRscConsoleLogCount - 1.')
netraCtRscConsoleLogTimeStamp = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 6, 1, 2), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscConsoleLogTimeStamp.setStatus('current')
if mibBuilder.loadTexts: netraCtRscConsoleLogTimeStamp.setDescription('Console Log timestamp')
netraCtRscConsoleLogMessage = MibTableColumn((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 6, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: netraCtRscConsoleLogMessage.setStatus('current')
if mibBuilder.loadTexts: netraCtRscConsoleLogMessage.setDescription('textual description of an event.')
netraCtRscConsoleReset = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 1, 7, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("set", 1), ("clear", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscConsoleReset.setStatus('current')
if mibBuilder.loadTexts: netraCtRscConsoleReset.setDescription('When set, the console log is reset: the current log is copied over to the original console log, and the console log is cleared. All new console messages then go to the console log. The clear(2) setting is read-only.')
netraCtRscRCCPowerOnEnable = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscRCCPowerOnEnable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscRCCPowerOnEnable.setDescription('If set to off(2), the RCC PowerOn command is masked.')
netraCtRscRCCPowerOffEnable = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscRCCPowerOffEnable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscRCCPowerOffEnable.setDescription('If set to off(2), the RCC PowerOff command is masked.')
netraCtRscRCCResetEnable = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 2147483647))).clone(namedValues=NamedValues(("on", 1), ("off", 2), ("notimpl", 2147483647)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscRCCResetEnable.setStatus('current')
if mibBuilder.loadTexts: netraCtRscRCCResetEnable.setDescription('If set to off(2), the RCC Reset command is masked.')
netraCtRscRCCLinkNum = MibScalar((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 3, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: netraCtRscRCCLinkNum.setStatus('current')
if mibBuilder.loadTexts: netraCtRscRCCLinkNum.setDescription('2-byte string that defines the RCC Linknum address.')
netraCtRscEvent = NotificationType((1, 3, 6, 1, 4, 1, 42, 2, 65, 2, 2, 0, 1)).setObjects(("SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscAlarmID"), ("SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscAlarmOperState"), ("SUN-SNMP-NETRA-CT-RSC-MIB", "netraCtRscAlarmPrefix"))
if mibBuilder.loadTexts: netraCtRscEvent.setStatus('current')
if mibBuilder.loadTexts: netraCtRscEvent.setDescription('Event used to notify the SNMP manager of a new RSC event. An event is generated when netraCtRscAlarmOperState in the netraCtRscAlarmTable changes state.')
mibBuilder.exportSymbols("SUN-SNMP-NETRA-CT-RSC-MIB", netraCtRscExpmnt=netraCtRscExpmnt, netraCtRscMailUser=netraCtRscMailUser, netraCtRscSerial2PagerOneConfig=netraCtRscSerial2PagerOneConfig, netraCtRscOrigConsoleLogTimeStamp=netraCtRscOrigConsoleLogTimeStamp, netraCtRscVersionFirmwareMicro=netraCtRscVersionFirmwareMicro, netraCtRscEventLogTimeStamp=netraCtRscEventLogTimeStamp, netraCtRscPanicDump=netraCtRscPanicDump, netraCtRscMailHostAddressBackup=netraCtRscMailHostAddressBackup, netraCtRscOrigConsoleLogIndex=netraCtRscOrigConsoleLogIndex, netraCtRscCountryCode=netraCtRscCountryCode, netraCtRscRccConfig=netraCtRscRccConfig, netraCtRscSerial2PagerTwoBaud=netraCtRscSerial2PagerTwoBaud, netraCtRscPowerSupplyAdminState=netraCtRscPowerSupplyAdminState, netraCtRscIPAddress=netraCtRscIPAddress, netraCtRscEventLogTable=netraCtRscEventLogTable, netraCtRscSNMPHostAddress=netraCtRscSNMPHostAddress, netraCtRscPowerSupplyOperState=netraCtRscPowerSupplyOperState, netraCtRscEventLogEntry=netraCtRscEventLogEntry, netraCtRscCustomerInfo=netraCtRscCustomerInfo, netraCtRscFanTable=netraCtRscFanTable, netraCtRscEvent=netraCtRscEvent, netraCtRscGlobalPPPFlag=netraCtRscGlobalPPPFlag, netraCtRscConfigObjs=netraCtRscConfigObjs, netraCtRscTemperatureIndex=netraCtRscTemperatureIndex, netraCtRscObjs=netraCtRscObjs, netraCtRscAlarmID=netraCtRscAlarmID, netraCtRscEnetTpeLinkTest=netraCtRscEnetTpeLinkTest, DateAndTime=DateAndTime, netraCtRscVersionFirmwareMinor=netraCtRscVersionFirmwareMinor, netraCtRscSystemType=netraCtRscSystemType, netraCtRscAlarmOperState=netraCtRscAlarmOperState, netraCtRscAdminXir=netraCtRscAdminXir, netraCtRscConsoleLogCount=netraCtRscConsoleLogCount, netraCtRscPowerSupplyEntry=netraCtRscPowerSupplyEntry, netraCtRscSerial2Data=netraCtRscSerial2Data, netraCtRscOrigConsoleLogEntry=netraCtRscOrigConsoleLogEntry, netraCtRscHostname=netraCtRscHostname, netraCtRscEnvObjs=netraCtRscEnvObjs, netraCtRscSerial2PagerOneStop=netraCtRscSerial2PagerOneStop, netraCtRscVersionMainMinor=netraCtRscVersionMainMinor, netraCtRscSerial2Parity=netraCtRscSerial2Parity, netraCtRscSerial2PagerTwoPassword=netraCtRscSerial2PagerTwoPassword, netraCtRscSerial2Objs=netraCtRscSerial2Objs, netraCtRscVersionMainMicro=netraCtRscVersionMainMicro, netraCtRscConsoleLogTable=netraCtRscConsoleLogTable, netraCtRscSerial2PagerOneInit=netraCtRscSerial2PagerOneInit, netraCtRscVersionFirmwareMajor=netraCtRscVersionFirmwareMajor, netraCtRscEnetObjs=netraCtRscEnetObjs, netraCtRscSerial2PagerTwoInit=netraCtRscSerial2PagerTwoInit, netraCtRscAlarmTable=netraCtRscAlarmTable, netraCtRscOrigConsoleLogTable=netraCtRscOrigConsoleLogTable, netraCtRscVersionBootMinor=netraCtRscVersionBootMinor, netraCtRscVersionBootMajor=netraCtRscVersionBootMajor, netraCtRscSerial2PagerOneParity=netraCtRscSerial2PagerOneParity, netraCtRscAdminBreak=netraCtRscAdminBreak, netraCtRscSerial2Stop=netraCtRscSerial2Stop, netraCtRscFanPresent=netraCtRscFanPresent, netraCtRscIpGateway=netraCtRscIpGateway, netraCtRscTrapPrefix=netraCtRscTrapPrefix, netraCtRscVersionBootMicro=netraCtRscVersionBootMicro, netraCtRscTemperatureEntry=netraCtRscTemperatureEntry, netraCtRscSerial2Mode=netraCtRscSerial2Mode, netraCtRscMacAddress=netraCtRscMacAddress, netraCtRscFanIndex=netraCtRscFanIndex, netraCtRscEscape=netraCtRscEscape, netraCtRscFanEntry=netraCtRscFanEntry, netraCtRscAlarmIndex=netraCtRscAlarmIndex, netraCtRscAlarmAdminState=netraCtRscAlarmAdminState, netraCtRscPPPRemoteIP=netraCtRscPPPRemoteIP, netraCtRscPowerSupplyCount=netraCtRscPowerSupplyCount, 
netraCtRscTemperatureValue=netraCtRscTemperatureValue, netraCtRscAdminObjs=netraCtRscAdminObjs, netraCtRscGlobalEmailFlag=netraCtRscGlobalEmailFlag, netraCtRscPowerSupplyTable=netraCtRscPowerSupplyTable, netraCtRscConsoleLogMessage=netraCtRscConsoleLogMessage, netraCtRscConsoleReset=netraCtRscConsoleReset, netraCtRscTemperatureDesc=netraCtRscTemperatureDesc, netraCtRscSerial2Baud=netraCtRscSerial2Baud, netraCtRscTemperatureValid=netraCtRscTemperatureValid, netraCtRscSerial2HwFlowcontrol=netraCtRscSerial2HwFlowcontrol, netraCtRscModemStop=netraCtRscModemStop, PYSNMP_MODULE_ID=netraCtRscMIB, netraCtRscRCCResetEnable=netraCtRscRCCResetEnable, netraCtRscTemperatureTable=netraCtRscTemperatureTable, netraCtRscConsoleLogIndex=netraCtRscConsoleLogIndex, netraCtRscLogObjs=netraCtRscLogObjs, netraCtRscRCCLinkNum=netraCtRscRCCLinkNum, netraCtRscPowerSupplyPresent=netraCtRscPowerSupplyPresent, netraCtRscOrigConsoleLogMessage=netraCtRscOrigConsoleLogMessage, netraCtRscSerial2Inactivity=netraCtRscSerial2Inactivity, netraCtRscOrigConsoleLogCount=netraCtRscOrigConsoleLogCount, netraCtRscSerial2PagerOneData=netraCtRscSerial2PagerOneData, netraCtRscPPPLocalIP=netraCtRscPPPLocalIP, netraCtRscTOD=netraCtRscTOD, netraCtRscConsoleLogEntry=netraCtRscConsoleLogEntry, netraCtRscTemperatureHighWarn=netraCtRscTemperatureHighWarn, netraCtRscModemModel=netraCtRscModemModel, netraCtRscConsoleLogTimeStamp=netraCtRscConsoleLogTimeStamp, netraCtRscEventLogCount=netraCtRscEventLogCount, netraCtRscSerial2PagerOneBaud=netraCtRscSerial2PagerOneBaud, netraCtRscAlarmPrefix=netraCtRscAlarmPrefix, netraCtRscSerial2PagerTwoConfig=netraCtRscSerial2PagerTwoConfig, netraCtRscMailHostAddress=netraCtRscMailHostAddress, netraCtRscHostWatchDogTimeout=netraCtRscHostWatchDogTimeout, netraCtRscTemperatureCount=netraCtRscTemperatureCount, netraCtRscModemData=netraCtRscModemData, netraCtRscSerial2PagerTwoData=netraCtRscSerial2PagerTwoData, netraCtRscAdminNmi=netraCtRscAdminNmi, netraCtRscAlarmEntry=netraCtRscAlarmEntry, netraCtRscSerial2PagerTwoParity=netraCtRscSerial2PagerTwoParity, netraCtRscEvents=netraCtRscEvents, netraCtRscAdminRscReset=netraCtRscAdminRscReset, netraCtRscFanCount=netraCtRscFanCount, netraCtRscPowerSupplyIndex=netraCtRscPowerSupplyIndex, netraCtRscFanStatus=netraCtRscFanStatus, netraCtRscTemperatureLowWarn=netraCtRscTemperatureLowWarn, netraCtRscAlarmCount=netraCtRscAlarmCount, netraCtRscIpMask=netraCtRscIpMask, netraCtRscGlobalPageFlag=netraCtRscGlobalPageFlag, netraCtRscRCCPowerOnEnable=netraCtRscRCCPowerOnEnable, netraCtRscMIB=netraCtRscMIB, netraCtRscHostWatchDogReboot=netraCtRscHostWatchDogReboot, netraCtRscGlobalIPModeFlag=netraCtRscGlobalIPModeFlag, netraCtRscModemParity=netraCtRscModemParity, netraCtRscRCCPowerOffEnable=netraCtRscRCCPowerOffEnable, netraCtRscModemObjs=netraCtRscModemObjs, netraCtRscEventLogMessage=netraCtRscEventLogMessage, netraCtRscVersionMainMajor=netraCtRscVersionMainMajor, netraCtRscSerial2PagerOnePassword=netraCtRscSerial2PagerOnePassword, netraCtRscAdminHostReset=netraCtRscAdminHostReset, netraCtRscSerial2PagerTwoStop=netraCtRscSerial2PagerTwoStop, netraCtRscEventLogIndex=netraCtRscEventLogIndex)
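# Usage sketch (hypothetical manager-side query; the agent address 192.0.2.1
# and the community string 'public' are assumptions, not part of this MIB):
#
#   from pysnmp.hlapi import *
#   errInd, errStat, errIdx, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public'),
#       UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
#       ObjectType(ObjectIdentity('SUN-SNMP-NETRA-CT-RSC-MIB',
#                                 'netraCtRscIPAddress', 0))))
#
# With this module on the pysnmp MIB search path, the symbolic name resolves
# to the netraCtRscIPAddress scalar defined above.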
from django.core.management.base import BaseCommand
from django.conf import settings
from ..utilities.modelwriter import *
class Command(BaseCommand):
help = 'Add a new model to an app.'
def add_arguments(self, parser):
parser.add_argument(
'app_name',
action='store',
help='App name',
)
def handle(self, *args, **options):
context={
'app_name': options['app_name'],
}
ModelsFile().write(context)
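# Invocation sketch (hypothetical; the management command's name is the
# module's filename, which is not shown in this snippet):
#
#   python manage.py <this_command> blog
#
# builds a template context of {'app_name': 'blog'} and hands it to
# ModelsFile().write(), generating a models.py scaffold for the "blog" app.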
from test.vim_test_case import VimTestCase as _VimTest
from test.constant import *
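# Key constants imported from test.constant (meanings inferred from their use
# below): EX expands the snippet trigger, JF jumps forward to the next
# tabstop, BS is backspace, and ARR_L is the left-arrow key.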
# Recursive (Nested) Snippets {{{#
class RecTabStops_SimpleCase_ExpectCorrectResult(_VimTest):
snippets = ('m', '[ ${1:first} ${2:sec} ]')
keys = 'm' + EX + 'm' + EX + 'hello' + \
JF + 'world' + JF + 'ups' + JF + 'end'
wanted = '[ [ hello world ]ups end ]'
class RecTabStops_SimpleCaseLeaveSecondSecond_ExpectCorrectResult(_VimTest):
snippets = ('m', '[ ${1:first} ${2:sec} ]')
keys = 'm' + EX + 'm' + EX + 'hello' + JF + 'world' + JF + JF + JF + 'end'
wanted = '[ [ hello world ] sec ]end'
class RecTabStops_SimpleCaseLeaveFirstSecond_ExpectCorrectResult(_VimTest):
snippets = ('m', '[ ${1:first} ${2:sec} ]')
keys = 'm' + EX + 'm' + EX + 'hello' + JF + JF + JF + 'world' + JF + 'end'
wanted = '[ [ hello sec ] world ]end'
class RecTabStops_InnerWOTabStop_ECR(_VimTest):
snippets = (
('m1', 'Just some Text'),
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm1' + EX + 'hi' + JF + 'two' + JF + 'end'
wanted = '[ Just some Texthi two ]end'
class RecTabStops_InnerWOTabStopTwiceDirectly_ECR(_VimTest):
snippets = (
('m1', 'JST'),
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm1' + EX + ' m1' + EX + 'hi' + JF + 'two' + JF + 'end'
wanted = '[ JST JSThi two ]end'
class RecTabStops_InnerWOTabStopTwice_ECR(_VimTest):
snippets = (
('m1', 'JST'),
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm1' + EX + JF + 'm1' + EX + 'hi' + JF + 'end'
wanted = '[ JST JSThi ]end'
class RecTabStops_OuterOnlyWithZeroTS_ECR(_VimTest):
snippets = (
('m', 'A $0 B'),
('m1', 'C $1 D $0 E'),
)
keys = 'm' + EX + 'm1' + EX + 'CD' + JF + 'DE'
wanted = 'A C CD D DE E B'
class RecTabStops_OuterOnlyWithZero_ECR(_VimTest):
snippets = (
('m', 'A $0 B'),
('m1', 'C $1 D $0 E'),
)
keys = 'm' + EX + 'm1' + EX + 'CD' + JF + 'DE'
wanted = 'A C CD D DE E B'
class RecTabStops_ExpandedInZeroTS_ECR(_VimTest):
snippets = (
('m', 'A $0 B $1'),
('m1', 'C $1 D $0 E'),
)
keys = 'm' + EX + 'hi' + JF + 'm1' + EX + 'CD' + JF + 'DE'
wanted = 'A C CD D DE E B hi'
class RecTabStops_ExpandedInZeroTSTwice_ECR(_VimTest):
snippets = (
('m', 'A $0 B $1'),
('m1', 'C $1 D $0 E'),
)
keys = 'm' + EX + 'hi' + JF + 'm' + EX + 'again' + JF + 'm1' + \
EX + 'CD' + JF + 'DE'
wanted = 'A A C CD D DE E B again B hi'
class RecTabStops_ExpandedInZeroTSSecondTime_ECR(_VimTest):
snippets = (
('m', 'A $0 B $1'),
('m1', 'C $1 D $0 E'),
)
keys = 'm' + EX + 'hi' + JF + 'm' + EX + \
'm1' + EX + 'CD' + JF + 'DE' + JF + 'AB'
wanted = 'A A AB B C CD D DE E B hi'
class RecTabsStops_TypeInZero_ECR(_VimTest):
snippets = (
('v', r"\vec{$1}", 'Vector', 'w'),
        ('frac', r"\frac{${1:one}}${0:zero}{${2:two}}", 'Fraction', 'w'),
)
keys = 'v' + EX + 'frac' + EX + 'a' + JF + 'b' + JF + 'frac' + EX + 'aa' + JF + JF + 'cc' + JF + \
'hello frac' + EX + JF + JF + 'world'
wanted = r"\vec{\frac{a}\frac{aa}cc{two}{b}}hello \frac{one}world{two}"
class RecTabsStops_TypeInZero2_ECR(_VimTest):
snippets = (
('m', r"_${0:explicit zero}", 'snip', 'i'),
)
keys = 'm' + EX + 'hello m' + EX + 'world m' + EX + 'end'
wanted = r"_hello _world _end"
class RecTabsStops_BackspaceZero_ECR(_VimTest):
snippets = (
('m', r"${1:one}${0:explicit zero}${2:two}", 'snip', 'i'),
)
keys = 'm' + EX + JF + JF + BS + 'm' + EX
wanted = r"oneoneexplicit zerotwotwo"
class RecTabStops_MirrorInnerSnippet_ECR(_VimTest):
snippets = (
('m', '[ $1 $2 ] $1'),
('m1', 'ASnip $1 ASnip $2 ASnip'),
)
keys = 'm' + EX + 'm1' + EX + 'Hallo' + JF + 'Hi' + \
JF + 'endone' + JF + 'two' + JF + 'totalend'
wanted = '[ ASnip Hallo ASnip Hi ASnipendone two ] ASnip Hallo ASnip Hi ASnipendonetotalend'
class RecTabStops_NotAtBeginningOfTS_ExpectCorrectResult(_VimTest):
snippets = ('m', '[ ${1:first} ${2:sec} ]')
keys = 'm' + EX + 'hello m' + EX + 'hi' + JF + 'two' + JF + 'ups' + JF + 'three' + \
JF + 'end'
wanted = '[ hello [ hi two ]ups three ]end'
class RecTabStops_InNewlineInTabstop_ExpectCorrectResult(_VimTest):
snippets = ('m', '[ ${1:first} ${2:sec} ]')
keys = 'm' + EX + 'hello\nm' + EX + 'hi' + JF + 'two' + JF + 'ups' + JF + 'three' + \
JF + 'end'
wanted = '[ hello\n[ hi two ]ups three ]end'
class RecTabStops_InNewlineInTabstopNotAtBeginOfLine_ECR(_VimTest):
snippets = ('m', '[ ${1:first} ${2:sec} ]')
keys = 'm' + EX + 'hello\nhello again m' + EX + 'hi' + JF + 'two' + \
JF + 'ups' + JF + 'three' + JF + 'end'
wanted = '[ hello\nhello again [ hi two ]ups three ]end'
class RecTabStops_InNewlineMultiline_ECR(_VimTest):
snippets = ('m', 'M START\n$0\nM END')
keys = 'm' + EX + 'm' + EX
wanted = 'M START\nM START\n\nM END\nM END'
class RecTabStops_InNewlineManualIndent_ECR(_VimTest):
snippets = ('m', 'M START\n$0\nM END')
keys = 'm' + EX + ' m' + EX + 'hi'
wanted = 'M START\n M START\n hi\n M END\nM END'
class RecTabStops_InNewlineManualIndentTextInFront_ECR(_VimTest):
snippets = ('m', 'M START\n$0\nM END')
keys = 'm' + EX + ' hallo m' + EX + 'hi'
wanted = 'M START\n hallo M START\n hi\n M END\nM END'
class RecTabStops_InNewlineMultilineWithIndent_ECR(_VimTest):
snippets = ('m', 'M START\n $0\nM END')
keys = 'm' + EX + 'm' + EX + 'hi'
wanted = 'M START\n M START\n hi\n M END\nM END'
class RecTabStops_InNewlineMultilineWithNonZeroTS_ECR(_VimTest):
snippets = ('m', 'M START\n $1\nM END -> $0')
keys = 'm' + EX + 'm' + EX + 'hi' + JF + 'hallo' + JF + 'end'
wanted = 'M START\n M START\n hi\n M END -> hallo\n' \
'M END -> end'
class RecTabStops_BarelyNotLeavingInner_ECR(_VimTest):
snippets = (
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm' + EX + 'a' + 3 * ARR_L + JF + 'hallo' + \
JF + 'ups' + JF + 'world' + JF + 'end'
wanted = '[ [ a hallo ]ups world ]end'
class RecTabStops_LeavingInner_ECR(_VimTest):
snippets = (
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm' + EX + 'a' + 4 * ARR_L + JF + 'hallo' + \
JF + 'world'
wanted = '[ [ a sec ] hallo ]world'
class RecTabStops_LeavingInnerInner_ECR(_VimTest):
snippets = (
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm' + EX + 'm' + EX + 'a' + 4 * ARR_L + JF + 'hallo' + \
JF + 'ups' + JF + 'world' + JF + 'end'
wanted = '[ [ [ a sec ] hallo ]ups world ]end'
class RecTabStops_LeavingInnerInnerTwo_ECR(_VimTest):
snippets = (
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm' + EX + 'm' + EX + 'a' + 6 * ARR_L + JF + 'hallo' + \
JF + 'end'
wanted = '[ [ [ a sec ] sec ] hallo ]end'
class RecTabStops_ZeroTSisNothingSpecial_ECR(_VimTest):
snippets = (
('m1', '[ ${1:first} $0 ${2:sec} ]'),
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm1' + EX + 'one' + JF + 'two' + \
JF + 'three' + JF + 'four' + JF + 'end'
wanted = '[ [ one three two ] four ]end'
class RecTabStops_MirroredZeroTS_ECR(_VimTest):
snippets = (
('m1', '[ ${1:first} ${0:Year, some default text} $0 ${2:sec} ]'),
('m', '[ ${1:first} ${2:sec} ]'),
)
keys = 'm' + EX + 'm1' + EX + 'one' + JF + 'two' + \
JF + 'three' + JF + 'four' + JF + 'end'
wanted = '[ [ one three three two ] four ]end'
class RecTabStops_ChildTriggerContainsParentTextObjects(_VimTest):
# https://bugs.launchpad.net/bugs/1191617
files = { 'us/all.snippets': r"""
global !p
def complete(t, opts):
if t:
opts = [ q[len(t):] for q in opts if q.startswith(t) ]
if len(opts) == 0:
return ''
return opts[0] if len(opts) == 1 else "(" + '|'.join(opts) + ')'
def autocomplete_options(t, string, attr=None):
return complete(t[1], [opt for opt in attr if opt not in string])
endglobal
snippet /form_for(.*){([^|]*)/ "form_for html options" rw!
`!p
auto = autocomplete_options(t, match.group(2), attr=["id: ", "class: ", "title: "])
snip.rv = "form_for" + match.group(1) + "{"`$1`!p if (snip.c != auto) : snip.rv=auto`
endsnippet
"""}
keys = 'form_for user, namespace: some_namespace, html: {i' + EX + 'i' + EX
wanted = 'form_for user, namespace: some_namespace, html: {(id: |class: |title: )d: '
# End: Recursive (Nested) Snippets #}}}
from disnake import CommandInteraction, Embed, Thread
from disnake.ext.commands import Cog, Param, slash_command
from src import Bot
from src.impl.database import Channel, ChannelMap, Message
from src.impl.utils import is_administrator
class Core(Cog):
def __init__(self, bot: Bot) -> None:
self.bot = bot
@slash_command(
name="status",
description="Get the status of the bot",
)
@is_administrator()
async def status(self, itr: CommandInteraction) -> None:
await itr.response.defer()
channels = await Channel.objects.count()
dchannels = await ChannelMap.objects.count()
messages = await Message.objects.count()
embed = Embed(
title="CrossChat Status",
colour=0x87CEEB,
description=(
f"Connected as {self.bot.user}\n"
f"Latency: {self.bot.latency * 1000:.2f}ms\n"
f"Guilds: {len(self.bot.guilds)}\n"
),
)
embed.add_field(
name="Channels",
value=f"Virtual: {channels}\nDiscord: {dchannels}",
)
embed.add_field(
name="Messages",
value=f"Total: {messages}",
)
await itr.send(embed=embed)
@slash_command(
name="setup",
description="Setup a channel for CrossChat",
)
@is_administrator()
async def setup(
self,
itr: CommandInteraction,
channel: str = Param(desc="The CrossChat channel to connect to"),
) -> None:
vchannel = self.bot.vchannels.get(channel, None)
if vchannel is None:
await itr.send(f"Channel {channel} does not exist.")
return
if isinstance(itr.channel, Thread):
await vchannel.join(itr.channel.parent_id, itr.channel.id)
else:
await vchannel.join(itr.channel.id)
await itr.send(f"Mapped channel {itr.channel.id} to CC:{channel}")
@slash_command(
name="unlink",
description="Unlink a channel from CrossChat",
)
@is_administrator()
async def unlink(
self,
itr: CommandInteraction,
) -> None:
vchannel = self.bot.resolve_channel(itr.channel.id)
if vchannel is None:
await itr.send(f"Channel {itr.channel.id} is not linked.")
return
await vchannel.leave(itr.channel.id)
await itr.send(f"Unlinked channel {itr.channel.id} from CC:{vchannel.channel.name}")
def setup(bot: Bot) -> None:
bot.add_cog(Core(bot))
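# Loading sketch (the extension path "src.plugins.core" is hypothetical):
# calling bot.load_extension("src.plugins.core") invokes setup() above, which
# registers the Core cog and its /status, /setup, and /unlink slash commands.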
from .models import Folder, MEDIA_MODELS
def handle_uploaded_file(file, folder=None, is_public=True):
    '''Handle an uploaded file into a folder.
    Matches the first media type, creates the media object, and returns it.
    file: File object
    folder: str or Folder instance
    is_public: boolean
    '''
_folder = None
if folder and isinstance(folder, Folder):
_folder = folder
elif folder:
_folder, folder_created = Folder.objects.get_or_create(
name=folder)
for cls in MEDIA_MODELS:
if cls.matches_file_type(file.name):
obj, created = cls.objects.get_or_create(
original_filename=file.name,
file=file,
folder=_folder,
is_public=is_public)
if created:
return obj
return None
def handle_uploaded_files(files, folder=None, is_public=True):
    '''Handle uploaded files into a folder.
    files: iterable of File objects
    folder: str or Folder instance
    is_public: boolean
    '''
results = []
for f in files:
result = handle_uploaded_file(f, folder, is_public)
results.append(result)
return results
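# Usage sketch (hypothetical Django view; the form field name "attachments"
# is an assumption for illustration):
#
#   def upload_view(request):
#       media_objects = handle_uploaded_files(
#           request.FILES.getlist('attachments'),
#           folder='incoming',   # Folder created on demand via get_or_create
#           is_public=False,
#       )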
# Copyright 2021 The SeqIO Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for seqio.preprocessors."""
from absl.testing import absltest
from seqio import dataset_providers
from seqio import experimental
from seqio import test_utils
from seqio import utils
from seqio import vocabularies
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
assert_dataset = test_utils.assert_dataset
Feature = dataset_providers.Feature
CacheDatasetPlaceholder = dataset_providers.CacheDatasetPlaceholder
MixtureRegistry = dataset_providers.MixtureRegistry
TaskRegistry = dataset_providers.TaskRegistry
ShardInfo = dataset_providers.ShardInfo
class FullyCachedTaskTest(absltest.TestCase):
def setUp(self):
super().setUp()
TaskRegistry.reset()
MixtureRegistry.reset()
self.fake_source = dataset_providers.FunctionDataSource(
lambda split, shuffle_files: tf.data.Dataset.range(2), ['train'])
self.vocabulary = vocabularies.PassThroughVocabulary(100)
self.metrics_fns = [lambda targets, predictions: 0]
def fake_preprocessor(ds):
"""Adds one and casts to int32."""
return ds.map(lambda x: tf.cast(x+1, tf.int32))
def fake_preprocessor_of(ds, output_features):
"""Creates output feature dict from scalar input."""
return ds.map(lambda x: {k: [x] for k in output_features})
def fake_preprocessor_sl(ds, sequence_length):
"""Concatenates the sequence length to each feature."""
return ds.map(
lambda x: { # pylint:disable=g-long-lambda
k: tf.concat([v, [sequence_length[k]]], 0) for k, v in x.items()
})
def fake_preprocessor_sl_of(ds, sequence_length, output_features):
"""Adds the sequence length to each feature with `add_eos` enabled."""
return ds.map(
lambda x: { # pylint:disable=g-long-lambda
k: tf.concat([v, [sequence_length[k]]], 0)
if output_features[k].add_eos else v for k, v in x.items()
})
self.preprocessors = [
fake_preprocessor,
fake_preprocessor_of,
fake_preprocessor_sl,
fake_preprocessor_sl_of,
]
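    # The four fakes above cover the preprocessor signatures SeqIO inspects
    # when injecting arguments: (ds), (ds, output_features),
    # (ds, sequence_length), and (ds, sequence_length, output_features).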
def validate_fully_cached_task(
self, name, sequence_length, actual_sequence_length, expected_dataset):
new_task = TaskRegistry.get(name)
self.assertLen(new_task.preprocessors, 6)
self.assertEqual(new_task.metric_fns, self.metrics_fns)
self.assertIsInstance(new_task.preprocessors[-2], CacheDatasetPlaceholder)
self.assertTrue(new_task.preprocessors[-2].required)
with self.assertRaisesWithLiteralMatch(
ValueError,
f"Task '{name}' requires caching, but was called with "
"`use_cached=False`."):
new_task.get_dataset(None)
# Disable caching restriction to verify dataset is correct.
new_task.preprocessors[-2]._required = False
with self.assertRaisesWithLiteralMatch(
ValueError,
f"Fully-cached task '{name}' can only be loaded with "
f'`sequence_length={sequence_length}` or `None`. '
f'Given sequence_length={actual_sequence_length}.'):
new_task.get_dataset(
{k: v+1 for k, v in sequence_length.items()},
use_cached=False)
assert_dataset(
new_task.get_dataset(None, shuffle=False),
expected_dataset)
assert_dataset(
new_task.get_dataset(sequence_length, shuffle=False),
expected_dataset)
def test_add_fully_cached_task(self):
preprocessors = list(self.preprocessors)
preprocessors.insert(2, CacheDatasetPlaceholder())
TaskRegistry.add(
'encoder_decoder_task',
source=self.fake_source,
preprocessors=preprocessors,
output_features={
'inputs': Feature(self.vocabulary, add_eos=True),
'targets': Feature(self.vocabulary, add_eos=False)
},
metric_fns=self.metrics_fns)
sequence_length = {'inputs': 5, 'targets': 6}
actual_sequence_length = {'inputs': 6, 'targets': 7}
experimental.add_fully_cached_task('encoder_decoder_task', sequence_length)
self.validate_fully_cached_task(
'encoder_decoder_task_i5_t6',
sequence_length,
actual_sequence_length,
[
{'inputs': [1, 5, 5], 'targets': [1, 6]},
{'inputs': [2, 5, 5], 'targets': [2, 6]},
])
def test_add_fully_cached_task_single_feature(self):
TaskRegistry.add(
'decoder_task',
source=self.fake_source,
preprocessors=self.preprocessors,
output_features={
'targets': Feature(self.vocabulary, add_eos=True)
},
metric_fns=self.metrics_fns)
sequence_length = {'targets': 6}
actual_sequence_length = {'targets': 7}
experimental.add_fully_cached_task('decoder_task', sequence_length)
self.validate_fully_cached_task(
'decoder_task_6',
sequence_length,
actual_sequence_length,
[
{'targets': [1, 6, 6]},
{'targets': [2, 6, 6]},
])
def test_add_fully_cached_task_unique_prefix(self):
TaskRegistry.add(
'feature_prefix_task',
source=self.fake_source,
preprocessors=self.preprocessors,
output_features={
'tar': Feature(self.vocabulary, add_eos=True),
'targets': Feature(self.vocabulary, add_eos=False)
},
metric_fns=self.metrics_fns)
sequence_length = {'tar': 5, 'targets': 6}
actual_sequence_length = {'tar': 6, 'targets': 7}
experimental.add_fully_cached_task(
'feature_prefix_task', sequence_length)
self.validate_fully_cached_task(
'feature_prefix_task_tar5_targ6',
sequence_length,
actual_sequence_length,
[
{'tar': [1, 5, 5], 'targets': [1, 6]},
{'tar': [2, 5, 5], 'targets': [2, 6]},
])
def test_add_fully_cached_task_disallow_shuffling(self):
TaskRegistry.add(
'decoder_task',
source=self.fake_source,
preprocessors=self.preprocessors,
output_features={
'targets': Feature(self.vocabulary, add_eos=True)
},
metric_fns=self.metrics_fns)
sequence_length = {'targets': 6}
new_task = experimental.add_fully_cached_task(
'decoder_task', sequence_length, disallow_shuffling=True)
# Disable caching restriction to get past cache check.
new_task.preprocessors[-2]._required = False
with self.assertRaisesWithLiteralMatch(
ValueError,
"Shuffling is disallowed for Task 'decoder_task_6' since its "
'`shuffle_buffer_size` was set to `None` on construction.'):
new_task.get_dataset(None, shuffle=True, use_cached=False)
new_task.get_dataset(None, shuffle=False, use_cached=False)
def test_add_fully_cached_mixture(self):
TaskRegistry.add(
'task1',
source=self.fake_source,
preprocessors=self.preprocessors,
output_features={
'targets': Feature(self.vocabulary, add_eos=False)
},
metric_fns=self.metrics_fns)
TaskRegistry.add(
'task2',
source=self.fake_source,
preprocessors=self.preprocessors,
output_features={
'targets': Feature(self.vocabulary, add_eos=True)
},
metric_fns=self.metrics_fns)
MixtureRegistry.add('mix', [('task1', 2), ('task2', lambda x: 1)])
experimental.add_fully_cached_mixture('mix', sequence_length={'targets': 6})
new_mix = MixtureRegistry.get('mix_6')
new_task_names = ('task1_6', 'task2_6')
self.assertContainsSubset(new_task_names, TaskRegistry.names())
new_tasks = [TaskRegistry.get(n) for n in new_task_names]
self.assertCountEqual(new_tasks, new_mix.tasks)
self.assertEqual(new_mix.get_rate(new_tasks[0]), 2)
self.assertEqual(new_mix.get_rate(new_tasks[1]), 1)
with self.assertRaisesWithLiteralMatch(
ValueError,
"Task 'task1_6' requires caching, but was called with "
"`use_cached=False`."):
new_mix.get_dataset(None)
# Disable caching restriction to get past cache check.
for t in new_tasks:
t.preprocessors[-2]._required = False
with self.assertRaisesWithLiteralMatch(
ValueError,
"Fully-cached task 'task1_6' can only be loaded with "
"`sequence_length={'targets': 6}` or `None`. "
"Given sequence_length={'targets': 7}."):
new_mix.get_dataset({'targets': 7}, use_cached=False)
expected_dataset = [
{'targets': [1, 6, 6]},
{'targets': [2, 6, 6]},
{'targets': [1, 6]},
{'targets': [1, 6, 6]},
{'targets': [2, 6]},
{'targets': [2, 6, 6]},
]
assert_dataset(
new_mix.get_dataset(None, shuffle=False).take(6),
expected_dataset)
assert_dataset(
new_mix.get_dataset({'targets': 6}, shuffle=False).take(6),
expected_dataset)
def test_add_fully_cached_mixture_disallow_shuffling(self):
TaskRegistry.add(
'task1',
source=self.fake_source,
preprocessors=self.preprocessors,
output_features={
'targets': Feature(self.vocabulary, add_eos=False)
},
metric_fns=self.metrics_fns)
TaskRegistry.add(
'task2',
source=self.fake_source,
preprocessors=self.preprocessors,
output_features={
'targets': Feature(self.vocabulary, add_eos=True)
},
metric_fns=self.metrics_fns)
MixtureRegistry.add('mix', [('task1', 2), ('task2', lambda x: 1)])
new_mixture = experimental.add_fully_cached_mixture(
'mix', sequence_length={'targets': 6}, disallow_shuffling=True)
# Disable caching restriction to get past cache check.
for t in new_mixture.tasks:
t.preprocessors[-2]._required = False
with self.assertRaisesWithLiteralMatch(
ValueError,
"Shuffling is disallowed for Task 'task1_6' since its "
'`shuffle_buffer_size` was set to `None` on construction.'):
new_mixture.get_dataset(None, shuffle=True, use_cached=False)
new_mixture.get_dataset(None, shuffle=False, use_cached=False)
class FewshotTest(absltest.TestCase):
def test_fewshot_data_source(self):
def fake_dataset_fn(split, shuffle_files, seed=None):
# Note that for the purposes of this unit test, fake_dataset_fn
# deliberately does not properly implement shuffling. We test whether
# FewShotDataSource is robust to this.
del shuffle_files
del seed
return tf.data.Dataset.range(
*((0, 2) if split == 'validation' else (3, 7))
)
# 0 shot
src = experimental.FewshotDataSource(
dataset_providers.FunctionDataSource(
dataset_fn=fake_dataset_fn,
splits=['train', 'validation']
),
num_shots=0
)
dataset = src.get_dataset('validation', shuffle=False)
assert_dataset(
dataset, [{'eval': 0,}, {'eval': 1}]
)
# 1 shot
preprocessors = [
utils.map_over_dataset(lambda x: {'inputs': 0, 'targets': x})]
src = experimental.FewshotDataSource(
dataset_providers.FunctionDataSource(
dataset_fn=fake_dataset_fn, splits=['train', 'validation']),
train_preprocessors=preprocessors,
eval_preprocessors=preprocessors,
num_shots=1,
)
# When split is 'train', check that 'train' and 'eval' fields of each
# example are NOT always the same -- this can happen if the underlying
# dataset_fn does not implement shuffling, causing identical examples from
# the same split to be zipped together.
def train_and_eval_fields_always_same(dataset):
for ex in tfds.as_numpy(dataset):
if ex['train'] != ex['eval']:
return False
return True
# As long as train and eval fields aren't the same for SOME random seed, we
# have achieved the desired behavior. We fix the seed for this test because
# there are some seeds where train and eval fields DO happen to be the same
# by random chance, which would break this test.
self.assertFalse(
train_and_eval_fields_always_same(
src.get_dataset(split='train', shuffle=True, seed=123)))
# Even when shuffle is off, we don't want the train and eval fields to be
# the same. Instead, the 'train' field should be deterministically shuffled.
self.assertFalse(
train_and_eval_fields_always_same(
src.get_dataset(split='train', shuffle=False)))
# 3 shot
src = experimental.FewshotDataSource(
dataset_providers.FunctionDataSource(
dataset_fn=fake_dataset_fn,
splits=['train', 'validation']
),
train_preprocessors=[
utils.map_over_dataset(lambda x: {'inputs': 0, 'targets': x})
],
num_shots=3
)
dataset = src.get_dataset('validation', shuffle=False)
assert_dataset(
dataset, [
{
'eval': 0,
'train': {'inputs': [0, 0, 0], 'targets': [3, 5, 4]}
},
{
'eval': 1,
'train': {'inputs': [0, 0, 0], 'targets': [6, 6, 3]}
},
]
)
# Note: the train split has been deterministically shuffled, so the values
# of the 'targets' field that we test for are deterministic but arbitrary.
# 3-shot, sharded.
assert_dataset(
src.get_dataset(
'validation', shuffle=False, shard_info=ShardInfo(0, 2)), [
{
'eval': 0,
'train': {
'inputs': [0, 0, 0],
'targets': [3, 5, 5]
}
},
])
assert_dataset(
src.get_dataset(
'validation', shuffle=False, shard_info=ShardInfo(1, 2)), [
{
'eval': 1,
'train': {
'inputs': [0, 0, 0],
'targets': [4, 6, 6]
}
},
])
# Note: the train split has been deterministically shuffled, so the values
# of the 'targets' field that we test for are deterministic but arbitrary.
# Missing train
src = experimental.FewshotDataSource(
dataset_providers.FunctionDataSource(
dataset_fn=fake_dataset_fn,
splits=['validation']
),
num_shots=3
)
with self.assertRaisesRegex(
ValueError,
'Train split \'train\' is not one of the original source splits: '
r'\(\'validation\',\)'):
dataset = src.get_dataset('validation')
def test_fewshot_data_source_eval_on_fixed_exemplars(self):
def fake_dataset_fn(split, shuffle_files, seed=None):
# Note that for the purposes of this unit test, fake_dataset_fn
# deliberately does not properly implement shuffling. We test whether
# FewShotDataSource is robust to this.
del shuffle_files
del seed
      return tf.data.Dataset.range(
          *((0, 2) if split == 'validation' else (3, 7)))
# 1 shot
preprocessors = [
utils.map_over_dataset(lambda x: {'inputs': 0, 'targets': x})]
src = experimental.FewshotDataSource(
dataset_providers.FunctionDataSource(
dataset_fn=fake_dataset_fn, splits=['train', 'validation']),
train_preprocessors=preprocessors,
num_shots=1,
eval_on_fixed_exemplars=True,
)
def exemplars_always_same(dataset):
"""Checks if exemplars are always the same."""
train_ex = None
for ex in dataset:
if train_ex is None:
train_ex = ex['train']
continue
if ex['train'] != train_ex:
return False
return True
# Use 'validation' split for `eval_ds`. Since `train_ds` is initiated from
# the 'train' split and `eval_on_fixed_exemplars=True`, always use the fixed
# set of exemplars.
self.assertTrue(
exemplars_always_same(
src.get_dataset(split='validation', shuffle=True, seed=123)))
assert_dataset(
src.get_dataset('validation', shuffle=False), [
{
'eval': 0,
'train': {
'inputs': [0],
'targets': [3],
}
},
{
'eval': 1,
'train': {
'inputs': [0],
'targets': [3],
}
},
])
# `eval_on_fixed_exemplars` is ignored when `split` equals `train_split`.
self.assertFalse(
exemplars_always_same(
src.get_dataset(split='train', shuffle=True, seed=123)))
def test_fewshot_preprocessor(self):
train_examples = [
{
'inputs': 'How many states in the US?',
'targets': '50',
},
{
'inputs': 'How many cents in a dollar?',
'targets': '100',
},
{
'inputs': 'How many cents in a quarter?',
'targets': '25',
}
]
eval_examples = [
{
'inputs': 'Who was in the Beatles?',
'targets': 'John',
'answers': ['John', 'Paul', 'George', 'Ringo']
},
{
'inputs': 'When did the Beatles break up?',
'targets': '1970',
'answers': ['1970', 'April 10, 1970', 'April 10', '4/10/1970'],
}
]
def _from_generator(examples):
return tf.data.Dataset.from_generator(
lambda: (x for x in examples),
output_types={k: tf.string for k in examples[0].keys()},
output_shapes={
k: [None] if isinstance(v, list) else []
for k, v in examples[0].items()
})
train_ds = _from_generator(train_examples).repeat()
eval_ds = _from_generator(eval_examples)
# 0-shot
dataset = experimental.fewshot_preprocessor(
tf.data.Dataset.zip({'eval': eval_ds}),
inputs_prefix='0 ',
targets_prefix=' X 1 ',
example_separator=' X ')
assert_dataset(
dataset,
[
{
'inputs': '0 Who was in the Beatles? X 1',
'targets': 'John',
'answers': ['John', 'Paul', 'George', 'Ringo']
},
{
'inputs': '0 When did the Beatles break up? X 1',
'targets': '1970',
'answers': ['1970', 'April 10, 1970', 'April 10', '4/10/1970'],
}
])
# 2-shot
dataset = experimental.fewshot_preprocessor(
tf.data.Dataset.zip({'train': train_ds.batch(2), 'eval': eval_ds}),
inputs_prefix='0 ',
targets_prefix=' X 1 ',
example_separator=' X ')
assert_dataset(
dataset,
[
{
'inputs':
'0 How many states in the US? X 1 50 X 0 How many cents in '
'a dollar? X 1 100 X 0 Who was in the Beatles? X 1',
'targets': 'John',
'answers': ['John', 'Paul', 'George', 'Ringo']
},
{
'inputs':
'0 How many cents in a quarter? X 1 25 X 0 How many states '
'in the US? X 1 50 X 0 When did the Beatles break up? X 1',
'targets': '1970',
'answers': ['1970', 'April 10, 1970', 'April 10', '4/10/1970'],
}
])
# 1-shot, batched eval
dataset = experimental.fewshot_preprocessor(
tf.data.Dataset.zip(
{'train': train_ds.batch(1), 'eval': eval_ds.batch(2)}
),
inputs_prefix='0 ',
targets_prefix=' X 1 ',
example_separator=' X ')
assert_dataset(
dataset,
[
{
'inputs':
'0 How many states in the US? X 1 50 X 0 Who was in the '
'Beatles? X 1',
'targets': 'John',
'answers': ['John', 'Paul', 'George', 'Ringo']
},
{
'inputs':
'0 How many states in the US? X 1 50 X 0 When did the '
'Beatles break up? X 1',
'targets': '1970',
'answers': ['1970', 'April 10, 1970', 'April 10', '4/10/1970'],
},
])
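    # Summary of the template assembled above (added): each prompt is
    #   [inputs_prefix + train_inputs + targets_prefix + train_targets +
    #    example_separator] * num_shots + inputs_prefix + eval_inputs +
    #   targets_prefix (with trailing whitespace stripped).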
class SentinelTaskTest(FullyCachedTaskTest):
def validate_sentinel_task(
self, name, sequence_length, expected_dataset):
new_task = TaskRegistry.get(name)
    # With sentinels inserted we expect one additional preprocessor.
self.assertLen(new_task.preprocessors, 5)
self.assertEqual(new_task.metric_fns, self.metrics_fns)
self.assertIsNotNone(new_task.postprocessor)
assert_dataset(
new_task.get_dataset(sequence_length, shuffle=False),
expected_dataset)
def test_add_sentinel_task(self):
preprocessors = list(self.preprocessors)
TaskRegistry.add(
'encoder_decoder_task',
source=self.fake_source,
preprocessors=preprocessors,
output_features={
'inputs': Feature(self.vocabulary, add_eos=True),
'targets': Feature(self.vocabulary, add_eos=False)
},
metric_fns=self.metrics_fns)
sequence_length = {'inputs': 10, 'targets': 11}
for num_sentinels in [1, 2, 4]:
experimental.add_task_with_sentinels(
'encoder_decoder_task', num_sentinels=num_sentinels)
for sentinel_num in [1, 2, 4]:
sentinel_ids = [
self.vocabulary.vocab_size - (i + 1) for i in range(sentinel_num)]
self.validate_sentinel_task(
f'encoder_decoder_task_{sentinel_num}_sentinel', sequence_length,
[
{
'inputs': [1, 10, 10] + sentinel_ids,
'targets': sentinel_ids + [1, 11]
},
{
'inputs': [2, 10, 10] + sentinel_ids,
'targets': sentinel_ids + [2, 11]
},
])
if __name__ == '__main__':
absltest.main()
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Map a DAGCircuit onto a `coupling_map` adding swap gates."""
from logging import getLogger
from math import inf
from collections import OrderedDict
import numpy as np
from qiskit.circuit.quantumregister import QuantumRegister
from qiskit.transpiler.basepasses import TransformationPass
from qiskit.transpiler.exceptions import TranspilerError
from qiskit.dagcircuit import DAGCircuit
from qiskit.circuit.library.standard_gates import SwapGate
from qiskit.transpiler.layout import Layout
# pylint: disable=no-name-in-module
from .cython.stochastic_swap.utils import nlayout_from_layout
# pylint: disable=no-name-in-module
from .cython.stochastic_swap.swap_trial import swap_trial
logger = getLogger(__name__)
class StochasticSwap(TransformationPass):
"""Map a DAGCircuit onto a `coupling_map` adding swap gates.
Uses a randomized algorithm.
Notes:
1. Measurements may occur and be followed by swaps that result in repeated
measurement of the same qubit. Near-term experiments cannot implement
these circuits, so some care is required when using this mapper
with experimental backend targets.
2. We do not use the fact that the input state is zero to simplify
the circuit.
"""
def __init__(self, coupling_map, trials=20, seed=None):
"""StochasticSwap initializer.
        The coupling map must be a connected graph.
        If this is not satisfied, the behavior is undefined.
Args:
coupling_map (CouplingMap): Directed graph representing a coupling
map.
trials (int): maximum number of iterations to attempt
seed (int): seed for random number generator
"""
super().__init__()
self.coupling_map = coupling_map
self.trials = trials
self.seed = seed
self.qregs = None
self.rng = None
self.trivial_layout = None
def run(self, dag):
"""Run the StochasticSwap pass on `dag`.
Args:
dag (DAGCircuit): DAG to map.
Returns:
DAGCircuit: A mapped DAG.
Raises:
TranspilerError: if the coupling map or the layout are not
compatible with the DAG
"""
if len(dag.qregs) != 1 or dag.qregs.get('q', None) is None:
raise TranspilerError('Basic swap runs on physical circuits only')
if len(dag.qubits()) > len(self.coupling_map.physical_qubits):
raise TranspilerError('The layout does not match the amount of qubits in the DAG')
canonical_register = dag.qregs['q']
self.trivial_layout = Layout.generate_trivial_layout(canonical_register)
self.qregs = dag.qregs
if self.seed is None:
self.seed = np.random.randint(0, np.iinfo(np.int32).max)
self.rng = np.random.default_rng(self.seed)
logger.debug("StochasticSwap default_rng seeded with seed=%s", self.seed)
new_dag = self._mapper(dag, self.coupling_map, trials=self.trials)
return new_dag
def _layer_permutation(self, layer_partition, layout, qubit_subset,
coupling, trials):
"""Find a swap circuit that implements a permutation for this layer.
The goal is to swap qubits such that qubits in the same two-qubit gates
are adjacent.
Based on S. Bravyi's algorithm.
Args:
layer_partition (list): The layer_partition is a list of (qu)bit
lists and each qubit is a tuple (qreg, index).
layout (Layout): The layout is a Layout object mapping virtual
qubits in the input circuit to physical qubits in the coupling
graph. It reflects the current positions of the data.
qubit_subset (list): The qubit_subset is the set of qubits in
the coupling graph that we have chosen to map into, as tuples
(Register, index).
coupling (CouplingMap): Directed graph representing a coupling map.
This coupling map should be one that was provided to the
stochastic mapper.
trials (int): Number of attempts the randomized algorithm makes.
Returns:
Tuple: success_flag, best_circuit, best_depth, best_layout
If success_flag is True, then best_circuit contains a DAGCircuit with
the swap circuit, best_depth contains the depth of the swap circuit,
and best_layout contains the new positions of the data qubits after the
swap circuit has been applied.
Raises:
TranspilerError: if anything went wrong.
"""
logger.debug("layer_permutation: layer_partition = %s",
layer_partition)
logger.debug("layer_permutation: layout = %s",
layout.get_virtual_bits())
logger.debug("layer_permutation: qubit_subset = %s",
qubit_subset)
logger.debug("layer_permutation: trials = %s", trials)
# The input dag is on a flat canonical register
# TODO: cleanup the code that is general for multiple qregs below
canonical_register = QuantumRegister(len(layout), 'q')
qregs = OrderedDict({canonical_register.name: canonical_register})
gates = [] # list of lists of tuples [[(register, index), ...], ...]
for gate_args in layer_partition:
if len(gate_args) > 2:
raise TranspilerError("Layer contains > 2-qubit gates")
if len(gate_args) == 2:
gates.append(tuple(gate_args))
logger.debug("layer_permutation: gates = %s", gates)
# Can we already apply the gates? If so, there is no work to do.
dist = sum([coupling.distance(layout[g[0]], layout[g[1]])
for g in gates])
logger.debug("layer_permutation: distance = %s", dist)
if dist == len(gates):
logger.debug("layer_permutation: nothing to do")
circ = DAGCircuit()
circ.add_qreg(canonical_register)
return True, circ, 0, layout
# Begin loop over trials of randomized algorithm
num_qubits = len(layout)
best_depth = inf # initialize best depth
best_edges = None # best edges found
best_circuit = None # initialize best swap circuit
best_layout = None # initialize best final layout
cdist2 = coupling._dist_matrix**2
# Scaling matrix
scale = np.zeros((num_qubits, num_qubits))
int_qubit_subset = _regtuple_to_numeric(qubit_subset, qregs)
int_gates = _gates_to_idx(gates, qregs)
int_layout = nlayout_from_layout(layout, qregs, coupling.size())
trial_circuit = DAGCircuit() # SWAP circuit for slice of swaps in this trial
for qubit in layout.get_virtual_bits().keys():
if qubit.register not in trial_circuit.qregs.values():
trial_circuit.add_qreg(qubit.register)
edges = np.asarray(coupling.get_edges(), dtype=np.int32).ravel()
cdist = coupling._dist_matrix
for trial in range(trials):
logger.debug("layer_permutation: trial %s", trial)
# This is one Trial --------------------------------------
dist, optim_edges, trial_layout, depth_step = swap_trial(num_qubits, int_layout,
int_qubit_subset,
int_gates, cdist2,
cdist, edges, scale,
self.rng)
logger.debug("layer_permutation: final distance for this trial = %s", dist)
if dist == len(gates) and depth_step < best_depth:
logger.debug("layer_permutation: got circuit with improved depth %s",
depth_step)
best_edges = optim_edges
best_layout = trial_layout
best_depth = min(best_depth, depth_step)
# Break out of trial loop if we found a depth 1 circuit
# since we can't improve it further
if best_depth == 1:
break
# If we have no best circuit for this layer, all of the
# trials have failed
if best_layout is None:
logger.debug("layer_permutation: failed!")
return False, None, None, None
edges = best_edges.edges()
for idx in range(best_edges.size//2):
swap_src = self.trivial_layout[edges[2*idx]]
swap_tgt = self.trivial_layout[edges[2*idx+1]]
trial_circuit.apply_operation_back(SwapGate(), [swap_src, swap_tgt], [])
best_circuit = trial_circuit
# Otherwise, we return our result for this layer
logger.debug("layer_permutation: success!")
best_lay = best_layout.to_layout(qregs)
return True, best_circuit, best_depth, best_lay
def _layer_update(self, i, best_layout, best_depth,
best_circuit, layer_list):
"""Provide a DAGCircuit for a new mapped layer.
Args:
i (int): layer number
best_layout (Layout): layout returned from _layer_permutation
best_depth (int): depth returned from _layer_permutation
best_circuit (DAGCircuit): swap circuit returned from _layer_permutation
layer_list (list): list of DAGCircuit objects for each layer,
output of DAGCircuit layers() method
Returns:
DAGCircuit: a DAGCircuit object to append to the output DAGCircuit
that the _mapper method is building.
"""
layout = best_layout
logger.debug("layer_update: layout = %s", layout)
logger.debug("layer_update: self.trivial_layout = %s", self.trivial_layout)
dagcircuit_output = DAGCircuit()
for qubit in layout.get_virtual_bits().keys():
if qubit.register not in dagcircuit_output.qregs.values():
dagcircuit_output.add_qreg(qubit.register)
# Output any swaps
if best_depth > 0:
logger.debug("layer_update: there are swaps in this layer, "
"depth %d", best_depth)
dagcircuit_output.compose(best_circuit)
else:
logger.debug("layer_update: there are no swaps in this layer")
# Output this layer
layer_circuit = layer_list[i]["graph"]
for creg in layer_circuit.cregs.values():
dagcircuit_output.add_creg(creg)
order = layout.reorder_bits(dagcircuit_output.qubits())
dagcircuit_output.compose(layer_circuit, qubits=order)
return dagcircuit_output
def _mapper(self, circuit_graph, coupling_graph, trials=20):
"""Map a DAGCircuit onto a CouplingMap using swap gates.
Use self.trivial_layout for the initial layout.
Args:
circuit_graph (DAGCircuit): input DAG circuit
coupling_graph (CouplingMap): coupling graph to map onto
trials (int): number of trials.
Returns:
DAGCircuit: object containing a circuit equivalent to
circuit_graph that respects couplings in coupling_graph
Raises:
TranspilerError: if there was any error during the mapping
or with the parameters.
"""
# Schedule the input circuit by calling layers()
layerlist = list(circuit_graph.layers())
logger.debug("schedule:")
for i, v in enumerate(layerlist):
logger.debug(" %d: %s", i, v["partition"])
qubit_subset = self.trivial_layout.get_virtual_bits().keys()
# Find swap circuit to precede each layer of input circuit
layout = self.trivial_layout.copy()
# Construct an empty DAGCircuit with the same set of
# qregs and cregs as the input circuit
dagcircuit_output = DAGCircuit()
dagcircuit_output.name = circuit_graph.name
for qreg in circuit_graph.qregs.values():
dagcircuit_output.add_qreg(qreg)
for creg in circuit_graph.cregs.values():
dagcircuit_output.add_creg(creg)
logger.debug("trivial_layout = %s", layout)
# Iterate over layers
for i, layer in enumerate(layerlist):
# Attempt to find a permutation for this layer
success_flag, best_circuit, best_depth, best_layout \
= self._layer_permutation(layer["partition"], layout,
qubit_subset, coupling_graph,
trials)
logger.debug("mapper: layer %d", i)
logger.debug("mapper: success_flag=%s,best_depth=%s",
success_flag, str(best_depth))
# If this fails, try one gate at a time in this layer
if not success_flag:
logger.debug("mapper: failed, layer %d, "
"retrying sequentially", i)
serial_layerlist = list(layer["graph"].serial_layers())
# Go through each gate in the layer
for j, serial_layer in enumerate(serial_layerlist):
success_flag, best_circuit, best_depth, best_layout = \
self._layer_permutation(
serial_layer["partition"],
layout, qubit_subset,
coupling_graph,
trials)
logger.debug("mapper: layer %d, sublayer %d", i, j)
logger.debug("mapper: success_flag=%s,best_depth=%s,",
success_flag, str(best_depth))
# Give up if we fail again
if not success_flag:
raise TranspilerError("swap mapper failed: " +
"layer %d, sublayer %d" % (i, j))
# Update the record of qubit positions
# for each inner iteration
layout = best_layout
# Update the DAG
dagcircuit_output.compose(
self._layer_update(j,
best_layout,
best_depth,
best_circuit,
serial_layerlist))
else:
# Update the record of qubit positions for each iteration
layout = best_layout
# Update the DAG
dagcircuit_output.compose(
self._layer_update(i,
best_layout,
best_depth,
best_circuit,
layerlist))
# This is the final edgemap. We might use it to correctly replace
# any measurements that needed to be removed earlier.
logger.debug("mapper: self.trivial_layout = %s", self.trivial_layout)
logger.debug("mapper: layout = %s", layout)
return dagcircuit_output
def _regtuple_to_numeric(items, qregs):
"""Takes Qubit instances and converts them into an integer array.
Args:
items (list): List of Qubit instances to convert.
qregs (dict): List of Qubit instances.
Returns:
ndarray: Array of integers.
"""
sizes = [qr.size for qr in qregs.values()]
reg_idx = np.cumsum([0]+sizes)
regint = {}
for ind, qreg in enumerate(qregs.values()):
regint[qreg] = ind
out = np.zeros(len(items), dtype=np.int32)
for idx, val in enumerate(items):
out[idx] = reg_idx[regint[val.register]]+val.index
return out
def _gates_to_idx(gates, qregs):
"""Converts gate tuples into a nested list of integers.
Args:
gates (list): List of Qubit instances representing gates.
qregs (dict): List of Qubit instances.
Returns:
list: Nested list of integers for gates.
"""
sizes = [qr.size for qr in qregs.values()]
reg_idx = np.cumsum([0]+sizes)
regint = {}
for ind, qreg in enumerate(qregs.values()):
regint[qreg] = ind
out = np.zeros(2*len(gates), dtype=np.int32)
for idx, gate in enumerate(gates):
out[2*idx] = reg_idx[regint[gate[0].register]]+gate[0].index
out[2*idx+1] = reg_idx[regint[gate[1].register]]+gate[1].index
return out
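if __name__ == "__main__":
    # Illustrative usage sketch (added; not part of the original pass module).
    # The 3-qubit line coupling map and the circuit below are made up for
    # demonstration: the CX between non-adjacent qubits 0 and 2 forces the
    # pass to insert SWAP gates.
    from qiskit import QuantumCircuit
    from qiskit.converters import circuit_to_dag, dag_to_circuit
    from qiskit.transpiler import CouplingMap

    coupling = CouplingMap([[0, 1], [1, 2]])
    qc = QuantumCircuit(3)
    qc.cx(0, 2)
    mapped_dag = StochasticSwap(coupling, trials=20, seed=42).run(circuit_to_dag(qc))
    print(dag_to_circuit(mapped_dag))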
|
nilq/baby-python
|
python
|
import time
import RPi.GPIO as GPIO
import SerialWombatPigpioI2c
import SerialWombatServo
import SerialWombatAnalogInput
import SerialWombatQuadEnc
GPIO.setwarnings(False)
sw = SerialWombatPigpioI2c.SerialWombatChipPigpioI2c(17,27,0x6D)
sw.begin(False)
print(sw.version)
print(sw.model)
print(sw.fwVersion)
servo = SerialWombatServo.SerialWombatServo(sw)
servo.attach(3)
analog = SerialWombatAnalogInput.SerialWombatAnalogInput(sw)
analog.begin(2)
knob = SerialWombatQuadEnc.SerialWombatQuadEnc(sw)
knob.begin(0,1,10)
print("Pin 2 analog: ",analog.readPublicData())
print("Source Voltage mv: ",sw.readSupplyVoltage_mV())
time.sleep(2)
while True:
    print(knob.read(), " ", analog.readCounts())
servo.write16bit(analog.readCounts())
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# newclass.py
from pfp_sdk.PFPUtil import *
# Explicit imports for modules used below (the star import above may already
# provide them, but being explicit avoids relying on that).
import wx
import sqlite3
class Example(wx.Frame):
def __init__(self, parent, title):
super(Example, self).__init__(parent, title=title, pos=(100,100), size=(800, 230))
self.InitUI()
self.Centre()
self.Show()
def InitUI(self):
#read config file
self.default_modulelistDB_path = ""
self.user = ""
self.contact = ""
panel = wx.Panel(self)
sizer = wx.GridBagSizer(9, 9)
#---Main Text
text1 = wx.StaticText(panel, label=" Category")
sizer.Add(text1, pos=(0, 0), flag=wx.EXPAND, border=50)
#---Logo
#icon = wx.StaticBitmap(panel, bitmap=wx.Bitmap('PFPModule/icons/SelfTest.png'))
#sizer.Add(icon, pos=(0, 3), flag=wx.EXPAND, border=5)
line = wx.StaticLine(panel)
sizer.Add(line, pos=(1, 0), span=(1, 4), flag=wx.EXPAND|wx.BOTTOM, border=5)
#---Category Combo
text2 = wx.StaticText(panel, label=" Category Name")
sizer.Add(text2, pos=(2, 0), flag=wx.EXPAND, border=5)
self.tc2 = wx.TextCtrl(panel)
sizer.Add(self.tc2, pos=(2, 1), span=(1, 3), flag=wx.TOP|wx.EXPAND, border=5)
text3 = wx.StaticText(panel, label=" Description")
sizer.Add(text3, pos=(3, 0), flag=wx.EXPAND, border=5)
self.tc3 = wx.TextCtrl(panel)
sizer.Add(self.tc3, pos=(3, 1), span=(1, 3), flag=wx.TOP|wx.EXPAND, border=5)
#---Last Buttons
self.button91 = wx.Button(panel, label="Apply", size = wx.Size(70,30))
sizer.Add(self.button91, pos=(4, 1), span=(1, 1), flag=wx.ALIGN_RIGHT)
self.button91.Bind(wx.EVT_BUTTON, self.OnButtonOK)
self.button92 = wx.Button(panel, label="Cancel", size = wx.Size(70,30))
sizer.Add(self.button92, pos=(4, 2), span=(1, 1), flag=wx.ALIGN_LEFT)
self.button92.Bind(wx.EVT_BUTTON, self.OnButtonCancel)
sizer.AddGrowableCol(2)
panel.SetSizer(sizer)
def OnButtonOK(self, event):
#Add RowID of Module into ModuleIDs field in the ModuleCategory table
con = sqlite3.connect( self.default_modulelistDB_path )
cursor = con.cursor()
SelectQuery = "select * from ModuleCategory;"
cursor.execute( SelectQuery )
Results = cursor.fetchall()
count = 0
for row in Results:
if row[1].lower() == self.tc2.GetValue().lower():
count += 1
if count > 0:
wx.MessageBox("Category Name is duplicated")
return
        # Use a parameterized query so user input cannot inject SQL
        InsertQuery = "insert into ModuleCategory values (null, ?, ?, '')"
        cursor.execute(InsertQuery, (self.tc2.GetValue(), self.tc3.GetValue()))
con.commit()
con.close()
self.Close()
return
def OnButtonCancel(self, event):
self.Close()
return
def main():
app = wx.App()
Example(None, title="Module Category Setting")
app.MainLoop()
if __name__ == '__main__':
main()
|
nilq/baby-python
|
python
|
"""A practical configuration system.
"""
from .extension_point import ExtensionPoint # noqa: F401
from .loading import load_from_module, load_from_pkg_resources # noqa: F401
from .option import build_default_config, Option # noqa: F401
from .profile import Profile # noqa: F401
from .utilities import merge # noqa: F401
|
nilq/baby-python
|
python
|
"""Transform metrics stored in SQuaSH into InfluxDB format.
See sqr-009.lsst.io for a description on how metrics are stored in SQuaSH and
the resulting InfluxDB data model.
"""
__all__ = ["Transformer"]
import logging
import math
import pathlib
import urllib.parse
import requests
import yaml
from requests.exceptions import ConnectionError, HTTPError
from squash.tasks.utils.format import Formatter
logger = logging.getLogger("squash")
class Transformer(Formatter):
"""Transform metrics stored in SQuaSH into InfluxDB format.
Parameters
----------
squash_api_url : `str`
SQuaSH API URL.
data : `str`
SQuaSH job data in JSON.
"""
def __init__(self, squash_api_url, data):
super().__init__(squash_api_url=squash_api_url)
self.squash_api_url = squash_api_url
self.data = data
self.mapping = self.load_mapping()
def load_mapping(self):
"""Load the SQuaSH to InfluxDB mapping.
Returns
-------
mapping : `dict`
Dictionary with the SQuaSH to InfluxDB mapping.
"""
filename = pathlib.Path(__file__).parent / "mapping.yaml"
with open(filename) as f:
mapping = yaml.load(f, Loader=yaml.FullLoader)
return mapping
def run_mapping(self, key):
"""Return schema, key, and transformation from the mapping.
Parameters
----------
key : `str`
The key to look for in the mapping.
Returns
-------
schema : `str` or `None`
The InfluxDB schema to write or `None` if it should not
be added to InfluxDB.
mapped_key : `str` or `None`
The mapped key or `None` if it should not be added to InfluxDB.
transformation : `str` or `None`
The transformation that should be applied to the value if any.
"""
        # By default, if the key is not found in the mapping, it is added to
        # InfluxDB as a tag, preserving the original name.
schema = "tag"
mapped_key = key
transformation = None
if key in self.mapping:
item = self.mapping[key]
schema = item["schema"]
mapped_key = item["key"]
transformation = item["transformation"]
return schema, mapped_key, transformation
def get_timestamp(self):
"""Get the timestamp to use in InfluxDB.
        Use the timestamp at which the verification job was recorded. If the
        job ran in Jenkins, use the Jenkins pipeline run time instead.
Returns
-------
timestamp : `int`
Formatted timestamp.
"""
timestamp = Formatter.format_timestamp(self.data["date_created"])
if self.data["meta"]["env"]["env_name"] == "jenkins":
ci_id = self.data["meta"]["env"]["ci_id"]
ci_name = self.data["meta"]["env"]["ci_name"]
# Get timestamp from Jenkins
jenkins_url = (
f"{self.squash_api_url}/jenkins/{ci_id}?ci_name={ci_name}"
)
            try:
                r = requests.get(jenkins_url)
                r.raise_for_status()
            except HTTPError:
                logger.error("Could not get timestamp from Jenkins.")
            except ConnectionError:
                logger.error(
                    f"Failed to establish connection with Jenkins "
                    f"{jenkins_url}."
                )
            else:
                # Only use the Jenkins timestamp when the request succeeded;
                # otherwise keep the job's date_created timestamp.
                date_created = r.json()["date_created"]
                timestamp = Formatter.format_timestamp(date_created)
return timestamp
def update_metadata(self):
"""Add/remove metadata before the trandformation step."""
# Add extra metadata
id = self.data["id"]
self.data["meta"]["id"] = id
self.data["meta"]["url"] = urllib.parse.urljoin(
self.squash_api_url, f"/job/{id}"
)
self.data["meta"]["date_created"] = self.data["date_created"]
self.data["meta"]["env"]["ci_dataset"] = self.data["ci_dataset"]
# Fix dataset_repo_url duplication
if "dataset_repo_url" in self.data["meta"].keys():
del self.data["meta"]["dataset_repo_url"]
# Fix use of ci_dataset key in environments other than jenkins
if self.data["meta"]["env"]["env_name"] != "jenkins":
if "ci_dataset" in self.data["meta"]["env"]:
del self.data["meta"]["env"]["ci_dataset"]
# Add code changes metadata keys
if self.data["meta"]["env"]["env_name"] == "jenkins":
self.data["meta"]["env"]["code_changes"] = ""
self.data["meta"]["env"]["code_changes_counts"] = ""
        # Add ci_name until DM-18599 is implemented
if "ci_url" in self.data["meta"]["env"].keys():
if "validate_drp_gen3" in self.data["meta"]["env"]["ci_url"]:
self.data["meta"]["env"]["ci_name"] = "validate_drp_gen3"
elif "validate_drp" in self.data["meta"]["env"]["ci_url"]:
self.data["meta"]["env"]["ci_name"] = "validate_drp"
elif "ap_verify" in self.data["meta"]["env"]["ci_url"]:
self.data["meta"]["env"]["ci_name"] = "ap_verify"
def process_metadata(self, data):
"""Process SQuaSH metadata using a pre-configured mapping to InfluxDB.
Parameters
----------
data : `dict`
A dictionary with SQuaSH metadata.
        Returns
        -------
        tags : `list`
            List of tags to be written to InfluxDB.
        fields : `list`
            List of fields to be written to InfluxDB.
"""
tags = []
fields = []
for key, value in data.items():
# process nested dict
if isinstance(value, dict):
tmp_tags, tmp_fields = self.process_metadata(value)
tags.extend(tmp_tags)
fields.extend(tmp_fields)
else:
schema, mapped_key, transformation = self.run_mapping(key)
if transformation:
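                    # Note (added): the transformation is assumed to be a
                    # Python expression over `value` defined in mapping.yaml;
                    # eval() applies it to the raw value here.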
value = eval(transformation)
if mapped_key and schema == "tag":
tags.append(
"{}={}".format(
Formatter.sanitize(mapped_key),
Formatter.sanitize(value),
)
)
elif mapped_key and schema == "field":
if isinstance(value, str):
fields.append(
'{}="{}"'.format(
Formatter.sanitize(mapped_key), value
)
)
else:
fields.append(
"{}={}".format(
Formatter.sanitize(mapped_key), value
)
)
# Make sure tags and fields are unique
tags = list(set(tags))
fields = list(set(fields))
return tags, fields
def get_meas_by_package(self):
"""Group verify measurements by package.
By grouping verify measurements by package we can send them to InfluxDB
in batch. A package is mapped to an InfluxDB measurement.
"""
meas_by_package = {}
for meas in self.data["measurements"]:
# DM-18360 - SQuaSH API/measurements should return the verification
# package
# a metric fqn is <package>.<metric>, extract package name from the
# metric fqn
package = None
if "." in meas["metric"]:
package = meas["metric"].split(".")[0]
if package:
# No need to carry the package name prefix in the metric name.
if meas["metric"].startswith(package):
metric = meas["metric"][len(package) + 1 :]
value = meas["value"]
# InfluxDB does not store NaNs and it is safe to just skip
# values that are NaN.
# https://github.com/influxdata/influxdb/issues/4089
if not math.isnan(value):
if package not in meas_by_package:
meas_by_package[package] = []
meas_by_package[package].append(f"{metric}={value}")
return meas_by_package
def to_influxdb_line(self):
"""Process job data and make the InfluxDB lines.
Returns
-------
influxdb_lines : `list`
A list with strings representing each InfluxDB line.
"""
timestamp = self.get_timestamp()
self.update_metadata()
tags, extra_fields = self.process_metadata(self.data["meta"])
meas_by_package = self.get_meas_by_package()
influxdb_lines = []
for meas in meas_by_package:
fields = meas_by_package[meas] + extra_fields
influxdb_lines.append(
Formatter.format_influxdb_line(meas, tags, fields, timestamp)
)
return influxdb_lines
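# Illustrative note (added): each string returned by to_influxdb_line() is in
# InfluxDB line protocol, roughly
#   <measurement>,<tag1>=<v1>,... <field1>=<v1>,... <timestamp>
# e.g. (names and values made up for illustration):
#   validate_drp,ci_name=validate_drp,filter=r PA1=13.2,run_id="42" 1589148000000000000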
|
nilq/baby-python
|
python
|
from globibot.lib.web.handlers import SessionHandler
from globibot.lib.web.decorators import authenticated, respond_json
from http import HTTPStatus
server_data = lambda server: dict(
id = server.id,
name = server.name,
icon_url = server.icon_url,
)
class GuildHandler(SessionHandler):
@authenticated
@respond_json
def get(self, server_id):
server = self.bot.find_server(server_id)
if server:
return server_data(server)
else:
self.set_status(HTTPStatus.BAD_REQUEST)
|
nilq/baby-python
|
python
|
import random
import torch
import numpy as np
import clip.clip as clip
from tqdm import tqdm
from torch.utils.data import DataLoader
import sys
from vqa.vqa_dataset import VQADataset
SOFT_PROMPT = True
ITER_TO_BREAK = 999
def eval_init():
global model, preprocess, device
torch.manual_seed(42)
np.random.seed(42)
random.seed(42)
print(clip.available_models())
device = "cuda" if torch.cuda.is_available() else "cpu"
print(f'Using device: {device}')
model, preprocess = clip.load("RN50", device=device, download_root='/home/work/checkpoints/CLIP')
def clip_infer(image, text):
with torch.no_grad():
image_features = model.encode_image(image)
b, k, n = text.size()
text = text.view(b*k, n)
text_features = model.encode_text(text, soft_prompting=SOFT_PROMPT)
text_features = text_features.view(b, k, -1)
# normalized features
image_features = image_features / image_features.norm(dim=-1, keepdim=True)
text_features = text_features / text_features.norm(dim=-1, keepdim=True)
# cosine similarity as logits
logit_scale = model.logit_scale.exp()
logits_per_image = logit_scale * torch.bmm(image_features.unsqueeze(1), text_features.permute(0,2,1)).squeeze(1)
probs = logits_per_image.softmax(dim=-1).cpu()
return probs
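# Shape walk-through for clip_infer (added for clarity): with B images and K
# candidate answer texts of token length N,
#   image_features: (B, D), text_features: (B, K, D) after the view() calls,
#   bmm((B, 1, D), (B, D, K)) -> (B, 1, K) -> squeeze(1) -> (B, K),
# so probs[i, j] is the softmax-normalized similarity of image i to answer j.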
def main():
eval_init()
TP = 0
upper_bound_accuracy = 0
n_samples = 0
if sys.gettrace() is not None:
N_WORKERS = 0
else:
N_WORKERS = 4
dataset = VQADataset('/home/work/Datasets/vqa2', preprocess, clip.tokenize, 'val')
loader = DataLoader(dataset, 256, shuffle=False, num_workers=N_WORKERS)
for i, (text, image, label) in enumerate(tqdm(loader)):
image = image.to(device)
text = text.to(device)
upper_bound_accuracy += label.max(dim=1).values.sum().item()
probs = clip_infer(image, text)
pred_answer = torch.argmax(probs, dim=1)
        batch_size = label.size(0)
        TP += label[torch.arange(batch_size), pred_answer].sum().item()
n_samples += image.size(0)
if i == ITER_TO_BREAK:
break
print(f'TP: {TP}, Accuracy: {TP/n_samples}, Upper bound: {upper_bound_accuracy / n_samples}')
if __name__ == '__main__':
    main()
|
nilq/baby-python
|
python
|
"""
Created on Jan 27, 2016
@author: tmahrt
Tests that praat files can be read in and then written out, and that the two
resulting files are the same.
This does not test that the file reader is correct. If the file
reader is bad (e.g. truncates floating points to 1 decimal place), the
resulting data structures will look the same for both the source and
generated files.
"""
import unittest
import os
import io
from os.path import join
from praatio import tgio
from praatio import dataio
from praatio import kgio
from praatio import audioio
def areTheSame(fn1, fn2, fileHandler):
"""
Tests that files contain the same data
If fileHandler is tgio file reader like tgio.openTextgrid then
we can compare a shortTextgrid and a longTextgrid.
If fileHandler is readFile or io.open, etc then the raw
text will be compared.
"""
data1 = fileHandler(fn1)
data2 = fileHandler(fn2)
return data1 == data2
def readFile(fn):
    with io.open(fn, "r") as fd:
        return fd.read()
def run_save(
tg,
minimumIntervalLength=None,
minTimestamp=None,
maxTimestamp=None,
ignoreBlankSpaces=False,
):
"""
Mock write function and return the first tier's entry list
tg.save() mutates the textgrid's data, so the entry list
before and after saving can be different
"""
tg.save(
"garbage.Textgrid",
minimumIntervalLength=minimumIntervalLength,
minTimestamp=minTimestamp,
maxTimestamp=maxTimestamp,
ignoreBlankSpaces=ignoreBlankSpaces,
)
entryList = tg.tierDict[tg.tierNameList[0]].entryList
entryList = [[start, end, label] for start, end, label in entryList]
return entryList
class IOTests(unittest.TestCase):
"""Testing input and output"""
def __init__(self, *args, **kargs):
super(IOTests, self).__init__(*args, **kargs)
cwd = os.path.dirname(os.path.realpath(__file__))
root = os.path.split(cwd)[0]
self.dataRoot = join(root, "files")
self.outputRoot = join(self.dataRoot, "io_test_output")
def setUp(self):
if not os.path.exists(self.outputRoot):
os.mkdir(self.outputRoot)
def test_reading_textgrids_with_newlines_in_labels(self):
"""Tests for reading/writing textgrids with newlines"""
fn = "bobby_words_with_newlines.TextGrid"
inputFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, fn)
tg = tgio.openTextgrid(inputFN)
tg.save(outputFN)
self.assertTrue(areTheSame(inputFN, outputFN, readFile))
def test_reading_long_textgrids_with_newlines_in_labels(self):
"""Tests for reading/writing textgrids with newlines"""
fn = "bobby_words_with_newlines_longfile.TextGrid"
inputFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, fn)
tg = tgio.openTextgrid(inputFN)
tg.save(outputFN, useShortForm=False)
self.assertTrue(areTheSame(inputFN, outputFN, readFile))
fn = "bobby_words_with_newlines_longfile_elan.TextGrid"
elanInputFN = join(self.dataRoot, fn)
elanOutputFN = join(self.outputRoot, fn)
tg = tgio.openTextgrid(elanInputFN)
tg.save(elanOutputFN, useShortForm=False)
self.assertTrue(areTheSame(inputFN, elanOutputFN, readFile))
def test_tg_io(self):
"""Tests for reading/writing textgrid io"""
fn = "textgrid_to_merge.TextGrid"
inputFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, fn)
tg = tgio.openTextgrid(inputFN)
tg.save(outputFN)
self.assertTrue(areTheSame(inputFN, outputFN, readFile))
def test_tg_io_long_vs_short(self):
"""Tests reading of long vs short textgrids"""
shortFN = join(self.dataRoot, "textgrid_to_merge.TextGrid")
longFN = join(self.dataRoot, "textgrid_to_merge_longfile.TextGrid")
self.assertTrue(areTheSame(shortFN, longFN, tgio.openTextgrid))
def test_saving_short_textgrid(self):
"""Tests that short textgrid files are saved non-destructively"""
fn = "textgrid_to_merge.TextGrid"
shortFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, "saved_short_file.textgrid")
tg = tgio.openTextgrid(shortFN)
tg.save(outputFN)
self.assertTrue(areTheSame(shortFN, outputFN, readFile))
def test_saving_long_textgrid(self):
"""Tests that long textgrid files are saved non-destructively"""
fn = "textgrid_to_merge_longfile.TextGrid"
longFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, "saved_long_file.textgrid")
tg = tgio.openTextgrid(longFN)
tg.save(outputFN, useShortForm=False)
self.assertTrue(areTheSame(longFN, outputFN, readFile))
def test_saving_and_loading_json(self):
"""Tests that json files are saved non-destructively"""
fn = "mary.TextGrid"
shortFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, "saved_textgrid_as_json.json")
outputLastFN = join(
self.outputRoot, "saved_textgrid_as_json_then_textgrid.TextGrid"
)
tgFromTgFile = tgio.openTextgrid(shortFN)
tgFromTgFile.save(outputFN, outputFormat=tgio.JSON)
tgFromJsonFile = tgio.openTextgrid(outputFN, readAsJson=True)
tgFromJsonFile.save(outputLastFN)
self.assertTrue(areTheSame(shortFN, outputLastFN, readFile))
def test_get_audio_duration(self):
"""Tests that the two audio duration methods output the same value."""
wavFN = join(self.dataRoot, "bobby.wav")
durationA = tgio._getWavDuration(wavFN)
durationB = audioio.getDuration(wavFN)
self.assertTrue(durationA == durationB)
def test_duration_tier_io(self):
"""Tests for reading/writing duration tiers"""
fn = "mary.DurationTier"
inputFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, fn)
dt = dataio.open2DPointObject(inputFN)
dt.save(outputFN)
self.assertTrue(areTheSame(inputFN, outputFN, dataio.open2DPointObject))
def test_pitch_io(self):
"""Tests for reading/writing pitch tiers"""
fn = "mary.PitchTier"
inputFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, fn)
pp = dataio.open2DPointObject(inputFN)
pp.save(outputFN)
self.assertTrue(areTheSame(inputFN, outputFN, dataio.open2DPointObject))
def test_pitch_io_long_vs_short(self):
"""Tests reading of long vs short 2d point objects"""
shortFN = join(self.dataRoot, "mary.PitchTier")
longFN = join(self.dataRoot, "mary_longfile.PitchTier")
self.assertTrue(areTheSame(shortFN, longFN, dataio.open2DPointObject))
def test_point_process_io(self):
"""Tests for reading/writing point processes"""
fn = "bobby.PointProcess"
inputFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, fn)
pp = dataio.open1DPointObject(inputFN)
pp.save(outputFN)
self.assertTrue(areTheSame(inputFN, outputFN, dataio.open1DPointObject))
def test_point_process_io_long_vs_short(self):
shortFN = join(self.dataRoot, "bobby.PointProcess")
longFN = join(self.dataRoot, "bobby_longfile.PointProcess")
self.assertTrue(areTheSame(shortFN, longFN, dataio.open1DPointObject))
def test_kg_io(self):
"""Tests for reading/writing klattgrids"""
fn = "bobby.KlattGrid"
inputFN = join(self.dataRoot, fn)
outputFN = join(self.outputRoot, fn)
kg = kgio.openKlattgrid(inputFN)
kg.save(outputFN)
self.assertTrue(areTheSame(inputFN, outputFN, kgio.openKlattgrid))
def test_save(self):
userEntryList = [[0.4, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = [
[0.0, 0.4, ""],
[0.4, 0.6, "A"],
[0.6, 0.8, ""],
[0.8, 1.0, "E"],
[1.0, 1.2, ""],
[1.2, 1.3, "I"],
[1.3, 2.0, ""],
]
tier = tgio.IntervalTier("test", userEntryList, 0, 2.0)
tg = tgio.Textgrid()
tg.addTier(tier)
actualEntryList = run_save(tg)
self.assertEqual(expectedEntryList, actualEntryList)
def test_save_with_minimum_time_stamp(self):
userEntryList = [[0.4, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = [
[0.3, 0.4, ""],
[0.4, 0.6, "A"],
[0.6, 0.8, ""],
[0.8, 1.0, "E"],
[1.0, 1.2, ""],
[1.2, 1.3, "I"],
[1.3, 2.0, ""],
]
tier = tgio.IntervalTier("test", userEntryList, 0.3, 2.0)
tg = tgio.Textgrid()
tg.addTier(tier)
actualEntryList = run_save(tg)
self.assertEqual(expectedEntryList, actualEntryList)
def test_save_with_force_zero_as_minimum_time(self):
userEntryList = [[0.4, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = [
[0, 0.4, ""],
[0.4, 0.6, "A"],
[0.6, 0.8, ""],
[0.8, 1.0, "E"],
[1.0, 1.2, ""],
[1.2, 1.3, "I"],
[1.3, 2.0, ""],
]
tier = tgio.IntervalTier("test", userEntryList, 0.3, 2.0)
tg = tgio.Textgrid()
tg.addTier(tier)
actualEntryList = run_save(tg, minTimestamp=0)
self.assertEqual(expectedEntryList, actualEntryList)
def test_save_with_force_larger_value_as_maximum_time(self):
userEntryList = [[0.4, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = [
[0.3, 0.4, ""],
[0.4, 0.6, "A"],
[0.6, 0.8, ""],
[0.8, 1.0, "E"],
[1.0, 1.2, ""],
[1.2, 1.3, "I"],
[1.3, 3.0, ""],
]
tier = tgio.IntervalTier("test", userEntryList, 0.3, 2.0)
tg = tgio.Textgrid()
tg.addTier(tier)
actualEntryList = run_save(tg, maxTimestamp=3.0)
self.assertEqual(expectedEntryList, actualEntryList)
def test_save_with_force_too_large_minimum_time(self):
# If you choose to force save to use a minTimestamp, all
# of your entries must be higher than that minTimestamp
userEntryList = [[0.4, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = [
[0, 0.4, ""],
[0.4, 0.6, "A"],
[0.6, 0.8, ""],
[0.8, 1.0, "E"],
[1.0, 1.2, ""],
[1.2, 1.3, "I"],
[1.3, 2.0, ""],
]
tier = tgio.IntervalTier("test", userEntryList, 0.3, 2.0)
tg = tgio.Textgrid()
tg.addTier(tier)
self.assertRaises(AssertionError, run_save, tg, minTimestamp=1.0)
    def test_save_with_force_too_small_maximum_time(self):
        # If you choose to force save to use a maxTimestamp, all
        # of your entries must be lower than that maxTimestamp
userEntryList = [[0.4, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = [
[0, 0.4, ""],
[0.4, 0.6, "A"],
[0.6, 0.8, ""],
[0.8, 1.0, "E"],
[1.0, 1.2, ""],
[1.2, 1.3, "I"],
[1.3, 2.0, ""],
]
tier = tgio.IntervalTier("test", userEntryList, 0.3, 2.0)
tg = tgio.Textgrid()
tg.addTier(tier)
self.assertRaises(AssertionError, run_save, tg, maxTimestamp=1.0)
def test_save_with_minimum_interval_length(self):
# The first entry will be stretched to fill the unlabeled region in
# front of it: [0.30, 0.35, ''] (The unlabeled region starts at 0.3
# instead of 0 because the minTimestamp for this tg is 0.3)
userEntryList = [[0.35, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = [
[0.3, 0.6, "A"],
[0.6, 0.8, ""],
[0.8, 1.0, "E"],
[1.0, 1.2, ""],
[1.2, 1.3, "I"],
[1.3, 2.0, ""],
]
tier = tgio.IntervalTier("test", userEntryList, 0.3, 2.0)
tg = tgio.Textgrid()
tg.addTier(tier)
actualEntryList = run_save(tg, minimumIntervalLength=0.06)
self.assertEqual(expectedEntryList, actualEntryList)
def test_save_with_ignore_blank_sections(self):
"""
Tests that blank sections can be ignored on saving a textgrid
"""
entryList = [[0.4, 0.6, "A"], [0.8, 1.0, "E"], [1.2, 1.3, "I"]]
expectedEntryList = entryList # Blank intervals should not be inserted
tier = tgio.IntervalTier("test", entryList)
tg = tgio.Textgrid()
tg.addTier(tier)
actualEntryList = run_save(tg, ignoreBlankSpaces=True)
self.assertEqual(expectedEntryList, actualEntryList)
if __name__ == "__main__":
unittest.main()
|
nilq/baby-python
|
python
|
from pyfluminus.authorization import vafs_jwt
from pyfluminus.api import name, modules, get_announcements
from pyfluminus.structs import Module
from flask import Flask, request, jsonify, redirect, url_for, render_template
import sys
from app import app, db, util
from app.models import User, User_Mods, Announcements, Mod_files
from app.extra_api import get_class_grps
import json
from sqlalchemy.orm.attributes import flag_modified
HTTP_OK = 200
HTTP_NO_CONTENT = 204
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORISED = 401
HTTP_NOT_FOUND = 404
@app.route('/')
def index():
return 'Main page'
# a simple page that says hello
@app.route('/hello')
def hello():
return 'Hello, World!'
@app.route('/get_class_grps', methods=['POST'])
def f():
mod_id = request.get_json()['mod_id']
auth = request.get_json()['auth']
return get_class_grps(auth, mod_id)
# receives login info and returns auth token, login info must be sent as application/json
@app.route('/login', methods=['POST'])
def login():
login_info = request.get_json()
print(login_info['userName']+'\n')
if login_info['userName'] == 'test':
auth = {'jwt' : 'test'}
return util.response_json(True, 1, auth), HTTP_OK
auth = vafs_jwt("nusstu\\" + login_info['userName'].upper(), login_info['password'])
if "error" in auth:
return util.response_json(False, 1, auth), HTTP_UNAUTHORISED
user_id = login_info['userName'].upper()
    if User.query.filter_by(nus_net_id=user_id).first() is None:
uName = name(auth).data
u = User(name = uName, nus_net_id = user_id)
#mods = util.get_active_mods(auth)
db.session.add(u)
db.session.commit()
uId = User.query.filter_by(nus_net_id=user_id).first().id
util.add_mods(auth, uId)
u = User.query.get(uId)
u.get_busy_time()
flag_modified(u, "timetable")
db.session.commit()
return util.response_json(True, 1, auth), HTTP_OK
@app.route('/name', methods=['POST'])
def userName():
try:
auth = request.get_json()
return util.response_json(True, 1, name(auth).data), HTTP_OK
    except Exception:
return util.response_json(False, 1, {"error" : "Invalid"}), HTTP_NOT_FOUND
@app.route('/updateProfile', methods=['POST'])
def updateProfile():
login_info = request.get_json()
auth = vafs_jwt("nusstu\\" + login_info['userName'], login_info['password'])
user_id = login_info['userName'].upper()
if "error" in auth:
return util.response_json(False, 1, auth), HTTP_UNAUTHORISED
    if User.query.filter_by(nus_net_id=user_id).first() is not None:
        uName = name(auth).data
        # db.update(...) only builds a statement without executing it; run the
        # update through the session so the new name is actually persisted.
        User.query.filter_by(nus_net_id=user_id).update({'name': uName})
        db.session.commit()
else:
uName = name(auth).data
u = User(name = uName, nus_net_id = user_id)
db.session.add(u)
db.session.commit()
if User.query.filter_by(nus_net_id=user_id).first().mods == []:
uId = User.query.filter_by(nus_net_id=user_id).first().id
util.add_mods(auth, uId)
else:
uId = User.query.filter_by(nus_net_id=user_id).first().id
util.update_mods(auth, uId)
u = User.query.filter_by(nus_net_id=user_id).first()
u.get_busy_time()
flag_modified(u, "timetable")
db.session.commit()
return redirect(url_for('profile', nusNetId=user_id))
@app.route('/activeModules', methods=['POST'])
def active_mods():
try:
auth = request.get_json()
mods = util.get_active_mods(auth)
return util.response_json(True, len(mods), mods), HTTP_OK
    except Exception:
return util.response_json(False, 1, {"error" : "Invalid"}), HTTP_NOT_FOUND
@app.route('/announcementsAll', methods = ['POST'])
def announcements():
try:
auth = request.get_json()
msgs = util.get_all_announcement(auth)
return util.response_json(True, len(msgs), msgs), HTTP_OK
    except Exception:
return util.response_json(False, 1, {"error" : "Invalid"}), HTTP_NOT_FOUND
@app.route('/profile/<nusNetId>')
def profile(nusNetId):
try:
user = User.query.filter_by(nus_net_id=nusNetId).first()
uId = user.id
mods = User_Mods.query.filter_by(student=uId).all()
mod_info = {}
for mod in mods:
mod_info[mod.code] = {"id" : mod.mod_id,
"name" : mod.name,
"term" : mod.term}
return util.response_json(True, len(mods), {
"name" : user.name,
"mods" : mod_info,
"timetable" : user.timetable}), HTTP_OK
    except Exception:
return util.response_json(False, 1, {"error" : "Not found"}), HTTP_NOT_FOUND
@app.route('/modules/filesAll', methods=['POST'])
def files_all():
auth = request.get_json()
files = util.get_mod_files(auth)
return util.response_json(True, len(files), files), HTTP_OK
@app.route('/modules/files', methods=['POST'])
def files():
auth = request.get_json()['auth']
code = request.get_json()['code']
files = json.dumps(util.get_single_mod_files(auth, code))
f = Mod_files(code=code, contents=files)
db.session.add(f)
db.session.commit()
return util.response_json(True, len(files), files), HTTP_OK
@app.route('/modules/announcements', methods=['POST'])
def announcements_single():
auth = request.get_json()['auth']
code = request.get_json()['code']
mod_id = User_Mods.query.filter_by(code=code).first().mod_id
msgs = util.get_single_mod_announcements(auth, mod_id)
m = Announcements(code=code, contents=msgs)
db.session.add(m)
db.session.commit()
return util.response_json(True, len(msgs), msgs), HTTP_OK
@app.route('/modules/announcementsTest', methods=['POST'])
def aTest():
#code = request.get_json()['code']
#reply = Announcements.query.filter_by(code=code).first().contents
#return util.response_json(True, len(reply), reply), HTTP_OK
auth = request.get_json()['auth']
code = request.get_json()['code']
mod_id = User_Mods.query.filter_by(code=code).first().mod_id
msgs = util.get_single_mod_announcements(auth, mod_id)
m = Announcements(code=code, contents=msgs)
db.session.add(m)
db.session.commit()
return util.response_json(True, len(msgs), msgs), HTTP_OK
@app.route('/modules/modFileTest', methods=['POST'])
def fTest():
#code = request.get_json()['code']
#reply = Mod_files.query.filter_by(code=code).first().contents
#return util.response_json(True, len(reply), reply), HTTP_OK
auth = request.get_json()['auth']
code = request.get_json()['code']
files = json.dumps(util.get_single_mod_files(auth, code))
f = Mod_files(code=code, contents=files)
db.session.add(f)
db.session.commit()
return util.response_json(True, len(files), files), HTTP_OK
|
nilq/baby-python
|
python
|
"""
Copyright (c) 2018-2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import pickle as pkl
import json
from .format_converter import DirectoryBasedAnnotationConverter, ConverterReturn
from ..representation import CharacterRecognitionAnnotation
from ..utils import read_txt, check_file_existence
from ..config import PathField
def read_vocab(vocab_path):
"""Reads vocab file from disk as .pkl or .json
Args:
vocab_path (str): path to vocab file
Raises:
ValueError: If wrong extension of the file
Returns:
Vocab: Vocab object with sign2id and id2sign dictinaries
"""
if vocab_path.suffix == '.pkl':
with open(vocab_path, "rb") as f:
vocab_dict = pkl.load(f)
elif vocab_path.suffix == '.json':
with open(vocab_path, "r") as f:
vocab_dict = json.load(f)
id2sign = {int(k): v for k, v in vocab_dict['id2sign'].items()}
vocab_dict['id2sign'] = id2sign
else:
raise ValueError("Wrong extension of the vocab file")
return vocab_dict["id2sign"]
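# Illustrative vocab layout (added): a JSON vocab is expected to look roughly
# like {"id2sign": {"0": "<pad>", "1": "\\frac", ...}, "sign2id": {...}};
# the string keys of id2sign are converted to ints before it is returned.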
class Im2latexDatasetConverter(DirectoryBasedAnnotationConverter):
__provider__ = 'im2latex_formula_recognition'
annotation_types = (CharacterRecognitionAnnotation, )
@classmethod
def parameters(cls):
configuration_parameters = super().parameters()
configuration_parameters.update(
{
'images_dir': PathField(
is_directory=True, optional=False,
description='path to input images'
),
'formula_file': PathField(
optional=True,
description='path to file containing one formula per line'
),
'split_file': PathField(
optional=True,
description='path to split containing image_name\\tformula_idx'
),
'vocab_file': PathField(
optional=True,
description='path to vocabulary'
),
}
)
return configuration_parameters
def configure(self):
super().configure()
self.images_dir = self.get_value_from_config('images_dir')
self.formula_path = self.get_value_from_config('formula_file')
self.split_path = self.get_value_from_config('split_file')
self.vocab_path = self.get_value_from_config('vocab_file')
def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):
"""Reads data from disk and returns dataset in converted for AC format
Args:
check_content (bool, optional): Check if content is valid. Defaults to False.
            progress_callback (callable, optional): Callback used to report progress. Defaults to None.
            progress_interval (int, optional): Number of processed lines between progress reports. Defaults to 100.
        Returns:
            ConverterReturn: Converted dataset
"""
annotations = []
content_errors = None if not check_content else []
split_file = read_txt(self.split_path)
formulas_file = read_txt(self.formula_path)
num_iterations = len(split_file)
vocab = read_vocab(self.vocab_path)
for line_id, line in enumerate(split_file):
img_name, formula_idx = line.split('\t')
gt_formula = formulas_file[int(formula_idx)]
annotations.append(CharacterRecognitionAnnotation(img_name, gt_formula))
if check_content:
if not check_file_existence(self.images_dir / img_name):
content_errors.append('{}: does not exist'.format(img_name))
if progress_callback is not None and line_id % progress_interval == 0:
progress_callback(line_id / num_iterations * 100)
meta = {'vocab': vocab}
return ConverterReturn(annotations, meta, content_errors)
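# Illustrative split-file line (added; tab-separated, as parsed in convert()):
#   0001.png<TAB>42
# meaning image "0001.png" is annotated with formula number 42 in formula_file.
# The file name "0001.png" is a made-up example.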
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
runserver.py
~~~~~~~~~~~~~
This code launches the backend webserver of moves using flask with eventlet
(for concurrency) and socket.io.
"""
from moves import app, socketio, r

# app.run() would block before the socket.io server ever started, so only
# socketio.run() is used here. PORT and DEBUG were undefined in the original
# file; the values below are assumptions.
PORT = 5000
DEBUG = True
socketio.run(app, host='0.0.0.0', port=PORT, debug=DEBUG)
|
nilq/baby-python
|
python
|
import torch
from models.MaskRCNN import get_model_instance_segmentation
from dataset import PennFudanDataset, get_transform
from references.engine import train_one_epoch, evaluate
from references import utils
# train on the GPU or the CPU, if a GPU is not available
device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
# our dataset has two classes only - background and person
num_classes = 2
# use our dataset and the defined transformations
dataset = PennFudanDataset('PennFudanPed', get_transform(train=True))
dataset_test = PennFudanDataset('PennFudanPed', get_transform(train=False))
# split the dataset in train and test set
indices = torch.randperm(len(dataset)).tolist()
dataset = torch.utils.data.Subset(dataset, indices[:-50])
dataset_test = torch.utils.data.Subset(dataset_test, indices[-50:])
# define training and validation data loaders
data_loader = torch.utils.data.DataLoader(
dataset, batch_size=2, shuffle=True, num_workers=4,
collate_fn=utils.collate_fn)
data_loader_test = torch.utils.data.DataLoader(
dataset_test, batch_size=1, shuffle=False, num_workers=4,
collate_fn=utils.collate_fn)
# get the model using our helper function
model = get_model_instance_segmentation(num_classes)
# move model to the right device
model.to(device)
# construct an optimizer
params = [p for p in model.parameters() if p.requires_grad]
optimizer = torch.optim.SGD(params, lr=0.005,
momentum=0.9, weight_decay=0.0005)
# and a learning rate scheduler
lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer,
step_size=3,
gamma=0.1)
# let's train it for 10 epochs
num_epoch = 10
for epoch in range(num_epoch):
# train for one epoch, printing every 10 iterations
train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq=10)
# update the learning rate
lr_scheduler.step()
# evaluate on the test dataset
evaluate(model, data_loader_test, device=device)
# Saving Model for Inference
torch.save(model.state_dict(), "dict.pth")
print("That's it!")
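# Inference sketch (added; assumes the same model definition as above):
#   model = get_model_instance_segmentation(num_classes)
#   model.load_state_dict(torch.load("dict.pth"))
#   model.to(device).eval()
#   with torch.no_grad():
#       # img: a CxHxW float tensor with values in [0, 1]
#       predictions = model([img.to(device)])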
|
nilq/baby-python
|
python
|
"""empty message
Revision ID: 8c7f8fa92c20
Revises: c925e4d07621
Create Date: 2018-08-17 13:09:27.720622
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '8c7f8fa92c20'
down_revision = 'c925e4d07621'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('registrants', sa.Column('ab_completed_at', sa.DateTime(), nullable=True))
op.add_column('registrants', sa.Column('ab_permanent', sa.Boolean(), nullable=True))
op.add_column('registrants', sa.Column('vr_completed_at', sa.DateTime(), nullable=True))
op.drop_column('registrants', 'completed_at')
op.drop_column('registrants', 'last_completed_step')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('registrants', sa.Column('last_completed_step', sa.INTEGER(), autoincrement=False, nullable=True))
op.add_column('registrants', sa.Column('completed_at', postgresql.TIMESTAMP(), autoincrement=False, nullable=True))
op.drop_column('registrants', 'vr_completed_at')
op.drop_column('registrants', 'ab_permanent')
op.drop_column('registrants', 'ab_completed_at')
# ### end Alembic commands ###
|
nilq/baby-python
|
python
|
import toml
t = toml.load("Cargo.toml")
crate_version = t['package']['version']
t = toml.load("pyproject.toml")
wheel_version = t['tool']['poetry']['version']
assert crate_version == wheel_version, \
    "version mismatch: Cargo.toml has {}, pyproject.toml has {}".format(crate_version, wheel_version)
print(crate_version)
|
nilq/baby-python
|
python
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from typing import Dict, Optional, Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch import Tensor
from torch.nn import Parameter
from fairseq import utils
from fairseq.modules.multihead_attention import MultiheadAttention
class RelativePositionEmbeddings(nn.Module):
"""
learned relative position embedding for self-attention with relative position of shaw et al
"""
def __init__(self, max_rel_positions, embedding_dim, dropout=0.0, direction=True, **params):
super().__init__()
self.window_size = max_rel_positions
self.embedding_dim = embedding_dim
self.direction = direction
num_embeddings = max_rel_positions * 2 + 1 if self.direction else max_rel_positions + 1
self.embeddings = nn.Embedding(num_embeddings=num_embeddings, embedding_dim=embedding_dim)
self.dropout = nn.Dropout(dropout)
    def map_to_index(self, distance, shift_to_zero=True):
        max_rel_len = self.window_size
        if max_rel_len is not None:
            distance = distance.clamp(-max_rel_len, max_rel_len)
        if self.direction:
            if shift_to_zero and max_rel_len is not None:
                distance = distance + max_rel_len
        else:
            distance = distance.abs()
        return distance
def forward(self, inputs):
"""
:param inputs: length, length, num_embeddings or length
:return:
"""
if inputs.dim() > 2:
embed = inputs @ self.embeddings.weight
embed = self.dropout(embed)
return embed
elif inputs.dim() == 2:
distance = inputs
else:
inputs = inputs.squeeze()
distance = inputs[:, None] - inputs[None, :]
distance = self.map_to_index(distance)
embed = self.embeddings(distance)
embed = self.dropout(embed)
return embed
def matmul(x, y):
if x.dim() == y.dim():
return x @ y
if x.dim() == y.dim() - 1:
return (x.unsqueeze(-2) @ y).squeeze(-2)
return (x @ y.unsqueeze(-1)).squeeze(-1)
def shaw_attention(query, key, pos_key):
"""
:param query:
:param key:
:param pos_key: length, length, depth
:return:
"""
bsize, heads, length, depth = key.size()
q_dot_k = matmul(query, key.contiguous().transpose(-1, -2)) # batch, heads, length, length
query_for_pos = query.contiguous().permute(2, 0, 1, 3).view(length, bsize * heads, depth)
pos_for_att = pos_key.contiguous().transpose(-2, -1) # length, depth, length
q_dot_p = matmul(query_for_pos, pos_for_att) # length, batch*heads, length
q_dot_p = q_dot_p.contiguous().permute(1, 0, 2).view(bsize, heads, length, length)
return q_dot_k + q_dot_p
def shaw_combine(probs, value, pos_val):
"""
:param probs:
:param value:
:param pos_val: length, length, depth
:return:
"""
bsize, heads, length, depth = value.size()
w_dot_v = matmul(probs, value) # batch, head, length, depth
w_for_comb = probs.contiguous().permute(2, 0, 1, 3).view(length, bsize * heads, length)
w_dot_p = matmul(w_for_comb, pos_val) # length,batch*heads, depth
w_dot_p = w_dot_p.contiguous().permute(1, 0, 2).view(bsize, heads, length, depth)
return w_dot_v + w_dot_p
class RelativeSelfAttention(MultiheadAttention):
"""Multi-headed attention with relative attentions.
See "Self Attention with relative positions" for more details.
"""
@classmethod
def relative_attention(cls, query, key, pos_key):
if pos_key.dim() == 3:
return shaw_attention(query, key, pos_key)
@classmethod
def relative_combine(cls, probs, value, pos_val):
if pos_val.dim() == 3:
return shaw_combine(probs, value, pos_val)
def forward(
self,
query,
key: Optional[Tensor],
value: Optional[Tensor],
pos_key=None,
pos_val=None,
key_padding_mask: Optional[Tensor] = None,
incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
need_weights: bool = True,
static_kv: bool = False,
attn_mask: Optional[Tensor] = None,
before_softmax: bool = False,
need_head_weights: bool = False,
**kwargs
) -> Tuple[Tensor, Optional[Tensor]]:
if need_head_weights:
need_weights = True
tgt_len, bsz, embed_dim = query.size()
assert embed_dim == self.embed_dim
assert list(query.size()) == [tgt_len, bsz, embed_dim]
if incremental_state is not None:
saved_state = self._get_input_buffer(incremental_state)
if saved_state is not None and "prev_key" in saved_state:
# previous time steps are cached - no need to recompute
# key and value if they are static
if static_kv:
assert self.encoder_decoder_attention and not self.self_attention
key = value = None
else:
saved_state = None
# self-attention
if self.self_attention:
q = self.q_proj(query)
k = self.k_proj(query)
v = self.v_proj(query)
else:
assert key is not None and value is not None
q = self.q_proj(query)
k = self.k_proj(key)
v = self.v_proj(value)
q *= self.scaling
if self.bias_k is not None:
assert self.bias_v is not None
k = torch.cat([k, self.bias_k.repeat(1, bsz, 1)])
v = torch.cat([v, self.bias_v.repeat(1, bsz, 1)])
if attn_mask is not None:
attn_mask = torch.cat([attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1)
if key_padding_mask is not None:
key_padding_mask = torch.cat(
[
key_padding_mask, key_padding_mask.new_zeros(key_padding_mask.size(0), 1),
],
dim=1,
)
q = q.contiguous().view(tgt_len, bsz * self.num_heads, self.head_dim).transpose(0, 1)
if k is not None:
k = (k.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1))
if v is not None:
v = v.contiguous().view(-1, bsz * self.num_heads, self.head_dim).transpose(0, 1)
if saved_state is not None:
# saved states are stored with shape (bsz, num_heads, seq_len, head_dim)
if "prev_key" in saved_state:
_prev_key = saved_state["prev_key"]
assert _prev_key is not None
prev_key = _prev_key.view(bsz * self.num_heads, -1, self.head_dim)
if static_kv:
k = prev_key
else:
assert k is not None
k = torch.cat([prev_key, k], dim=1)
if "prev_value" in saved_state:
_prev_value = saved_state["prev_value"]
assert _prev_value is not None
prev_value = _prev_value.view(bsz * self.num_heads, -1, self.head_dim)
if static_kv:
v = prev_value
else:
assert v is not None
v = torch.cat([prev_value, v], dim=1)
prev_key_padding_mask: Optional[Tensor] = None
if "prev_key_padding_mask" in saved_state:
prev_key_padding_mask = saved_state["prev_key_padding_mask"]
assert k is not None and v is not None
key_padding_mask = MultiheadAttention._append_prev_key_padding_mask(
key_padding_mask=key_padding_mask,
prev_key_padding_mask=prev_key_padding_mask,
batch_size=bsz,
src_len=k.size(1),
static_kv=static_kv,
)
saved_state["prev_key"] = k.view(bsz, self.num_heads, -1, self.head_dim)
saved_state["prev_value"] = v.view(bsz, self.num_heads, -1, self.head_dim)
saved_state["prev_key_padding_mask"] = key_padding_mask
# In this branch incremental_state is never None
assert incremental_state is not None
incremental_state = self._set_input_buffer(incremental_state, saved_state)
assert k is not None
src_len = k.size(1)
if key_padding_mask is not None and key_padding_mask.dim() == 0:
key_padding_mask = None
if key_padding_mask is not None:
assert key_padding_mask.size(0) == bsz
assert key_padding_mask.size(1) == src_len
if self.add_zero_attn:
assert v is not None
src_len += 1
k = torch.cat([k, k.new_zeros((k.size(0), 1) + k.size()[2:])], dim=1)
v = torch.cat([v, v.new_zeros((v.size(0), 1) + v.size()[2:])], dim=1)
if attn_mask is not None:
attn_mask = torch.cat(
[attn_mask, attn_mask.new_zeros(attn_mask.size(0), 1)], dim=1
)
if key_padding_mask is not None:
key_padding_mask = torch.cat(
[
key_padding_mask,
torch.zeros(key_padding_mask.size(0), 1).type_as(
key_padding_mask
),
],
dim=1,
)
attn_weights = self.relative_attention(
q.contiguous().view(bsz, self.num_heads, -1, self.head_dim),
k.contiguous().view(bsz, self.num_heads, -1, self.head_dim),
pos_key,
).contiguous().view(bsz * self.num_heads, tgt_len, src_len)
if attn_mask is not None:
attn_mask = attn_mask.unsqueeze(0)
if self.onnx_trace:
attn_mask = attn_mask.repeat(attn_weights.size(0), 1, 1)
attn_weights += attn_mask
if key_padding_mask is not None:
# don't attend to padding symbols
attn_weights = attn_weights.view(bsz, self.num_heads, tgt_len, src_len)
if not self.tpu:
attn_weights = attn_weights.masked_fill(
key_padding_mask.unsqueeze(1).unsqueeze(2).to(torch.bool),
float("-inf")
)
else:
attn_weights = attn_weights.transpose(0, 2)
attn_weights = attn_weights.masked_fill(key_padding_mask, float('-inf'))
attn_weights = attn_weights.transpose(0, 2)
attn_weights = attn_weights.view(bsz * self.num_heads, tgt_len, src_len)
if before_softmax:
return attn_weights, v
attn_weights_float = utils.softmax(
attn_weights, dim=-1, onnx_trace=self.onnx_trace
)
attn_weights = attn_weights_float.type_as(attn_weights)
attn_probs = self.dropout_module(attn_weights)
assert v is not None
attn = self.relative_combine(
probs=attn_probs.contiguous().view(bsz, self.num_heads, tgt_len, src_len),
value=v.contiguous().view(bsz, self.num_heads, -1, self.head_dim),
pos_val=pos_val
).contiguous().view(bsz * self.num_heads, -1, self.head_dim)
if self.onnx_trace and attn.size(1) == 1:
attn = attn.contiguous().view(tgt_len, bsz, embed_dim)
else:
attn = attn.transpose(0, 1).contiguous().view(tgt_len, bsz, embed_dim)
attn = self.out_proj(attn)
attn_weights: Optional[Tensor] = None
if need_weights:
attn_weights = attn_weights_float.view(
bsz, self.num_heads, tgt_len, src_len
).transpose(1, 0)
if not need_head_weights:
# average attention weights over heads
attn_weights = attn_weights.mean(dim=0)
return attn, attn_weights
class FFNAttention(nn.Module):
def __init__(self, input_dim, hidden_dim, bias=False):
super(FFNAttention, self).__init__()
self.input_dim = input_dim
self.hidden_dim = hidden_dim
self.q_proj = nn.Linear(input_dim, hidden_dim)
self.k_proj = nn.Conv1d(input_dim, hidden_dim, 1, 1)
self.out = nn.Linear(hidden_dim, 1, bias=bias)
self._inf = Parameter(torch.Tensor([-1e18]), requires_grad=False)
self.inf = None
        # Initialize the output scoring weights (the "V" vector) uniformly in [-1, 1]
nn.init.uniform_(self.out.weight, -1, 1)
def forward(self, query, key, mask=None):
query = self.q_proj(query).unsqueeze(2).expand(-1, -1, key.size(1)) # (batch, hidden, seq_len)
key = key.permute(0, 2, 1) # (batch, hidden, seq_len)
key = self.k_proj(key) # (batch, hidden, seq_len)
attn_weight = self.out((query + key).permute(0, 2, 1)).squeeze(-1) # (batch, seq_len)
if mask is not None and len(attn_weight[mask]) > 0:
attn_weight[mask] = self.inf[mask]
attn_prob = attn_weight.softmax(dim=-1)
attn = torch.bmm(key, attn_prob.unsqueeze(2)).squeeze(2)
return attn, attn_weight
def init_inf(self, mask_size):
self.inf = self._inf.unsqueeze(1).expand(*mask_size)
class DotProductAttention(nn.Module):
""" Attention model for Pointer-Net """
def __init__(self, ninp, nhid):
"""
Initiate Attention
:param int ninp: Input's diamention
:param int nhid: Number of hidden units in the attention
"""
super(DotProductAttention, self).__init__()
self.input_dim = ninp
self.hidden_dim = nhid
self.input_linear = nn.Linear(ninp, nhid)
self.context_linear = nn.Conv1d(ninp, nhid, 1, 1)
self.V = Parameter(torch.FloatTensor(nhid), requires_grad=True)
self._inf = Parameter(torch.FloatTensor([-1e18]), requires_grad=False)
self.tanh = nn.Tanh()
self.softmax = nn.Softmax(dim=-1)
self.inf = None
# Initialize vector V
nn.init.uniform_(self.V, -1, 1)
def forward(self, inputs, context, mask):
"""
Attention - Forward-pass
:param Tensor inputs: Hidden state h
:param Tensor context: Attention context
:param ByteTensor mask: Selection mask
        :return: tuple of (attended hidden state, attention weights)
"""
# (batch, hidden_dim, seq_len)
inp = self.input_linear(inputs).unsqueeze(2).expand(-1, -1, context.size(1))
# (batch, hidden_dim, seq_len)
context = context.permute(0, 2, 1)
ctx = self.context_linear(context)
# (batch, 1, hidden_dim)
V = self.V.unsqueeze(0).expand(context.size(0), -1).unsqueeze(1)
# (batch, seq_len)
attn_weight = torch.bmm(V, self.tanh(inp + ctx)).squeeze(1)
if mask is not None and len(attn_weight[mask]) > 0:
attn_weight[mask] = self.inf[mask]
attn_prob = self.softmax(attn_weight)
attn = torch.bmm(ctx, attn_prob.unsqueeze(2)).squeeze(2)
return attn, attn_weight
def init_inf(self, mask_size):
self.inf = self._inf.unsqueeze(1).expand(*mask_size)
class FeedForward(nn.Module):
def __init__(self, d_model, d_hidden, dropout=0.0):
super().__init__()
self.input_to_hidden = nn.Linear(d_model, d_hidden)
self.hidden_to_output = nn.Linear(d_hidden, d_model)
self.dropout = nn.Dropout(dropout)
def forward(self, inputs):
h = F.relu(self.input_to_hidden(inputs))
h = self.dropout(h)
return self.hidden_to_output(h)
|
nilq/baby-python
|
python
|
import nltk, json, pickle
from HelperFunctions import find_author
class MessageSentiment:
"""Generate mood sentiments for messages"""
MINIMUM_CERTAINTY_PROBABILITY = 0.85
TRAINING_SET_SIZE = 5000
    try:
        STOP_WORDS = set(nltk.corpus.stopwords.words('english'))
    except LookupError:
        # stopwords corpus not downloaded yet; fetch it once and retry
        nltk.download('stopwords')
        STOP_WORDS = set(nltk.corpus.stopwords.words('english'))
def __init__(self, training_size = 5000):
"""Generates the classifier for NB analysis of messages"""
self.TRAINING_SET_SIZE = training_size
self.tweets = self.make_tweets()
self.word_features = self.make_word_features()
self.classifier = self.get_saved_classifier()
if self.classifier is None:
# Must generate new classifier
            self.classifier = self.generate_classifier_from_twitter()
self.save_classifier()
def make_tweets(self):
raw_tweets = []
with open('negative_tweets.json') as txt:
for line in txt.readlines()[:self.TRAINING_SET_SIZE]:
tup = (json.loads(line)['text'], 'negative')
raw_tweets.append(tup)
with open('positive_tweets.json') as txt:
for line in txt.readlines()[:self.TRAINING_SET_SIZE]:
tup = (json.loads(line)['text'], 'positive')
raw_tweets.append(tup)
# Combine negative and positive tweets
parsed_tweets = []
for (words, sentiment) in raw_tweets:
words_filtered = [e.lower() for e in words.split() if self.is_real_word(e)]
parsed_tweets.append((words_filtered, sentiment))
# Make and return word features
return parsed_tweets
def is_real_word(self, word):
return len(word) >= 3 #and word not in self.STOP_WORDS
def make_word_features(self):
wordlist = []
for (words, sentiment) in self.tweets:
wordlist.extend(words)
return nltk.FreqDist(wordlist).keys()
    def get_saved_classifier(self):
        """Return the memoized classifier, or None if no usable pickle exists"""
        try:
            with open("classifier.pkl", "rb") as pkl_db:
                memoized_data = pickle.load(pkl_db)
                if memoized_data['classifier'] is not None:
                    return memoized_data['classifier']
        except (IOError, EOFError, KeyError, pickle.UnpicklingError):
            # Missing or corrupt pickle; fall through and regenerate.
            pass
        print("Saved classifier not found. Regenerating classifier...")
        return None
    def generate_classifier_from_twitter(self):
print("Generating training set...")
training_set = nltk.classify.apply_features(self.extract_features, self.tweets)
return nltk.NaiveBayesClassifier.train(training_set)
def extract_features(self, document):
document_words = set(document)
features = {}
for word in self.word_features:
features['contains(%s)' % word] = (word in document_words)
return features
def save_classifier(self):
with open("classifier.pkl", "wb") as pkl_db:
print('Pickling classifier')
pickle.dump({'classifier': self.classifier}, pkl_db)
def classify_text(self, text_features):
prob = self.classifier.prob_classify(text_features)
(prob_pos, prob_neg) = prob.prob('positive'), prob.prob('negative')
if prob_neg > self.MINIMUM_CERTAINTY_PROBABILITY:
classification = "negative"
elif prob_pos > self.MINIMUM_CERTAINTY_PROBABILITY:
classification = "positive"
else:
classification = "neutral"
return (classification, max(prob_neg, prob_pos))
def get_mood(self, text):
parsed_text = [word for word in text.split() if self.is_real_word(word)]
return self.classify_text(self.extract_features(parsed_text))
|
nilq/baby-python
|
python
|
import os, cv2, shutil
import numpy as np
import argparse
def read_coords(coord_file):
coord_data, inds_pos = [], []
assert os.path.exists(coord_file), "File does not exist! %s"%coord_file
with open(coord_file, 'r') as f:
for ind, line in enumerate(f.readlines()):
x_coord = int(line.strip().split(',')[0])
y_coord = int(line.strip().split(',')[1])
if x_coord > 0 and y_coord > 0:
inds_pos.append(ind)
coord_data.append([x_coord, y_coord])
coord_data = np.array(coord_data, dtype=np.int32)
inds_pos = np.array(inds_pos, dtype=np.int32)
return coord_data, inds_pos
def write_coords(coord_data, frame_ids, coord_file):
coord_dir = os.path.dirname(coord_file)
if not os.path.exists(coord_dir):
os.makedirs(coord_dir)
with open(coord_file, 'w') as f:
for i in frame_ids:
x_coord = int(coord_data[i, 0])
y_coord = int(coord_data[i, 1])
f.writelines('%d,%d\n'%(x_coord, y_coord))
def reduce_video(src_file, dst_file, ratio, frame_ids):
assert os.path.exists(src_file), "File does not exist! %s"%src_file
video_dir = os.path.dirname(dst_file)
if os.path.exists(dst_file):
return
if not os.path.exists(video_dir):
os.makedirs(video_dir)
# video capture of src video
cap_src = cv2.VideoCapture(src_file)
ret, frame = cap_src.read()
ind = 0
# dest capture
dst_size = (int(frame.shape[1] * ratio), int(frame.shape[0] * ratio)) # (width, height)
cap_dst = cv2.VideoWriter(dst_file, cv2.VideoWriter_fourcc(*'XVID'), 30, dst_size)
    while ret:
        if ind in frame_ids:
            frame_resize = cv2.resize(frame, dst_size)
            cap_dst.write(frame_resize)
        # read next frame
        ret, frame = cap_src.read()
        ind += 1
    # release the capture and writer so the output file is finalized
    cap_src.release()
    cap_dst.release()
def reduce_data(data_path, ratio, max_frames, subset, result_path):
# the input path
coord_path_src = os.path.join(data_path, subset, 'coordinate')
focus_path_src = os.path.join(data_path, subset, 'focus_videos')
salmap_path_src = os.path.join(data_path, subset, 'salmap_videos')
video_path_src = os.path.join(data_path, subset, 'rgb_videos')
    # the output path
coord_path_dst = os.path.join(result_path, subset, 'coordinate')
focus_path_dst = os.path.join(result_path, subset, 'focus_videos')
salmap_path_dst = os.path.join(result_path, subset, 'salmap_videos')
video_path_dst = os.path.join(result_path, subset, 'rgb_videos')
for accID in sorted(os.listdir(coord_path_src)):
txtfile_dir = os.path.join(coord_path_src, accID)
for filename in sorted(os.listdir(txtfile_dir)):
coord_file_src = os.path.join(txtfile_dir, filename)
coord_data, inds_pos = read_coords(coord_file_src)
if inds_pos.shape[0] == 0:
continue # ignore videos without any accident
# remove the frames after accident ends
video_end = min(inds_pos[-1] + 1 + 16, coord_data.shape[0])
video_start = max(0, video_end - max_frames)
frame_ids = np.arange(video_start, video_end)
vid = filename.split('_')[0]
print("Processing the video: %s/%s, # frames: %d"%(accID, vid, len(frame_ids)))
# resize & write coords
coord_file_dst = os.path.join(coord_path_dst, accID, filename)
write_coords(ratio * coord_data, frame_ids, coord_file_dst)
# read focus videos
focus_video_src = os.path.join(focus_path_src, accID, vid + '.avi')
focus_video_dst = os.path.join(focus_path_dst, accID, vid + '.avi')
reduce_video(focus_video_src, focus_video_dst, ratio, frame_ids)
# read salmap videos
salmap_video_src = os.path.join(salmap_path_src, accID, vid + '.avi')
salmap_video_dst = os.path.join(salmap_path_dst, accID, vid + '.avi')
reduce_video(salmap_video_src, salmap_video_dst, ratio, frame_ids)
# read rgb videos
rgb_video_src = os.path.join(video_path_src, accID, vid + '.avi')
rgb_video_dst = os.path.join(video_path_dst, accID, vid + '.avi')
reduce_video(rgb_video_src, rgb_video_dst, ratio, frame_ids)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Reduce the size of DADA-2000')
parser.add_argument('--data_path', default="./DADA-2000",
help='Directory to the original DADA-2000 folder.')
parser.add_argument('--result_path', default="./DADA-2000-small",
help='Directory to the result DADA-2000 folder.')
args = parser.parse_args()
ratio = 0.5
max_frames = 450 # for fps=30, the maxtime=20 s after clipped
if not os.path.exists(args.result_path):
os.makedirs(args.result_path)
reduce_data(args.data_path, ratio, max_frames, 'training', args.result_path)
reduce_data(args.data_path, ratio, max_frames, 'testing', args.result_path)
reduce_data(args.data_path, ratio, max_frames, 'validation', args.result_path)
|
nilq/baby-python
|
python
|
# Type of the message
FIELD_MSGTYPE = "t"
MSG_OP = 1 # This is an operation
MSG_REPLY = 2 # This is a regular reply
MSG_EXCEPTION = 3 # This is an exception
MSG_CONTROL = 4 # This is a control message
MSG_INTERNAL_ERROR = 5 # Some internal error happened
# Fields for operations/control
FIELD_OPTYPE = "o"
FIELD_TARGET = "o_t"
FIELD_ARGS = "o_a"
FIELD_KWARGS = "o_ka"
# Fields for reply/exception
FIELD_CONTENT = "c"
# Fields for values
# Indicates that the object is remote to the receiver (and local to the sender)
VALUE_REMOTE = 1
# Indicates that the object is local to the receiver (and remote to the sender)
VALUE_LOCAL = 2
# Operations that we support
OP_GETATTR = 1
OP_SETATTR = 2
OP_DELATTR = 3
OP_CALL = 4
OP_CALLATTR = 5
OP_REPR = 6
OP_STR = 7
OP_HASH = 9
OP_PICKLE = 10
OP_DEL = 11
OP_GETMETHODS = 12
OP_DIR = 13
OP_CALLFUNC = 14
OP_GETVAL = 15
OP_SETVAL = 16
OP_INIT = 17
OP_CALLONCLASS = 18
# Control messages
CONTROL_SHUTDOWN = 1
CONTROL_GETEXPORTS = 2
|
nilq/baby-python
|
python
|
import logging
from typing import List, Union, Iterable
from matplotlib.pyplot import Figure
import matplotlib.ticker as mtick
import pandas as pd
from py_muvr.permutation_test import PermutationTest
from matplotlib import pyplot as plt
from py_muvr.data_structures import FeatureSelectionResults
log = logging.getLogger(__name__)
class PALETTE:
lightblue = "#deebf7"
blue = "#3182bd"
black = "black"
white = "white"
grey = "grey"
lightgrey = "#9facbd"
def plot_validation_curves(
feature_selection_results: FeatureSelectionResults, **figure_kwargs
) -> plt.Figure:
curves = feature_selection_results.score_curves
plt.figure(**figure_kwargs)
for i, curve in enumerate(curves["outer_loops"]):
label = "Outer loop average" if i == 0 else None
plt.semilogx(curve.n_features, curve.scores, c=PALETTE.lightblue, label=label)
for i, curve in enumerate(curves["repetitions"]):
label = "Repetition average" if i == 0 else None
plt.semilogx(curve.n_features, curve.scores, c=PALETTE.blue, label=label)
for i, curve in enumerate(curves["total"]):
label = "Total average" if i == 0 else None
plt.semilogx(curve.n_features, curve.scores, c=PALETTE.black, label=label)
min_y, max_y = plt.gca().get_ylim()
selected_features = feature_selection_results.selected_features
for attribute in ["min", "max", "mid"]:
n_feats = len(getattr(selected_features, attribute))
plt.vlines(
n_feats,
min_y,
max_y,
linestyle="--",
colors=PALETTE.grey,
lw=2,
label=attribute,
zorder=100000,
)
plt.xlabel("# features")
plt.ylabel("Fitness score")
plt.grid(ls=":")
plt.legend(bbox_to_anchor=(1.05, 1), loc="upper left", borderaxespad=0)
return plt.gcf()
def plot_feature_rank(
feature_selection_results: FeatureSelectionResults,
model: str,
feature_names: List[str] = None,
show_outliers: bool = True,
**figure_kwargs,
) -> Figure:
if model not in {"min", "max", "mid"}:
raise ValueError("The model parameter must be one of 'min', 'max' or 'mid'.")
eval_attr = model + "_eval"
feats_attr = model
ranks = []
for r in feature_selection_results.raw_results:
for ol in r:
ranks_raw_data = getattr(ol, eval_attr).ranks.get_data()
ranks.append(ranks_raw_data)
selected_features = feature_selection_results.selected_features
best = getattr(selected_features, feats_attr)
selected_ranks = pd.DataFrame(r for r in ranks)[best]
sorted_feats = selected_ranks.mean().sort_values().index
selected_ranks = selected_ranks[sorted_feats]
if "figsize" not in figure_kwargs.keys():
fig_width = len(selected_ranks.columns) / 3
figure_kwargs["figsize"] = (6, max(fig_width, 5))
fig, (ax_ranks, ax_notnan) = plt.subplots(
nrows=1, ncols=2, sharey=True, **figure_kwargs
)
ax_notnan.xaxis.set_major_formatter(mtick.PercentFormatter())
ax_notnan.set_ylabel("Feature")
ax_notnan.set_xlabel("Percentage of times selected")
ax_ranks.set_xlabel("Feature Rank")
for ax in [ax_notnan, ax_ranks]:
ax.grid(linestyle=":", zorder=0)
ax.tick_params(axis="x")
ax.xaxis.tick_top()
ax.xaxis.set_label_position("top")
bbox_props = {
"color": PALETTE.blue,
"alpha": 0.8,
}
bbox_color = {"boxes": PALETTE.blue, "medians": PALETTE.black}
if feature_names is not None:
feature_numbers = range(len(feature_names))
numbers_to_names = dict(zip(feature_numbers, feature_names))
selected_ranks.rename(columns=numbers_to_names, inplace=True)
selected_ranks.boxplot(
positions=range(len(selected_ranks.columns)),
color=bbox_color,
patch_artist=True,
ax=ax_ranks,
boxprops=bbox_props,
vert=False,
showfliers=show_outliers,
)
(selected_ranks.notna().mean() * 100).plot.barh(
facecolor=PALETTE.lightgrey,
ax=ax_notnan,
edgecolor=PALETTE.black,
grid=True,
alpha=0.8,
)
    ax_notnan.invert_yaxis()  # the y-axis is shared, so this inverts both subplots
fig.tight_layout() # otherwise the right y-label is slightly clipped
return fig
def plot_permutation_scores(
permutation_test: PermutationTest,
model: str,
bins: Union[int, str, Iterable[float]] = "auto",
**fig_kwargs,
) -> Figure:
score, perm_scores = permutation_test.compute_permutation_scores(model)
p_value = permutation_test.compute_p_values(model, ranks=False)
fig, ax = plt.subplots(1, 1, **fig_kwargs)
ax.grid(linestyle=":", zorder=0)
counts, _, _ = ax.hist(
perm_scores,
bins=bins,
alpha=0.8,
edgecolor=PALETTE.white,
facecolor=PALETTE.blue,
label="Permutation Scores",
zorder=10,
)
ax.vlines(
score,
ymin=0,
ymax=counts.max(),
color=PALETTE.black,
label="Feature Selection Score",
zorder=20,
)
ax.set_ylabel("Number of Occurrences")
ax.set_xlabel("Score")
ax.legend(bbox_to_anchor=(1.05, 1), loc="upper left", borderaxespad=0)
ax.set_title("Feature selection p-value = %1.3g" % p_value)
return fig
|
nilq/baby-python
|
python
|
# WEATHER RETRIEVING MICROSERVICE
# By: Cody Jennette
# CS 361 - Software Engineering I
# jennettc@oregonstate.edu
import requests
import urllib.request
# Program fetches local machine's external IP address, later used for current location:
external_ip = urllib.request.urlopen('https://ident.me').read().decode('utf8')
# get_location function saves local machine's city, country, latitude, and longitude as a tuple.
def get_location(ip):
access_key = "9c0df8c38ae552d45174ea3dc2454c18"
base_url = "http://api.ipstack.com/"
full_url = str(base_url) + str(ip) + "?access_key=" + str(access_key)
response = requests.get(full_url)
loc_info = response.json()
country = loc_info["country_code"]
city = loc_info["city"]
lat = loc_info["latitude"]
lon = loc_info["longitude"]
return lat, lon, city, country # Tuple is later unpacked for proper output display
def get_weather(latitude, longitude): # Latitude and longitude from tuple used to get weather conditions
key = "20184d8f0b1ac6a9146bc617163b1c64"
url_weather = "http://api.openweathermap.org/data/2.5/weather"
params = {"lat": latitude, "lon": longitude, "appid": key, "units": "imperial"}
output = requests.get(url_weather, params=params)
output_json = output.json()
desc = output_json["weather"][0]["description"]
temp = output_json["main"]["temp"]
return desc, temp # Last part of the output, saved as a tuple and later unpacked
def display_output(location_name, description, temperature): # Function to properly display output
display = "City: %s \nConditions: %s \nTemperature (°F): %s" % (location_name, description, temperature)
return display
location = get_location(external_ip)
(lat, lon, city, country) = location
full_city = str(city + ", " + country)
wea_info = get_weather(lat, lon)
(desc, temp) = wea_info
# full_city and wea_info tuples unpacked, then displayed by display_output function:
final = display_output(full_city, desc, temp)
print(final)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# pylint: disable=no-member,invalid-name,duplicate-code
"""
REST API Documentation for the NRS TFRS Credit Trading Application
The Transportation Fuels Reporting System is being designed to streamline
compliance reporting for transportation fuel suppliers in accordance with
the Renewable & Low Carbon Fuel Requirements Regulation.
OpenAPI spec version: v1
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
import json
import logging
from typing import Callable
from collections import namedtuple, defaultdict
from enum import Enum
from api.models.CreditTrade import CreditTrade
from api.models.CreditTradeStatus import CreditTradeStatus
class CreditTradeRelationshipMixin(object):
"""
Mixin to provide user mapping for related parties to credit transactions
"""
class UserRelationship(Enum):
"""
Enumerates the ways in which a client (user) can be related to a
credit trade
"""
INITIATOR = 1
RESPONDENT = 2
THIRD_PARTY = 3
GOVERNMENT_ANALYST = 4
GOVERNMENT_DIRECTOR = 5
user_map = {
UserRelationship.INITIATOR: 'fs_user_1',
UserRelationship.RESPONDENT: 'fs_user_2',
UserRelationship.THIRD_PARTY: 'fs_user_3',
UserRelationship.GOVERNMENT_ANALYST: 'gov_analyst',
UserRelationship.GOVERNMENT_DIRECTOR: 'gov_director'
}
class CreditTradeFlowHooksMixin(object):
ChangeRecord = namedtuple('ChangeRecord', [
'trade_id',
'requesting_username',
'relationship',
'expected_to_be_successful',
'data_before_request',
'data_after_request',
'response_code'
])
PreChangeRecord = namedtuple('PreChangeRecord', [
'trade_id',
'current_status',
'rescinded',
'status_change'
])
StatusChange = namedtuple('StatusChange', [
'relationship',
'status',
'rescinded'
])
def _sensible_status_changes(self, current_status, rescinded):
"""
Return a list of valid potential status changes for a given starting
state
"""
status_changes = defaultdict(lambda: [])
status_changes[('Draft', False)] = [
self.StatusChange(self.UserRelationship.INITIATOR,
'Submitted', False),
self.StatusChange(self.UserRelationship.INITIATOR,
'Cancelled', False)
]
status_changes[('Submitted', False)] = [
self.StatusChange(self.UserRelationship.INITIATOR,
'Submitted', True), # rescind
self.StatusChange(self.UserRelationship.RESPONDENT,
'Accepted', False),
self.StatusChange(self.UserRelationship.RESPONDENT,
'Refused', False)
]
status_changes[('Accepted', False)] = [
self.StatusChange(self.UserRelationship.INITIATOR,
'Accepted', True), # rescind
self.StatusChange(self.UserRelationship.RESPONDENT,
'Accepted', True), # rescind
self.StatusChange(self.UserRelationship.GOVERNMENT_ANALYST,
'Recommended', False),
self.StatusChange(self.UserRelationship.GOVERNMENT_ANALYST,
'Not Recommended', False)
]
status_changes[('Recommended', False)] = [
self.StatusChange(self.UserRelationship.INITIATOR,
'Recommended', True), # rescind
self.StatusChange(self.UserRelationship.RESPONDENT,
'Recommended', True), # rescind
self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR,
'Approved', False),
self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR,
'Declined', False)
]
status_changes[('Not Recommended', False)] = [
self.StatusChange(self.UserRelationship.INITIATOR,
'Not Recommended', True), # rescind
self.StatusChange(self.UserRelationship.RESPONDENT,
'Not Recommended', True), # rescind
self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR,
'Approved', False),
self.StatusChange(self.UserRelationship.GOVERNMENT_DIRECTOR,
'Declined', False)
]
return status_changes[(current_status, rescinded)]
    def _path_builder(self, node, path=None, valid_paths=None):
        """
        Recursively build an array of valid paths through the status tree.
        Mutable default arguments are avoided so repeated calls do not share
        accumulated state.
        """
        if path is None:
            path = []
        if valid_paths is None:
            valid_paths = []
s = self._sensible_status_changes(node.status, node.rescinded)
is_leaf = not s
path = path + [node]
if is_leaf:
valid_paths.append(path) # end of the line
for branch in s:
self._path_builder(branch, path, valid_paths)
return valid_paths
def check_credit_trade_workflow(
self,
before_change_callback: Callable[[PreChangeRecord], None] = lambda x: None,
after_change_callback: Callable[[ChangeRecord], None] = lambda x: None,
path_end_callback: Callable[[], None] = lambda: None,
modify_request_payload: Callable[[dict], None] = lambda x: None
):
"""
Evaluate all normal status paths through the application via
REST API as appropriate users
with callbacks for tests:
before_change_callback called just before a status change.
Initial status and trade_id may be None
after_change_callback called after a change
data_before_request can be None if this was a creation
path_end_callback called when this pathway is done
(another will begin unless this was the last)
"""
initiating_org = self.users[
self.user_map[
self.UserRelationship.INITIATOR
]].organization
responding_org = self.users[
self.user_map[
self.UserRelationship.RESPONDENT
]].organization
payload = {
'fairMarketValuePerCredit': 1,
'initiator': initiating_org.id,
'numberOfCredits': 1,
'respondent': responding_org.id,
'tradeEffectiveDate': datetime.datetime.today().strftime('%Y-%m-%d'),
'type': self.credit_trade_types['sell'].id,
'zeroReason': None
}
valid_paths = (self._path_builder(
self.StatusChange(self.UserRelationship.INITIATOR, 'Draft', False)
))
for path in valid_paths:
logging.debug('evaluating path: {}'.format(
'\n'.join(
[
'{} sets status to {} and rescinded to {}'.format(
c.relationship, c.status, c.rescinded) for c in path
]
)))
trade_id = None
response_data = None
for node in path:
before_change_callback(self.PreChangeRecord(
trade_id,
CreditTrade.objects.filter(
id=trade_id
).first().status.status if trade_id else None,
CreditTrade.objects.filter(
id=trade_id
).first().is_rescinded if trade_id else None,
node
))
payload['status'] = CreditTradeStatus.objects.get_by_natural_key(node.status).id
payload['is_rescinded'] = node.rescinded
modify_request_payload(payload)
if not trade_id:
response = self.clients[self.user_map[node.relationship]].post(
'/api/credit_trades',
content_type='application/json',
data=json.dumps(payload)
)
else:
response = self.clients[self.user_map[node.relationship]].put(
'/api/credit_trades/{}'.format(trade_id),
content_type='application/json',
data=json.dumps(payload)
)
previous_response_data = response_data
response_data = json.loads(response.content.decode('utf-8'))
trade_id = response_data['id'] if 'id' in response_data else trade_id
after_change_callback(self.ChangeRecord(
trade_id,
self.user_map[node.relationship],
node.relationship,
True,
previous_response_data,
response_data,
response.status_code
))
path_end_callback()
|
nilq/baby-python
|
python
|
import time
import requests
import threading
from filibuster.logger import debug
TIMEOUT_ITERATIONS = 100
SLEEP = 1
def num_services_running(services):
num_running = len(services)
for service in services:
if not service_running(service):
debug("! service " + service + " not yet running!")
num_running -= 1
return num_running
def wait_for_num_services_running(services, num_running, waiting_message):
timeout = TIMEOUT_ITERATIONS
while num_services_running(services) != num_running:
debug("Filibuster server waiting for {} to {}.".format(services, waiting_message))
debug("=> num_running: " + str(num_running))
debug("=> num_services_running(services): " + str(num_services_running(services)))
time.sleep(SLEEP)
timeout -= 1
if timeout == 0:
debug("Filibuster server timed out waiting for {} to {}.".format(services, waiting_message))
exit(1)
def wait_for_services_to_stop(services):
wait_for_num_services_running(services, 0, "stop")
def wait_for_services_to_start(services):
wait_for_num_services_running(services, len(services), "start")
def service_running(service):
name = service[0]
host = service[1]
port = service[2]
base_uri = "http://{}:{}".format(host, str(port))
# Jaeger will pass the health check only because health-check reroutes to /search.
debug("checking service's health-check: " + name)
try:
        response = requests.get(
            "{}/health-check".format(base_uri), timeout=60)
if response.status_code == 200:
return True
else:
return False
except requests.exceptions.ConnectionError:
debug("! connection error")
return False
except requests.exceptions.Timeout:
debug("! timeout")
return False
def start_filibuster_server_thread(app):
class Server(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
def run(self):
app.run(port=5005, host="0.0.0.0")
server_thread = Server()
    server_thread.daemon = True
server_thread.start()
|
nilq/baby-python
|
python
|
class ParkingSystem(object):
def __init__(self, big, medium, small):
"""
:type big: int
:type medium: int
:type small: int
"""
self.lot = {
1: [big,0],
2: [medium,0],
3: [small,0]
}
def addCar(self, carType):
"""
:type carType: int
:rtype: bool
"""
if self.lot[carType][1] < self.lot[carType][0]:
self.lot[carType][1] += 1
return True
else:
return False
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Formatter for the shell item events."""
from plaso.formatters import interface
class ShellItemFileEntryEventFormatter(interface.ConditionalEventFormatter):
"""Class that formats Windows volume creation events."""
DATA_TYPE = 'windows:shell_item:file_entry'
FORMAT_STRING_PIECES = [
u'Name: {name}',
u'Long name: {long_name}',
u'Localized name: {localized_name}',
u'NTFS file reference: {file_reference}',
u'Origin: {origin}']
FORMAT_STRING_SHORT_PIECES = [
u'Name: {name}',
u'NTFS file reference: {file_reference}',
u'Origin: {origin}']
SOURCE_LONG = 'File entry shell item'
SOURCE_SHORT = 'FILE'
|
nilq/baby-python
|
python
|
# test_hello_add.py
from API import app
from flask import json
def test_predict():
response = app.test_client().post(
'/predict',
data=json.dumps({"gender":["Male"],
"SeniorCitizen":["0"],
"Partner":["0"],
"Dependents":["0"],
"tenure":["-0.223317"],
"MultipleLines":["-0.508112"],
"InternetService":["No"],
"Contract":["Month-to-month"],
"PaperlessBilling":["1"],
"PaymentMethod":["Electronic check"],
"MonthlyCharges":["-1.512322"],
"AddServices":["-1.757234"]
}),
content_type='application/json',
)
data = json.loads(response.get_data(as_text=True))
assert response.status_code == 200
assert data['sum'] == 3
print(data)
|
nilq/baby-python
|
python
|
import argparse
import marshal
import os
import py_compile
from importlib import import_module
from pathlib import Path
from zipfile import ZipFile, PyZipFile
from Crypto.Cipher import AES
from loaders import register, PycZimpLoader, PyZimpLoader
def get_key(path):
if path is None:
return None
with open(path, "rb") as file:
return file.read()
class ZimpCreator:
def __init__(self, name, key, optimize, is_pyc):
self.name = name
self.key = key
self.optimize = optimize
self.is_pyc = is_pyc
def walk_pyc(self):
with PyZipFile(self.name + ".zip", 'w', optimize=self.optimize) as zimpfile:
zimpfile.writepy(self.name)
@staticmethod
def _get_data(path):
raise NotImplementedError("Use subclass")
def _encrypt(self, data):
if self.key is None:
return data
cipher = AES.new(self.key, AES.MODE_EAX)
nonce = cipher.nonce
encrypted_data, tag = cipher.encrypt_and_digest(data)
return b"".join((nonce, tag, encrypted_data))
def run(self):
with ZipFile(self.name + ".zip", 'w') as zimpfile:
# Iterate over all the files in directory
for folder_name, subfolders, filenames in os.walk(self.name):
for filename in filenames:
file_path = os.path.join(folder_name, filename)
if filename.endswith(".py"):
zimpfile.writestr(file_path + "c", self._encrypt(self._get_data(file_path)))
class PyZimpCreator(ZimpCreator):
@staticmethod
def _get_data(path):
with open(path, "rb") as file:
return file.read()
class PycZimpCreator(ZimpCreator):
def _get_data(self, path):
pycpath = py_compile.compile(path, optimize=self.optimize)
with open(pycpath, "rb") as pycfile:
return pycfile.read()
class ZimpRunner:
def __init__(self, name, key):
self.name = name
self.key = key
def _get_loader(self):
raise NotImplementedError("Use subclass")
def run(self):
register(self._get_loader())
import_module(self.name)
class PyZimpRunner(ZimpRunner):
def _get_loader(self):
return PyZimpLoader({self.name: self.key})
class PycZimpRunner(ZimpRunner):
def __init__(self, name, key, marshal_offset):
super().__init__(name, key)
self.marshal_offset = marshal_offset
def _get_loader(self):
return PycZimpLoader({self.name: self.key}, self.marshal_offset)
def find_marshal():
py_name = "__test_marshal.py"
pyc_name = py_name + "c"
try:
open(py_name, "wb").close()
py_compile.compile(py_name, pyc_name)
with open(pyc_name, "rb") as pycfile:
pyc = pycfile.read()
for i in range(Path(pyc_name).stat().st_size):
try:
exec(marshal.loads(pyc[i:]))
# ValueError when marshal fails. TypeError when exec fails. For example, during testing,
# on i=9 marshal.loads returns an int, which fails exec.
except (ValueError, TypeError):
pass
else:
return i
finally:
os.unlink(py_name)
os.unlink(pyc_name)
def run_zimp(args):
if args.pyc:
PycZimpRunner(args.name, get_key(args.key_file), args.marshal_offset).run()
else:
PyZimpRunner(args.name, get_key(args.key_file)).run()
def create_zimp(args):
if args.pyc:
PycZimpCreator(args.name, get_key(args.key_file), args.optimize, args.pyc).run()
else:
PyZimpCreator(args.name, get_key(args.key_file), args.optimize, args.pyc).run()
def main():
modes = {
"run": run_zimp,
"zip": create_zimp
}
argparser = argparse.ArgumentParser()
argparser.add_argument("mode", choices=modes.keys())
argparser.add_argument("--key-file")
argparser.add_argument("--name", required=True)
run_argparser = argparser.add_argument_group("run", "Run zimp")
run_argparser.add_argument("--pyc", action="store_true")
run_argparser.add_argument("--marshal-offset", default=16)
zip_argparser = argparser.add_argument_group("zip", "Create zimp")
zip_argparser.add_argument("--compression-level", default=None)
zip_argparser.add_argument("--optimize", type=int, default=-1)
args = argparser.parse_args()
modes[args.mode](args)
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
from brick_wall_build import task
@task()
def clean():
pass
# Should be marked as a task.
def html():
pass
# References a non-task.
@task(clean,html)
def android():
pass
|
nilq/baby-python
|
python
|
def insertion_sort(values):
    """Sort a list in place using insertion sort and return it."""
    for j in range(1, len(values)):
        key = values[j]
        i = j - 1
        # Shift elements of the sorted prefix that are greater than key.
        while i >= 0 and values[i] > key:
            values[i + 1] = values[i]
            i -= 1
        values[i + 1] = key
    return values
A = [5, 2, 4, 6, 1, 3]
print(insertion_sort(A))
|
nilq/baby-python
|
python
|
"""placeholder
Revision ID: 57539722e5cf
Revises: c1b5abada09c
Create Date: 2019-12-03 00:55:16.012247
"""
# revision identifiers, used by Alembic.
revision = '57539722e5cf'
down_revision = 'c1b5abada09c'
from alembic import op
import sqlalchemy as sa
def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
|
nilq/baby-python
|
python
|
first_number=1+1
print(first_number)
second_number=105+10
print(second_number)
|
nilq/baby-python
|
python
|
from flask import Flask, request, jsonify, url_for, Blueprint
from api.models import db, User
from api.utils import generate_sitemap, APIException
from flask_jwt_extended import create_access_token
from flask_jwt_extended import get_jwt_identity
from flask_jwt_extended import jwt_required
import os
api = Blueprint('api', __name__)
# Create a route to authenticate your users and return JWTs. The
# create_access_token() function is used to actually generate the JWT.
@app.route("/token", methods=["POST"])
def create_token():
email = request.json.get("email", None)
password = request.json.get("password", None)
if email != "test" or password != "test":
return jsonify({"msg": "Bad username or password"}), 401
access_token = create_access_token(identity=email)
return jsonify(access_token=access_token)
|
nilq/baby-python
|
python
|
import os
import package1.process as process
import package1.loadsettings as loadsettings
filenames = os.listdir("./task") #Create list of mediafiles to run through
if filenames == []:
print( "\nERROR: Task folder is empty. Put in video file(s) that you want to condense." )
quit()
if 'deletethis.txt' in filenames:
print("\nYou need to delete the file 'deletethis' in the 'condenser\Task' directory before the program can run.")
quit()
def strip_filename_extension(string):
    # os.path.splitext keeps everything before the final extension, so
    # filenames containing dots are preserved correctly
    return os.path.splitext(string)[0]
first_file = True #Certain operations need to be performed for the first file of a batch only.
for filename in filenames:
stripped_filename = strip_filename_extension(filename)
output_name = loadsettings.file_prefix + stripped_filename + ".mp3"
process.run_condenser(filename, output_name, first_file)
first_file = False
|
nilq/baby-python
|
python
|
import urllib
from BeautifulSoup import *
class ComputerLab():
def __init__(self, room, num, time):
self.room = room
self.num = num
self.time = time
    def __repr__(self):
        return "Room: %s\nNum: %s\nTime: %s\n" % (self.room, self.num, self.time)
url = "https://tomcat.itap.purdue.edu:8445/ICSWeb/AvailableStations"
page = urllib.urlopen(url)
soup = BeautifulSoup(page.read())
xp = []
mac = []
sun = []
labs = [xp, mac, sun]
i=0
j=0
for tbl in soup.findAll('table'):
if (i==0):
i=1
continue
for tr in tbl.findAll('tr'):
if (len(tr.contents) > 2):
a = ComputerLab(None, None, None)
a.room = tr.contents[0].find('font').contents[0]
a.num = tr.contents[1].find('font').contents[0]
a.time = tr.contents[2].find('font').contents[0]
labs[j].append(a)
j+=1
for labos in labs:
for lab in labos:
print lab
print
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# encoding: utf-8
"""
mssql2csv.py
Created by Bill Wiens on 2010-05-04.
"""
import sys, os, getopt, getpass
import optparse
import logging
import csv
import pymssql
def main():
parser = optparse.OptionParser()
parser.description="""Python script to dump a MSSQL Server Database to folder of CSV files.
Requires the freetds library and the pymssql module"""
parser.add_option("-H", "--host", dest="hostname", help="connect to HOSTNAME", metavar="HOSTNAME")
parser.add_option("-d", "--database", dest="database", help="connect to DATABASE", metavar="DATABASE")
parser.add_option("-u", "--user", dest="username", help="username to connect with", metavar="USERNAME")
parser.add_option("-p", "--password", dest="password", help="password to connect with", metavar="PASSWORD")
parser.add_option("-t", "--tables", dest="tables", help="Comma-separated list of tables to dump", metavar="TABLES")
(options, args) = parser.parse_args()
options = vars(options)
if not options['password']:
options['password'] = getpass.getpass("Enter password:")
if options['tables']:
options['tables'] = str.split(options['tables'], ",")
dump_db(options['hostname'], options['database'], options['username'], options['password'], options['tables'])
def dump_db(database_host, database_name, database_user, database_pass, database_tables):
try:
os.mkdir(database_name)
os.chdir(database_name)
    except OSError:
        logging.getLogger().error("Failed to make folder for CSV's: {0}".format(database_name))
        sys.exit(2)
try:
conn = pymssql.connect(user = database_user, password = database_pass, host = database_host, database = database_name)
cursor = conn.cursor()
    except Exception:
        logging.getLogger().error("Error: Can't connect to database")
        sys.exit(2)
    # database_tables may be None when --tables was not supplied
    if database_tables:
tables = database_tables
else:
cursor.execute("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='Base Table'")
tables = [table[0] for table in cursor.fetchall()]
for table_name in tables:
dump_table(cursor, table_name)
cursor.close()
conn.close()
def dump_table(cursor, tablename):
query = "SELECT COLUMN_NAME, DATA_TYPE FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME='{0}'".format(tablename)
cursor.execute(query)
schema = cursor.fetchall()
fieldnames = [column[0] for column in schema]
# casts 'ntext' to nvarchar
selectnames = ["CAST ({0} as nvarchar(max))".format(name) if datatype == 'ntext' else name for name, datatype in schema]
query = "SELECT {0} FROM {1}".format(", ".join(selectnames), tablename)
cursor.execute(query)
filename = "{0}.csv".format(tablename)
with open(filename, "wb") as fp:
writer = csv.writer(fp)
writer.writerow(fieldnames)
row = cursor.fetchone()
while row:
writer.writerow(row)
row = cursor.fetchone()
if __name__ == '__main__':
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
try:
main()
except KeyboardInterrupt:
logging.getLogger().error("Cancelled by user")
|
nilq/baby-python
|
python
|
#!/usr/bin/python3.6
"""
Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-06-11 12:40:47.360445
.. warning::
The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only.
They are there as a guide as to how the visual basic / catscript functions work
and thus help debugging in pycatia.
"""
from pycatia.mec_mod_interfaces.hybrid_bodies import HybridBodies
from pycatia.mec_mod_interfaces.hybrid_shape import HybridShape
from pycatia.mec_mod_interfaces.hybrid_shapes import HybridShapes
from pycatia.mec_mod_interfaces.ordered_geometrical_sets import OrderedGeometricalSets
from pycatia.mec_mod_interfaces.shapes import Shapes
from pycatia.mec_mod_interfaces.sketches import Sketches
from pycatia.system_interfaces.any_object import AnyObject
class Body(AnyObject):
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
| System.IUnknown
| System.IDispatch
| System.CATBaseUnknown
| System.CATBaseDispatch
| System.AnyObject
| Body
|
| The object that manages a sequence of shapes, a set of sketches, a set of
| hybrid bodies, a set of ordered geometrical sets and a set of hybrid
| shapes.
|
| It belongs to the Bodies collection of a Part or HybridBody
| object.
"""
def __init__(self, com_object):
super().__init__(com_object)
self.body = com_object
@property
def hybrid_bodies(self) -> HybridBodies:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property HybridBodies() As HybridBodies (Read Only)
|
| Returns the body's HybridBodies collection.
|
| Example:
| The following example returns in hybridBodyColl the collection of
| hybrid bodies of the main body of partDoc part
| document:
|
| Dim body As Body
| Set body = partDoc.Part.Bodies.MainBody
| Set hybridBodyColl = body.HybridBodies
:return: HybridBodies
:rtype: HybridBodies
"""
return HybridBodies(self.body.HybridBodies)
@property
def hybrid_shapes(self) -> HybridShapes:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property HybridShapes() As HybridShapes (Read Only)
|
| Returns the list of hybrid shapes included in the body.
|
| Returns:
| oHybridShapes The list of hybrid shapes in the body (@see
| CATIAHybridShapes
| for more information).
|
| Example:
| The following example returns in HybridShapes1 the list
| of
| hybrid shapes in the body Body1:
|
| Dim HybridShapes1 As HybridShapes
| Set HybridShapes1 = Body1.HybridShapes
:return: HybridShapes
:rtype: HybridShapes
"""
return HybridShapes(self.body.HybridShapes)
@property
def in_boolean_operation(self) -> bool:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property InBooleanOperation() As boolean (Read Only)
|
| Returns True if the body is involved in a boolean operation, else returns
| False.
|
| Example:
| The following example returns in operated True if the body body1belongs
| to a boolean operation.
|
| operated = body1.InBooleanOperation
:return: bool
:rtype: bool
"""
return self.body.InBooleanOperation
@property
def ordered_geometrical_sets(self) -> OrderedGeometricalSets:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property OrderedGeometricalSets() As OrderedGeometricalSets (Read
| Only)
|
| Returns the body's OrderedGeometricalSets collection.
|
                | Example:
                |     The following example returns in OrderedGeometricalSetColl the
                |     collection of ordered geometrical sets of the body Body1:
                |
                |     Set OrderedGeometricalSetColl = Body1.OrderedGeometricalSets
:return: OrderedGeometricalSets
:rtype: OrderedGeometricalSets
"""
return OrderedGeometricalSets(self.body.OrderedGeometricalSets)
@property
def shapes(self) -> Shapes:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property Shapes() As Shapes (Read Only)
|
| Returns the body's Shapes collection. These shapes make up the sequence of
| shapes that will produce an intermediate result for the part, or the final
| result in the case of the main body.
|
| Example:
| The following example returns in shapColl the collection of shapes
| managed by the main body of the partDoc part document:
|
| Dim body As Body
| Set body = partDoc.Part.Bodies.MainBody
| Set shapColl = body.Shapes
:return: Shapes
:rtype: Shapes
"""
return Shapes(self.body.Shapes)
@property
def sketches(self) -> Sketches:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384)
| o Property Sketches() As Sketches (Read Only)
|
| Returns the body's Sketches collection. These sketches are those inside the
| body at all levels.
|
| Example:
| The following example returns in skColl the collection of sketches of
| the main body of partDoc part document:
|
| Dim body As Body
| Set body = partDoc.Part.Bodies.MainBody
| Set skColl = body.Sketches
:return: Sketches
:rtype: Sketches
"""
return Sketches(self.body.Sketches)
def insert_hybrid_shape(self, i_hybrid_shape: HybridShape) -> None:
"""
.. note::
:class: toggle
CAA V5 Visual Basic Help (2020-07-06 14:02:20.222384))
| o Sub InsertHybridShape(HybridShape iHybridShape)
|
| Insert a hybrid shape to the body.
|
| Parameters:
|
                | iHybridShape
| The hybrid shape to insert.
|
| Example:
| This example inserts the hybrid shape HybridShape1 to the body
| Body1:
|
| Body1.InsertHybridShape (HybridShape1)
:param HybridShape i_hybrid_shape:
:return: None
:rtype: None
"""
return self.body.InsertHybridShape(i_hybrid_shape.com_object)
# # # # Autogenerated comment:
# # some methods require a system service call as the methods expects a vb array object
# # passed to it and there is no way to do this directly with python. In those cases the following code
# # should be uncommented and edited accordingly. Otherwise completely remove all this.
# # vba_function_name = 'insert_hybrid_shape'
# # vba_code = """
# # Public Function insert_hybrid_shape(body)
# # Dim iHybridShape (2)
# # body.InsertHybridShape iHybridShape
# # insert_hybrid_shape = iHybridShape
# # End Function
# # """
# # system_service = self.application.system_service
# # return system_service.evaluate(vba_code, 0, vba_function_name, [self.com_object])
def __repr__(self):
return f'Body(name="{self.name}")'
|
nilq/baby-python
|
python
|
import numpy as np
import read_thres as thrs
def test_thr(check_thr):
data, x = thrs.ths_def(check_thr, threshd=1.E-5)
dat_nw = check_thr.drop(columns=["norm", "<x>", "<y>"])
x_nw = dat_nw.columns.values
assert len(x) == len(x_nw)
assert np.array_equal(x, x_nw)
assert data.equals(dat_nw)
|
nilq/baby-python
|
python
|
import os
import yaml
import getpass
from ConfigParser import SafeConfigParser
from twisted.internet import defer, reactor
from twisted.internet.endpoints import TCP4ClientEndpoint
from os.path import abspath, expanduser
from ooni.utils.net import ConnectAndCloseProtocol, connectProtocol
from ooni import geoip
from ooni.utils import Storage, log, get_ooni_root
from ooni import errors
class OConfig(object):
_custom_home = None
def __init__(self):
self.current_user = getpass.getuser()
self.global_options = {}
self.reports = Storage()
self.scapyFactory = None
self.tor_state = None
# This is used to store the probes IP address obtained via Tor
self.probe_ip = geoip.ProbeIP()
self.logging = True
self.basic = Storage()
self.advanced = Storage()
self.tor = Storage()
self.privacy = Storage()
self.set_paths()
def embedded_settings(self, category, option):
embedded_settings = os.path.join(get_ooni_root(), 'settings.ini')
if os.path.isfile(embedded_settings):
settings = SafeConfigParser()
with open(embedded_settings) as fp:
settings.readfp(fp)
return settings.get(category, option)
return None
@property
def var_lib_path(self):
var_lib_path = self.embedded_settings("directories", "var_lib")
if var_lib_path:
return os.path.abspath(var_lib_path)
return "/var/lib/ooni"
@property
def usr_share_path(self):
usr_share_path = self.embedded_settings("directories", "usr_share")
if usr_share_path:
return os.path.abspath(usr_share_path)
return "/usr/share/ooni"
@property
def data_directory_candidates(self):
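# Candidate data directories, highest priority first as consumed by
# data_directory() and get_data_file_path() below; $OONI_DATA_DIR and
# the --datadir option are prepended so they win over the defaults.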
dirs = [
self.ooni_home,
self.var_lib_path,
self.usr_share_path,
os.path.join(get_ooni_root(), '..', 'data'),
'/usr/share/'
]
if os.getenv("OONI_DATA_DIR"):
dirs.insert(0, os.getenv("OONI_DATA_DIR"))
if self.global_options.get('datadir'):
dirs.insert(0, abspath(expanduser(self.global_options['datadir'])))
return dirs
@property
def data_directory(self):
for target_dir in self.data_directory_candidates:
if os.path.isdir(target_dir):
return target_dir
return self.var_lib_path
@property
def ooni_home(self):
home = expanduser('~'+self.current_user)
if os.getenv("HOME"):
home = os.getenv("HOME")
if self._custom_home:
return self._custom_home
else:
return os.path.join(home, '.ooni')
def get_data_file_path(self, file_name):
for target_dir in self.data_directory_candidates:
file_path = os.path.join(target_dir, file_name)
if os.path.isfile(file_path):
return file_path
def set_paths(self):
self.nettest_directory = os.path.join(get_ooni_root(), 'nettests')
if self.advanced.inputs_dir:
self.inputs_directory = self.advanced.inputs_dir
else:
self.inputs_directory = os.path.join(self.ooni_home, 'inputs')
if self.advanced.decks_dir:
self.decks_directory = self.advanced.decks_dir
else:
self.decks_directory = os.path.join(self.ooni_home, 'decks')
self.reports_directory = os.path.join(self.ooni_home, 'reports')
self.resources_directory = os.path.join(self.data_directory,
"resources")
if self.advanced.report_log_file:
self.report_log_file = self.advanced.report_log_file
else:
self.report_log_file = os.path.join(self.ooni_home,
'reporting.yml')
if self.global_options.get('configfile'):
config_file = self.global_options['configfile']
self.config_file = expanduser(config_file)
else:
self.config_file = os.path.join(self.ooni_home, 'ooniprobe.conf')
if 'logfile' in self.basic:
self.basic.logfile = expanduser(self.basic.logfile.replace(
'~', '~'+self.current_user))
def initialize_ooni_home(self, custom_home=None):
if custom_home:
self._custom_home = custom_home
self.set_paths()
if not os.path.isdir(self.ooni_home):
print "Ooni home directory does not exist."
print "Creating it in '%s'." % self.ooni_home
os.mkdir(self.ooni_home)
os.mkdir(self.inputs_directory)
os.mkdir(self.decks_directory)
def _create_config_file(self):
target_config_file = self.config_file
print "Creating it for you in '%s'." % target_config_file
sample_config_file = self.get_data_file_path('ooniprobe.conf.sample')
with open(sample_config_file) as f:
with open(target_config_file, 'w+') as w:
for line in f:
if line.startswith(' logfile: '):
w.write(' logfile: %s\n' % (
os.path.join(self.ooni_home, 'ooniprobe.log'))
)
else:
w.write(line)
def read_config_file(self, check_incoherences=False):
if not os.path.isfile(self.config_file):
print "Configuration file does not exist."
self._create_config_file()
self.read_config_file()
with open(self.config_file) as f:
config_file_contents = '\n'.join(f.readlines())
configuration = yaml.safe_load(config_file_contents)
for setting in configuration.keys():
if setting in dir(self) and configuration[setting] is not None:
for k, v in configuration[setting].items():
getattr(self, setting)[k] = v
self.set_paths()
if check_incoherences:
self.check_incoherences(configuration)
def check_incoherences(self, configuration):
incoherent = []
if configuration['advanced']['interface'] != 'auto':
from scapy.all import get_if_list
if configuration['advanced']['interface'] not in get_if_list():
incoherent.append('advanced:interface')
self.log_incoherences(incoherent)
def log_incoherences(self, incoherences):
if len(incoherences) > 0:
if len(incoherences) > 1:
incoherent_pretty = ", ".join(incoherences[:-1]) + ' and ' + incoherences[-1]
else:
incoherent_pretty = incoherences[0]
log.err("You must properly set %s in %s." % (incoherent_pretty, self.config_file))
raise errors.ConfigFileIncoherent
@defer.inlineCallbacks
def check_tor(self):
"""
Called only when tor must be started by director.start
"""
incoherent = []
if not self.advanced.start_tor:
if self.tor.socks_port is None:
incoherent.append('tor:socks_port')
else:
socks_port_ep = TCP4ClientEndpoint(reactor,
"localhost",
self.tor.socks_port)
try:
yield connectProtocol(socks_port_ep, ConnectAndCloseProtocol())
except Exception:
incoherent.append('tor:socks_port')
if self.tor.control_port is not None:
control_port_ep = TCP4ClientEndpoint(reactor,
"localhost",
self.tor.control_port)
try:
yield connectProtocol(control_port_ep, ConnectAndCloseProtocol())
except Exception:
incoherent.append('tor:control_port')
self.log_incoherences(incoherent)
config = OConfig()
if not os.path.isfile(config.config_file) \
and os.path.isfile('/etc/ooniprobe.conf'):
config.global_options['configfile'] = '/etc/ooniprobe.conf'
config.set_paths()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
import pandas as pd
from tqdm import tqdm
from collections import defaultdict
import os, re, time, warnings, sys
import warnings
import pickle
from mutagen.mp3 import MP3
import numpy as np
def create_df(tsv, audio_dir):
tqdm.pandas()
df = pd.read_csv(tsv, sep='\t')
df['dur'] = df['path'].progress_apply(get_dur, args=(audio_dir,))
return df
def get_dur(mp3, audio_dir):
""" return audio duration in seconds """
audio=MP3(os.path.join(audio_dir,mp3))
return audio.info.length
def select_subset(df, n_spk, tgt_dur, accent=None, balanced_gender=True, add_precaution_spks=False):
print("n_spk = ", n_spk)
if add_precaution_spks:
tgt_dur= (tgt_dur/n_spk)*(n_spk+4)
n_spk+=4
print(n_spk)
print('Adding 4 additional speakers that will not be counted towards the max duration, to allow manual removal of unwanted speakers')
#1. filter out speakers with accents
df_start=len(df)
if not accent:
df = df[df['accent'].isnull()] #only want those with no specific accent
else:
print(accent)
df = df[df.accent.isin(accent)]
print("{}% of data was removed after filtering by accent".format((df_start-len(df))/df_start*100))
df_start=len(df)
#2. filter out speakers with not enough data
print("n_spk = ", n_spk)
tgt_dur_spk = float(tgt_dur) / n_spk
df_spk = df.groupby('client_id')['dur'].sum()
spks=list(df_spk[df_spk >= tgt_dur_spk].index)
df = df[df.client_id.isin(spks)]
if not len(df['client_id'].unique()) >= n_spk :
raise ValueError('There are not enough speakers to reach the desired target duration with the target number of speakers. Try reducing one or the other of these values')
#3. select n_spk speakers, half of each gender (balanced across accents when several are given).
if (n_spk % 2) != 0:
warnings.warn("Warning....... n_spk is an odd number, adding one speaker so that we can have equal share between males and females")
n_spk += 1
df_m = df[df["gender"] == "male"]
df_f = df[df["gender"] == "female"]
print("n_spk = ", n_spk)
if not len(df_m['client_id'].unique()) >= n_spk / 2 or not len(df_f['client_id'].unique()) >= n_spk / 2 :
raise ValueError('Not enough speakers of each gender for the target duration. It could be because a lot of speakers have not entered their gender information. Try setting "balanced_gender" to false or reducing tgt_dur.')
if accent and len(accent) > 1:
#then do balanced.
finalspks = set()
for ac in accent :
a=df_m[df_m["accent"] == ac]['client_id']
spks= list(list(np.random.choice(df_m[df_m["accent"] == ac]['client_id'].unique(),int(n_spk/2/len(accent)), replace=False)) + list(np.random.choice(df_f[df_f["accent"] == ac]['client_id'].unique(),int(n_spk/2/len(accent)), replace=False)))
for x in spks:
finalspks.add(x)
else:
print("n_spk/2 = ", int(n_spk/2))
print("n_spk/2 = ", n_spk/2)
finalspks = set(list(np.random.choice(df_m['client_id'].unique(),int(n_spk/2), replace=False)) + list(np.random.choice(df_f['client_id'].unique(),int(n_spk/2), replace=False)))
print(len(finalspks))
print("male: ", len(df_m['client_id'].unique()))
print("female: ", len(df_f['client_id'].unique()))
#4. Sample n seconds per spk
#filter out if above threshold length
final_df = pd.DataFrame(columns=df.columns)
for spk in tqdm(finalspks):
print(spk)
tot=0
tmp_df = df[df['client_id'] == spk]
for i in tmp_df.sample(frac=1).iterrows():
if i[1]['dur'] >= 20:
continue #not over 20 sec
if tot >= tgt_dur_spk:
break
final_df = final_df.append(i[1])
tot += i[1]['dur']
return final_df
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("CV_path", help="path to the commonvoice main directory")
parser.add_argument("lang", help="language code in CV format")
parser.add_argument('output_tsv', type=str, help="path to the output tsv")
parser.add_argument("--tgt_spk", type=int, help='target number of shespeakers, must be an even number', default=24)
parser.add_argument("--tgt_dur", type=int, help='target total duration of the selection, in seconds', default=36000)
parser.add_argument("--add_precaution_spks", default=False, action="store_true", help="If True, add 4 more speakers (2 for each gender) that will have to be manually removed.")
parser.add_argument("--accent", default=None, action='append')
args, leftovers = parser.parse_known_args()
validated_tsv = os.path.join(args.CV_path, args.lang, "validated.tsv")
audio_dir= os.path.join(args.CV_path, args.lang,"clips")
print(args.accent)
if os.path.exists(os.path.join(args.CV_path, args.lang, "validated.pkl")):
df=pickle.load(open(os.path.join(args.CV_path, args.lang, "validated.pkl"), 'rb'))
else :
print("Retrieveing audio information from the tsv file")
df = create_df(validated_tsv, audio_dir)
df.to_pickle(os.path.join(args.CV_path, args.lang, "validated.pkl"))
if not os.path.exists(args.output_tsv):
print("Selecting the subset")
if args.add_precaution_spks:
final_df = select_subset(df, args.tgt_spk, args.tgt_dur, add_precaution_spks=args.add_precaution_spks, accent=args.accent)
else:
final_df = select_subset(df, args.tgt_spk, args.tgt_dur, accent=args.accent)
final_df.to_csv(args.output_tsv, sep="\t")
|
nilq/baby-python
|
python
|
from unittest import TestCase
from cards.businesslogic.description_generator.DescriptionAppender import DescriptionAppender
class DescriptionAppenderTestCase(TestCase):
def test_sample_description1(self):
appender = DescriptionAppender()
text1 = "line1"
text2 = "line2"
appender.append(text1)
appender.append(text2)
result = appender.process()
self.assertEqual("Line1 i line2.", result)
def test_single_item_desc(self):
appender = DescriptionAppender()
appender.append("test")
self.assertEqual("Test.", appender.process())
def test_multiple(self):
appender = DescriptionAppender()
appender.append("12test")
appender.append("ASDASD")
appender.append("112a")
appender.append("Test")
self.assertEqual("12test, aSDASD, 112a i test.", appender.process())
def test_none(self):
appender = DescriptionAppender()
self.assertEqual("", appender.process())
|
nilq/baby-python
|
python
|
# Copyright 2020 Thomas Rogers
# SPDX-License-Identifier: Apache-2.0
import typing
import yaml
from direct.gui import DirectGui, DirectGuiGlobals
from direct.task import Task
from panda3d import core
from ... import constants, edit_mode
from ...tiles import manager
from ...utils import gui
from .. import descriptors, event_grouping, map_objects
from ..descriptors import wall_type_descriptor
from . import sprite_property_view
_WALL_CATEGORIES_TYPE = typing.Dict[
str, typing.List[wall_type_descriptor.WallTypeDescriptor]
]
class WallDialog:
def __init__(self, parent: core.NodePath, edit_mode: edit_mode.EditMode):
self._dialog = DirectGui.DirectFrame(
parent=parent,
pos=core.Vec3(-0.78, -0.9),
frameSize=(0, 1.58, 0, 1.8),
relief=DirectGuiGlobals.RAISED,
borderWidth=(0.01, 0.01),
)
self._dialog.hide()
self._property_parent: core.NodePath = self._dialog.attach_new_node(
"properties"
)
self._property_parent.set_pos(0.04, 0, 0.38)
self._edit_mode = edit_mode
self._wall: map_objects.EditorWall = None
self._selected_descriptor: wall_type_descriptor.WallTypeDescriptor = None
self._current_descriptor: wall_type_descriptor.WallTypeDescriptor = None
self._current_picnum: int = None
self._current_palette: int = None
self._current_status_number: int = None
self._properties: sprite_property_view.SpritePropertyView = None
self._type_lookup = {
wall_type.name: type_index
for type_index, wall_type in descriptors.wall_types.items()
}
type_names = list(self._type_lookup.keys())
self._type_selector = DirectGui.DirectOptionMenu(
parent=self._dialog,
pos=core.Vec3(0.05, 0.38),
scale=constants.TEXT_SIZE,
items=type_names,
command=self._type_changed,
)
DirectGui.DirectLabel(
parent=self._dialog,
text="Special Source:",
pos=core.Vec3(1.12, 0.38),
scale=constants.TEXT_SIZE,
)
self._special_source_menu = DirectGui.DirectOptionMenu(
parent=self._dialog,
pos=core.Vec3(1.28, 0.38),
items=["None", "Level Start"],
scale=constants.TEXT_SIZE,
)
DirectGui.DirectLabel(
parent=self._dialog,
text="Special Target:",
pos=core.Vec3(1.12, 0.38 - constants.TEXT_SIZE - 0.02),
scale=constants.TEXT_SIZE,
)
self._special_target_menu = DirectGui.DirectOptionMenu(
parent=self._dialog,
pos=core.Vec3(1.28, 0.38 - constants.TEXT_SIZE - 0.02),
items=["None", "Next Level", "Secret Level"],
scale=constants.TEXT_SIZE,
)
DirectGui.DirectButton(
parent=self._dialog,
pos=core.Vec3(1.36, 0.07),
text="Ok",
scale=constants.TEXT_SIZE,
command=self._save_changes,
)
DirectGui.DirectButton(
parent=self._dialog,
pos=core.Vec3(1.48, 0.07),
text="Cancel",
scale=constants.TEXT_SIZE,
command=self._hide,
)
def show(self, wall: map_objects.EditorWall):
self._wall = wall
self._current_descriptor = descriptors.wall_types[self._wall.get_type()]
if (
self._wall.target_event_grouping
== event_grouping.EventGroupingCollection.END_LEVEL_GROUPING
):
self._special_target_menu.set("Next Level")
elif (
self._wall.target_event_grouping
== event_grouping.EventGroupingCollection.SECRET_END_LEVEL_GROUPING
):
self._special_target_menu.set("Secret Level")
else:
self._special_target_menu.set("None")
if (
self._wall.source_event_grouping
== event_grouping.EventGroupingCollection.START_LEVEL_GROUPING
):
self._special_source_menu.set("Next Level")
else:
self._special_source_menu.set("None")
type_name = self._current_descriptor.name
self._type_selector.set(type_name)
self._update_property_view()
self._edit_mode.push_mode(self)
def _save_changes(self):
new_values = self._properties.get_values()
new_picnum = self._properties.get_current_tile()
if new_picnum is not None:
self._current_picnum = new_picnum
self._current_descriptor.apply_wall_properties(self._wall, new_values)
self._wall.blood_wall.wall.tags[0] = self._current_descriptor.wall_type
self._wall.invalidate_geometry()
target_special_value = self._special_target_menu.get()
if target_special_value == "Next Level":
self._wall.set_target_event_grouping(
event_grouping.EventGroupingCollection.END_LEVEL_GROUPING
)
elif target_special_value == "Secret Level":
self._wall.set_target_event_grouping(
event_grouping.EventGroupingCollection.SECRET_END_LEVEL_GROUPING
)
elif (
self._wall.target_event_grouping is not None
and self._wall.target_event_grouping.special_receiver_id is not None
):
self._wall.set_target_event_grouping(None)
source_special_value = self._special_source_menu.get()
if source_special_value == "Level Start":
self._wall.set_source_event_grouping(
event_grouping.EventGroupingCollection.START_LEVEL_GROUPING
)
elif (
self._wall.source_event_grouping is not None
and self._wall.source_event_grouping.special_receiver_id is not None
):
self._wall.set_source_event_grouping(None)
self._hide()
def _clear_property_view(self):
if self._properties is not None:
self._properties.destroy()
self._properties = None
def _update_property_view(self):
self._clear_property_view()
self._properties = sprite_property_view.SpritePropertyView(
self._property_parent,
-1,
self._current_descriptor.get_wall_properties(self._wall),
None,
None,
1.65,
1.5,
1.25,
)
def _type_changed(self, value):
type_index = self._type_lookup[value]
self._current_descriptor = descriptors.wall_types[type_index]
self._update_property_view()
if self._wall.get_type() == type_index:
return
self._wall.blood_wall.wall.tags[0] = type_index
def _reset_selected_wall_type(self, task):
self._selected_descriptor = None
return task.done
def enter_mode(self, state: dict):
self._dialog.show()
def exit_mode(self):
self._dialog.hide()
return {}
def _hide(self):
self._edit_mode.pop_mode()
def tick(self):
pass
|
nilq/baby-python
|
python
|
import tkinter as tk
import pygubu
import csv
import serial
import rospy
import numpy as np
import PID
import ctypes
from can_msgs import msg
import fcntl
import termios
import sys
import select
import subprocess
import os
from threading import Timer
import signal
from termios import tcflush, TCIOFLUSH
from rospy.core import NullHandler
import tty
from pyquaternion import Quaternion
from OdriveClass import *
import time
import cv2
# Note that positive velocity values lower winch 1
MAX_VEL = 100000 # Max. speed for winch in encoder counts per second
LIVEPLOTTER = 1
doCalibrate = 0
class numhex64(ctypes.Union):
_fields_ = [("num", ctypes.c_double),
("sint", ctypes.c_int64),
("uint", ctypes.c_uint64),
("hex", ctypes.c_ubyte * 8)]
class numhex32(ctypes.Union):
_fields_ = [("num", ctypes.c_float),
("sint", ctypes.c_int32),
("uint", ctypes.c_uint32),
("hex", ctypes.c_ubyte * 4)]
class ManualWinchApp:
def __init__(self):
# Initalize the gantry:
os.system("stty -echo")
rospy.init_node('can_send', anonymous=True)
# CAN publisher and frame state, stored on the instance so move_gantry() can use them
self.pub = rospy.Publisher('sent_messages', msg.Frame, queue_size=100)
self.rate = rospy.Rate(50)
self.xSpeed = numhex64()
self.zSpeed = numhex64()
self.frame = msg.Frame()
self.frame.is_rtr = False
self.frame.is_extended = False
self.frame.dlc = 8
self.gantry_x_pid = PID.PID(P=0.2, I=0.0, D=0.0)
self.gantry_z_pid = PID.PID(P=0.2, I=0.0, D=0.0)
#1: Create a builder
self.builder = builder = pygubu.Builder()
#2: Load an ui file
builder.add_from_file('WinchesManualGUI.ui')
#3: Create the mainwindow
self.mainwindow = builder.get_object('MainWindow')
#4: Connect callbacks
builder.connect_callbacks(self)
self.ser_add='/dev/ttyACM3' #For Strain Gauges and IMU
self.ser_add2='/dev/ttyACM1' # for Encoders
self.testCounter=1
self.list_of_floats=[]
# Data from gantry crane:
self.x = 0
self.y = 0
self.z = 0
self.xval1=0
self.yval1=0
self.buttjpin=0
self.butt1pin=0
self.butt2pin=0
self.butt3pin=0
self.str1=0
self.str2=0
self.str3=0
self.ytilt=0
self.ztilt=0
self.qw=0
self.qx=0
self.qy=0
self.qz=0
self.q = Quaternion(self.qw,self.qx,self.qy,self.qz)
self.rot_ax = self.q.axis
self.rot_ang = self.q.degrees
self.accx=0
self.accy=0
self.accz=0
self.sys_cal=0
self.gyro_cal=0
self.acc_cal=0
self.mag_cal=0
self.accz_thresh_wedgeBreaking=0.5
self.ytilt_zero=-.81
self.ztilt_zero=2.87
self.accx_zero=--.133
self.accy_zero=-.5
self.accz_zero=10.08
self.angle_Zthresh=.75
self.angle_Ythresh=.75
self.exitholeflag=0
self.connectflag=0 # set to 1 once the ODrives are connected so winch encoders are read
self.psiwedge=0
self.pitch=0
self.roll=0
self.psi=0
self.v4=0.0
self.v4_prev=0.0
self.mot1spd=0.0
self.mot1spd_prev=0.0
self.mot2spd=0.0
self.mot2spd_prev=0.0
self.mot3spd=0.0
self.mot3spd_prev=0.0
self.ytiltw=0
self.ztiltw=0
self.sv=0
if self.sv==1:
self.str1thresh=10
self.str2thresh=10
self.str3thresh=10
else:
self.str1thresh=5
self.str2thresh=5
self.str3thresh=5
#print([self.str1thresh,self.str2thresh,self.str3thresh])
#time.sleep(.5)
# get calibration parameters
self.list_of_floats=[]
self.list_of_floats_temp=[]
self.TotalList=[]
def run(self):
self.mainwindow.mainloop()
def winch1scale_move(self, vel):
des_vel = MAX_VEL*float(vel)/100
odrv0.VelMove(des_vel,0)
def winch2scale_move(self, vel):
des_vel = MAX_VEL*float(vel)/100
odrv1.VelMove(des_vel,0)
def winch3scale_move(self, vel):
des_vel = MAX_VEL*float(vel)/100
odrv1.VelMove(des_vel,1)
def move_all(self,vel):
des_vel = MAX_VEL*float(vel)/100
odrv0.VelMove(des_vel,0)
odrv1.VelMove(des_vel,0)
odrv1.VelMove(des_vel,1)
def stopall_butt(self):
odrv0.VelMove(0,0)
odrv1.VelMove(0,0)
odrv1.VelMove(0,1)
def get_gantry_coords(self):
self.x = 0
self.y = 0
self.z = 0
def move_gantry(self,x,y,z):
self.xSpeed.num = 0.0
self.zSpeed.num = 0.0
self.gantry_x_pid.SetPoint = x
self.gantry_z_pid.SetPoint = z
self.gantry_x_pid.update()
self.gantry_z_pid.update()
# Send the x-axis speed on CAN id 0x01
self.frame.id = 0x01
msgData = ""
for idx in range(8):
msgData += chr(self.xSpeed.hex[idx])
self.frame.data = msgData
self.frame.header.stamp = rospy.Time.now()
self.pub.publish(self.frame)
# Send the z-axis speed on CAN id 0x02
self.frame.id = 0x02
msgData = ""
for idx in range(8):
msgData += chr(self.zSpeed.hex[idx])
self.frame.data = msgData
self.frame.header.stamp = rospy.Time.now()
self.pub.publish(self.frame)
rospy.loginfo("x: %f rps, z: %f rps", self.xSpeed.num, self.zSpeed.num)
self.rate.sleep()
return 0
##### ARDUINO SERIAL FUNCS
def ArduinoSetup(self):
userinput=input('Setting up the arduino. If you restarted the arduino, unload everything and then enter 1 so it can calibrate')
#print(type(int(userinput)))
print(self.ser_add)
print(self.ser_add2)
self.ser = serial.Serial(self.ser_add, 115200,timeout=1)
# self.ser.flushInput()
# self.ser.write(int(userinput))
# self.ser.flushInput()
self.ser2 = serial.Serial(self.ser_add2, 115200,timeout=1)
# self.ser2.flushInput()
# self.ser2.write(int(userinput))
# self.ser2.flushInput()
print(self.ser_add)
print(self.ser_add2)
print("connected")
# Calibrate Arduino if needed
line=[]
ctr=0
while self.buttjpin==0:
try:
line = self.ser.readline()
line.decode('ascii').strip()
print(line.decode('ascii').strip())
list_of_floats_temp=[]
list_of_floats_temp_2=[]
list_of_floats_temp_1= [float(item) for item in line.decode('ascii').strip().split(';')]
line2 = self.ser2.readline()
line2.decode('ascii').strip()
print(line2.decode('ascii').strip())
list_of_floats_temp_2= [float(item) for item in line2.decode('ascii').strip().split(';')]
list_of_floats_temp_1.extend(list_of_floats_temp_2)
print(list_of_floats_temp_1)
# if len(self.list_of_floats_temp)==13:
# list_of_floats_temp2=list_of_floats_temp
# #print(self.list_of_floats)
# list_of_floats_temp2[8]=180-(360-list_of_floats_temp[8])
# list_of_floats_temp2[9]=90-list_of_floats_temp[9]
# print(list_of_floats_temp2)
ctr=ctr+1
except:
pass
# print("Keyboard Interrupt")
finally:
#if len(line)>0:
#if line.decode('ascii').strip()=="good":
if ctr>5:
break
# input('Everything calibrated. LOAD UP. Then serial data will display. Enter 1 to start ')
# line=[]
timeout_start=time.time()
timeout=2
#display serial data for a couple of seconds to make sure it looks good
while time.time() < timeout_start + timeout:
self.get_data(0)
#print([ self.str1,self.str2,self.str3])
print([ self.str1,self.str2,self.str3, self.phi1enc,self.phi2enc,self.phi3enc])
#print([ self.phi1enc,self.phi2enc,self.phi3enc, self.phi1deg,self.phi2deg,self.phi3deg])
input('If you are happy with the serial, press 1 to continue. otherwise, restart the python ')
def ReadSerial(self,tosaveflag):
self.ser.flushInput()
self.ser2.flushInput()
# while (self.ser.inWaiting()<30 and self.ser2.inWaiting()<15):
# pass
try:
line2 = self.ser2.readline()
self.list_of_floats_temp2=[]
self.list_of_floats_temp2 = [float(item) for item in line2.decode('ascii').strip().split(';')]
line = self.ser.readline()
self.list_of_floats_temp=[]
self.list_of_floats_temp = [float(item) for item in line.decode('ascii').strip().split(';')]
self.list_of_floats_temp.extend(self.list_of_floats_temp2)
#print(self.list_of_floats_temp)
#print(len(self.list_of_floats_temp))
# while (self.ser.inWaiting()<30):
# pass
# #print('less')
# try:
# line = self.ser.readline()
# line2=line
# #print(len(line))
# # print(line)
# self.list_of_floats_temp=[]
# self.list_of_floats_temp = [float(item) for item in line.decode('ascii').strip().split(';')]
# except:
# pass
# while (self.ser2.inWaiting()<30):
# pass
# try:
# line2 = self.ser2.readline()
# self.list_of_floats_temp2=[]
# self.list_of_floats_temp2 = [float(item) for item in line2.decode('ascii').strip().split(';')]
# except:
# pass
#self.list_of_floats_temp.extend(self.list_of_floats_temp2)
#print(self.list_of_floats_temp)
# print(line.decode('ascii').strip())
#print(len(self.list_of_floats_temp))
if len(self.list_of_floats_temp)==28:
self.list_of_floats=[]
self.list_of_floats=self.list_of_floats_temp
self.xval1=self.list_of_floats[0]
self.yval1=self.list_of_floats[1]
self.buttjpin=self.list_of_floats[2]
self.butt1pin=self.list_of_floats[3]
self.butt2pin=self.list_of_floats[4]
self.butt3pin=self.list_of_floats[5]
self.str1=round(self.list_of_floats[6],1)
self.str2=round(self.list_of_floats[7],1)
self.str3=round(self.list_of_floats[8],1)
self.ytilt=self.list_of_floats[9]
self.ztilt=self.list_of_floats[10]
self.qw=self.list_of_floats[11]
self.qx=self.list_of_floats[12]
self.qy=self.list_of_floats[13]
self.qz=self.list_of_floats[14]
self.q = Quaternion(self.qw,self.qx,self.qy,self.qz)
self.rot_ax = self.q.axis
self.rot_ang = self.q.degrees
self.accx=self.list_of_floats[15]
self.accy=self.list_of_floats[16]
self.accz=self.list_of_floats[17]
self.sys_cal=self.list_of_floats[18]
self.gyro_cal=self.list_of_floats[19]
self.acc_cal=self.list_of_floats[20]
self.mag_cal=self.list_of_floats[21]
self.phi1enc=self.list_of_floats[22]
self.phi2enc=self.list_of_floats[23]
self.phi3enc=self.list_of_floats[24]
self.beta1enc=self.list_of_floats[25]
self.beta2enc=self.list_of_floats[26]
self.beta3enc=self.list_of_floats[27]
self.phi1deg=360-self.phi1enc/16384*360
self.phi2deg=360-self.phi2enc/16384*360
self.phi3deg=360-self.phi3enc/16384*360
self.beta1deg=self.beta1enc/16384*360
self.beta2deg=self.beta2enc/16384*360
self.beta3deg=self.beta3enc/16384*360
self.calculatepsi()
self.list_of_floats.append(self.psi)
self.list_of_floats.append(self.ytilt_zero)
self.list_of_floats.append(self.ztilt_zero)
self.list_of_floats.append(self.accx_zero)
self.list_of_floats.append(self.accy_zero)
self.list_of_floats.append(self.accz_zero)
self.pitch = 180 * np.arctan2(self.accx, np.sqrt(self.accy*self.accy + self.accz*self.accz))/3.14
self.roll = 180 * np.arctan2(self.accy, np.sqrt(self.accx*self.accx + self.accz*self.accz))/3.14
self.list_of_floats.append(self.pitch)
self.list_of_floats.append(self.roll)
#append aruco stuff
#self.list_of_floats.extend(self.pegrvec)
#self.list_of_floats.extend(self.pegtvec)
#self.list_of_floats.extend(self.holervec)
#self.list_of_floats.extend(self.holetvec)
#self.getPegDepth()
#print(self.depth_1,self.depth_2)
#self.list_of_floats.append(self.phi1deg)
#self.list_of_floats.append(self.phi2deg)
#self.list_of_floats.append(self.phi3deg)
#self.list_of_floats.append(self.beta1deg)
#self.list_of_floats.append(self.beta2deg)
#self.list_of_floats.append(self.beta3deg)
#self.list_of_floats.append(self.depth_1)
self.winchenc1=0
self.winchenc2=0
self.winchenc3=0
if self.connectflag==1:
self.winchenc1=self.odrv0.get_encoder_count(0)
self.winchenc2=self.odrv1.get_encoder_count(0)
self.winchenc3=self.odrv1.get_encoder_count(1)
self.list_of_floats.append(self.winchenc1)
self.list_of_floats.append(self.winchenc2)
self.list_of_floats.append(self.winchenc3)
self.list_of_floats.append(self.mot1spd)
self.list_of_floats.append(self.mot2spd)
self.list_of_floats.append(self.mot3spd)
#self.phi1rad=self.phi1deg*3.14/180
#self.phi2rad=self.phi2deg*3.14/180
#self.phi3rad=self.phi3deg*3.14/180
#self.str1P=self.str1*np.cos(self.phi1rad)
#self.str2P=self.str2*np.cos(self.phi2rad)
#self.str3P=self.str3*np.cos(self.phi3rad)
self.list_of_floats.insert(0,time.time())
#yrdgs.append((self.ytilt-self.ytilt_zero))
#zrdgs.append((self.ztilt-self.ztilt_zero))
#self.ytilta=self.avg(yrdgs)
#self.ztilta=self.avg(zrdgs)
#self.list_of_floats.append(self.ytilta)
#self.list_of_floats.append(self.ztilta)
#if len(yrdgs)==20:
# yrdgs.pop(0)
#if len(zrdgs)==20:
# zrdgs.pop(0)
if tosaveflag==1:
self.DataToSave()
except:
pass
def CalibrateIMU(self):
self.buttjpin=0
input("Calibrate the IMU. Press 1 to start, hit the joystick button 4 to stop")
while self.buttjpin==0:
self.get_data(0)
print(self.buttjpin,self.sys_cal,self.gyro_cal,self.acc_cal,self.mag_cal)
self.get_data(0)
time.sleep(2)
input("get IMU Data. Hit joystick button to stop")
self.get_data(0)
self.buttjpin=0
while self.buttjpin==0:
self.get_data(0)
pitch = 180 * np.arctan2(self.accx, np.sqrt(self.accy*self.accy + self.accz*self.accz))/3.14
roll = 180 * np.arctan2(self.accy, np.sqrt(self.accx*self.accx + self.accz*self.accz))/3.14
#print([self.ytilt,self.ytilt-self.ytilt_zero, self.ztilt,self.ztilt-self.ztilt_zero,self.psi ,self.accz, self.sys_cal, self.gyro_cal, self.acc_cal, self.mag_cal])
#print([self.psi,self.accx-self.accx_zero,self.accy-self.accy_zero,self.accz-self.accz_zero])
print([round(self.ytilt,2),round(self.ztilt,2)])
def GetIMUOffsets(self):
print(self.roll,self.roll-self.ytilt_zero, self.pitch,self.pitch-self.ztilt_zero)
getimuoffsets_var=input("Enter 1 to get IMU Offsets. Enter 0 to use stored offsets: ")
if int(getimuoffsets_var)==1:
input("Adjust peg so it is in the hole")
time.sleep(1)
self.get_data(0)
self.buttjpin=0
print(self.buttjpin)
while self.buttjpin==0:
self.get_data(0)
self.getJoystickMotorSpeed(1)
print(self.mot1spd,self.mot2spd,self.mot3spd)
self.CmdMotors()
input("Let the peg rest so vals can be obtained: ")
tc=time.time()
accxlist=[]
accylist=[]
acczlist=[]
ytiltlist=[]
ztiltlist=[]
rolllist=[]
pitchlist=[]
while time.time()-tc<5:
self.ReadSerial(0)
accxlist.append(self.accx)
accylist.append(self.accy)
acczlist.append(self.accz)
ytiltlist.append(self.ytilt)
ztiltlist.append(self.ztilt)
rolllist.append(self.roll)
pitchlist.append(self.pitch)
self.accx_zero=self.avg(accxlist)
self.accy_zero=self.avg(accylist)
self.accz_zero=self.avg(acczlist)
# self.ytilt_zero=self.avg(rolllist)
# self.ztilt_zero=self.avg(pitchlist)
self.ytilt_zero=self.avg(ytiltlist)
self.ztilt_zero=self.avg(ztiltlist)
print("done!")
print(self.ytilt_zero,self.ztilt_zero,self.accx_zero,self.accy_zero,self.accz_zero)
input("Write down ytilt_zero,ztilt_zero and accx_zero,accy_zero,accz_zero in the code for future use!!")
# self.get_data(0)
# time.sleep(2)
# input("get IMU Data. Hit joystick button to stop")
# self.get_data(0)
# a.buttjpin=0
# while a.buttjpin==0:
# self.get_data(0)
# pitch = 180 * np.arctan2(self.accx ,np.sqrt(self.accy*self.accy+ self.accz*self.accz))/3.14;
# roll = 180 * np.arctan2(self.accy, np.sqrt(self.accx*self.accx + self.accz*self.accz))/3.14;
# #print([self.ytilt,self.ytilt-self.ytilt_zero, self.ztilt,self.ztilt-self.ztilt_zero,self.psi ,self.accz, self.sys_cal, self.gyro_cal, self.acc_cal, self.mag_cal])
# #print([self.psi,self.accx-self.accx_zero,self.accy-self.accy_zero,self.accz-self.accz_zero])
# print([self.ytilt-self.ytilt_zero,self.roll-self.ytilt_zero, self.ztilt-self.ztilt_zero,self.pitch-self.ztilt_zero])
def IMUData(self):
self.get_data(0)
time.sleep(2)
input("get IMU Data. Hit joystick button to stop")
self.get_data(0)
self.buttjpin=0
while self.buttjpin==0:
self.get_data(0)
pitch = 180 * np.arctan2(self.accx, np.sqrt(self.accy*self.accy + self.accz*self.accz))/3.14
roll = 180 * np.arctan2(self.accy, np.sqrt(self.accx*self.accx + self.accz*self.accz))/3.14
#print([self.ytilt,self.ytilt-self.ytilt_zero, self.ztilt,self.ztilt-self.ztilt_zero,self.psi ,self.accz, self.sys_cal, self.gyro_cal, self.acc_cal, self.mag_cal])
#print([self.psi,self.accx-self.accx_zero,self.accy-self.accy_zero,self.accz-self.accz_zero])
print([self.ytilt-self.ytilt_zero,self.roll-self.ytilt_zero, self.ztilt-self.ztilt_zero,self.pitch-self.ztilt_zero])
def IMUData2(self):
self.get_data(0)
time.sleep(2)
input("get IMU Data. Hit joystick button to stop")
self.get_data(0)
self.buttjpin=0
while self.buttjpin==0:
self.get_data(0)
pitch = 180 * np.arctan2(self.accx, np.sqrt(self.accy*self.accy + self.accz*self.accz))/3.14
roll = 180 * np.arctan2(self.accy, np.sqrt(self.accx*self.accx + self.accz*self.accz))/3.14
#print([self.ytilt,self.ytilt-self.ytilt_zero, self.ztilt,self.ztilt-self.ztilt_zero,self.psi ,self.accz, self.sys_cal, self.gyro_cal, self.acc_cal, self.mag_cal])
#print([self.psi,self.accx-self.accx_zero,self.accy-self.accy_zero,self.accz-self.accz_zero])
print([round(self.ytilt,2),round(self.ztilt,2)])
""" def Sensorcheck(self):
self.buttjpin=0
sensorcheckflag=0
sensorcheckflag=int(input("Do you want to check sensors? 1 for yes: "))
if sensorcheckflag==1:
while self.buttjpin==0:
self.get_data(0)
#print([ self.str1,self.str2,self.str3])
print([ round(self.str1,2), round(self.str2,2),round(self.str3,2), round(self.phi1deg,1),round(self.phi2deg,1),round(self.phi3deg,1), round(self.beta1deg,1),round(self.beta2deg,1),round(self.beta3deg,1)])
#print([ self.phi1enc,self.phi2enc,self.phi3enc, self.phi1deg,self.phi2deg,self.phi3deg])
time.sleep(1)
self.buttjpin=0
while self.buttjpin==0:
self.get_data(0)
#print([ self.str1,self.str2,self.str3])
print([ round(self.ytilt,3), round(self.ztilt,3)])
#print([ self.phi1enc,self.phi2enc,self.phi3enc, self.phi1deg,self.phi2deg,self.phi3deg]) """
### SAVING AND GET DATA
def SetupNewFile(self):
if self.testname=='1':
self.tn=input("Enter Test name for series, without number: ")
self.TotalList=[]
self.testname=self.tn+'_'+str(self.testCounter)
self.testCounter=self.testCounter+1
print(self.testname)
self.vidname='/home/rachel/odrive/Data_and_Vids/'+ self.testname
self.filename=self.vidname+".csv"
self.vidfile1=self.vidname+".avi"
self.vidfile2=self.vidname+"_2"+".avi"
# self.cap1 = cv2.VideoCapture(int(self.camnum1))
# self.frame_width1 = int(self.cap1.get(3))
# self.frame_height1 = int(self.cap1.get(4))
self.window = 'Camera'
self.out1 = cv2.VideoWriter(self.vidfile1,cv2.VideoWriter_fourcc('M','J','P','G'), 10, (self.frame_width1,self.frame_height1))
self.out2 = cv2.VideoWriter(self.vidfile2,cv2.VideoWriter_fourcc('M','J','P','G'), 10, (self.frame_width1,self.frame_height1))
def DataToSave(self):
#self.TotalList.append([self.ytilt,self.ztilt,self.str1,self.str2,self.str3])
self.TotalList.append(self.list_of_floats)
def writevideo(self):
self.ret1, self.frame1 = self.cap1.read()
self.out1.write(self.frame1)
# self.ret2, self.frame2 = self.cap2.read()
# self.out2.write(self.frame2)
def delaywithvideo(self,timedelay):
tc=time.time()
while time.time()-tc<timedelay:
#self.writevideo()
self.get_data(1)
def writefile(self):
with open(self.filename, "w") as f:
writer = csv.writer(f)
writer.writerows(self.TotalList)
print("saved")
def finishtestrecording(self):
self.StopPeg()
#a.writevideo()
self.writefile()
#self.cap1.release()
self.out1.release()
self.out2.release()
cv2.destroyAllWindows()
def get_data(self,tosaveflag):
self.ReadDisplayCVApril(tosaveflag)
self.ReadSerial(tosaveflag)
def ring_alignment(self):
""" Perform the alignment of the ring and the peg using data from the IMU. """
xtilt_thresh = 3
ytilt_thresh = 3
while True:
dz = 0.03
self.move_gantry(self.x, self.y, self.z+dz)
perp_vec = np.cross(self.rot_ax, [0,0,1])
if self.rot_ang > 3:
self.move_gantry(self.x-perp_vec[0],self.y-perp_vec[1],self.z-perp_vec[2])
elif self.rot_ang < 3:
break
self.move_gantry(self.x,self.y,self.z+0.1)
def exit_system(self):
os.system("stty echo")
sys.exit()
if __name__ == '__main__':
odrv0 = Odrive('20673881304E') # Only has 1 winch
odrv1 = Odrive('2087377E3548') # Has 2 winches
if (doCalibrate):
print('ODrive 0 Calibrating')
odrv0.full_init()
time.sleep(2)
print('ODrive 1 Calibrating')
odrv1.full_init()
print('Calibration Complete')
app = ManualWinchApp()
app.run()
|
nilq/baby-python
|
python
|
# Generated by Django 2.0.3 on 2018-04-07 13:59
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0035_auto_20180402_1507'),
]
operations = [
migrations.AddField(
model_name='project_attendees',
name='minimum_registration',
field=models.PositiveIntegerField(default=2),
),
migrations.AlterField(
model_name='project_volunteers',
name='minimum_registration',
field=models.PositiveIntegerField(default=2),
),
]
|
nilq/baby-python
|
python
|
from .fpath import *
from .tree import *
|
nilq/baby-python
|
python
|
from .constants import AWSRegion
def parse_aws_region(region_arg: str) -> AWSRegion:
for region in AWSRegion:
if region_arg == region.value[0]:
return region
raise ValueError(f'Invalid AWS region {region_arg}')
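# Usage sketch (assumes each AWSRegion member's value is a tuple whose
# first element is the region code, e.g. value[0] == 'us-east-1'):
# region = parse_aws_region('us-east-1')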
|
nilq/baby-python
|
python
|
import os
import json
import dfd
extended_python_path = dfd.get_path_if_exists('extended_python_path')
environment_path = dfd.get_path_if_exists('environment')
if extended_python_path:
import site
site.addsitedir(extended_python_path)
if environment_path:
with open(environment_path) as env_file:
new_env_vars = json.load(env_file)
os.environ.update(new_env_vars)
real_settings = os.environ.get('DJANGO_SETTINGS_MODULE')
if real_settings is not None:
os.environ['REAL_DJANGO_SETTINGS'] = real_settings
os.environ['DJANGO_SETTINGS_MODULE'] = 'dfd_settings'
|
nilq/baby-python
|
python
|
from typing import Optional, Tuple
from munch import Munch
import logging
from api.jwt import Jwt, JwtPayload
ADMIN_AUTHORITY = "ADMIN"
BASIC_AUTHORITY = "BASIC"
class Auth:
def __init__(self, event):
self._event = Munch.fromDict(event)
self.jwt = Jwt()
@property
def auth_header(self) -> Optional[str]:
if self.event.headers.get("Authorization"):
return (
self.event.headers.Authorization[7:]
if "Bearer " in self.event.headers.Authorization
else self.event.headers.Authorization
)
@property
def refresh_header(self) -> Optional[str]:
if self.event.headers.get("Refresh"):
return (
self.event.headers.Refresh[7:]
if "Bearer " in self.event.headers.Refresh
else self.event.headers.Refresh
)
def validate_jwt(self) -> Tuple[bool, Optional[str], Optional[str]]:
jwt_payload = self.get_jwt_payload()
if not jwt_payload:
logging.info("JWT payload is missing.")
return False, None, None
if not jwt_payload.all_fields_present():
logging.info("JWT payload is missing a field.")
return False, None, None
if jwt_payload.is_expired():
refresh_payload = self.get_refresh_payload()
if (
refresh_payload
and refresh_payload.all_fields_present()
and not refresh_payload.is_expired()
):
new_jwt = self.jwt.extend_jwt_token(jwt_payload)
return (
True,
new_jwt,
self.refresh_header,
)
return False, None, None
return True, self.auth_header, self.refresh_header
def is_admin(self) -> bool:
payload = self.get_jwt_payload()
return ADMIN_AUTHORITY in payload.authorities if payload else False
def get_jwt_payload(self) -> Optional[JwtPayload]:
return self.jwt.decode_jwt_token(self.auth_header)
def get_refresh_payload(self) -> Optional[JwtPayload]:
return self.jwt.decode_refresh_token(self.refresh_header)
@property
def event(self) -> Munch:
return Munch.fromDict(self._event)
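# Usage sketch (hypothetical API-Gateway-style event shape):
# auth = Auth({"headers": {"Authorization": "Bearer <jwt>", "Refresh": "Bearer <refresh>"}})
# is_valid, jwt_token, refresh_token = auth.validate_jwt()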
|
nilq/baby-python
|
python
|
import choraconfig, re, sys, os.path
def master_theorem_bounds_callout(params) :
if "logpath" not in params :
print "ERROR: duet_bounds_callout was called without a path"
sys.exit(0)
#output = ""
with open(params["logpath"],"rb") as logfile : output = logfile.read().strip()
return output
# really should have a tool root
tool = choraconfig.get_default_tool_dict()
tool["displayname"] = "Master Theorem"
tool["shortname"] = "master"
tool["root"] = choraconfig.benchroot + "rba/master-theorem"
tool["cmd"] = ["python",os.path.join(tool["root"],"mastertheorem.py"),"{filename}"]
tool["bounds_callout"] = master_theorem_bounds_callout
tool["no_assert_line_numbers"] = True
tool["error_callout"] = choraconfig.generic_error_callout
|
nilq/baby-python
|
python
|
import numpy as np
class Territory:
def __init__(self,name,adjacent_territories,occupying_player=None,troops=None):
self.name = name
self.adjacent_territories = adjacent_territories
self.occupying_player = occupying_player
self.troops = troops
def __str__(self):
return str(self.__class__) + ": " + str(self.__dict__)
def get_bsr(self,game):
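# bsr ("border security ratio", assumed expansion): troops in all
# adjacent territories divided by this territory's own troop count,
# so higher values mean a more threatened border.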
get = getattr(game,'get_territory')
self.bsr = sum([len(get(territory).troops) for territory in self.adjacent_territories if get(territory).troops]) / len(self.troops)
return self.bsr
def get_nbsr(self,game):
self.nbsr = self.get_bsr(game) / sum([trt.get_bsr(game) for trt in self.occupying_player.territories])
return self.nbsr
def json(self):
return {
"name":self.name,
"adjacent_territories":self.adjacent_territories,
"occupying_player":self.occupying_player.id if self.occupying_player else None,
"troops": [troop.json() for troop in self.troops] if self.occupying_player else None
}
def print(self):
print("Territory",self.name," occupied by player",self.occupying_player.id,
"with",len(self.troops),"troops")
|
nilq/baby-python
|
python
|
# This file will contain the entry point where you load the data and init the variables
|
nilq/baby-python
|
python
|
from mars_profiling.report.presentation.core.collapse import Collapse
from mars_profiling.report.presentation.core.container import Container
from mars_profiling.report.presentation.core.duplicate import Duplicate
from mars_profiling.report.presentation.core.frequency_table import FrequencyTable
from mars_profiling.report.presentation.core.frequency_table_small import (
FrequencyTableSmall,
)
from mars_profiling.report.presentation.core.html import HTML
from mars_profiling.report.presentation.core.image import Image
from mars_profiling.report.presentation.core.root import Root
from mars_profiling.report.presentation.core.sample import Sample
from mars_profiling.report.presentation.core.table import Table
from mars_profiling.report.presentation.core.toggle_button import ToggleButton
from mars_profiling.report.presentation.core.variable import Variable
from mars_profiling.report.presentation.core.variable_info import VariableInfo
from mars_profiling.report.presentation.core.warnings import Warnings
|
nilq/baby-python
|
python
|
from django.urls import path, include
from users.api.loginviews import LoginAPI
urlpatterns = [
path('', LoginAPI.as_view())
]
|
nilq/baby-python
|
python
|
def corrupt_part_data_on_disk(node, table, part_name):
part_path = node.query(
"SELECT path FROM system.parts WHERE table = '{}' and name = '{}'".format(
table, part_name
)
).strip()
corrupt_part_data_by_path(node, part_path)
def corrupt_part_data_by_path(node, part_path):
print("Corrupting part", part_path, "at", node.name)
print(
"Will corrupt: ",
node.exec_in_container(
["bash", "-c", "cd {p} && ls *.bin | head -n 1".format(p=part_path)]
),
)
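# Append a stray byte to the first .bin data file so the part's contents
# no longer match its stored checksums and the server reports it broken.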
node.exec_in_container(
[
"bash",
"-c",
"cd {p} && ls *.bin | head -n 1 | xargs -I{{}} sh -c 'echo \"1\" >> $1' -- {{}}".format(
p=part_path
),
],
privileged=True,
)
|
nilq/baby-python
|
python
|
#!/usr/bin/python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unitests for wbo2.py"""
import unittest
from absl.testing import absltest
import config_lib
import mock
import serial
import wbo2
TEST_FRAME = (b'Z\xa5\x08\x0c\xf8\x0f\xff \x00\x020\x01`\x03\xd0\x00\x15\x00'
b'\x1a\x00 \x01\xa4\x00\x00\x03\x00i')
class TestWBO2(unittest.TestCase):
"""WBO2 unittests."""
def setUp(self):
super().setUp()
self.start = 0
def MockRead(self, size=1):
data = TEST_FRAME[5:] + TEST_FRAME
output = data[self.start:self.start + size]
self.start += size
return output
def testFindFrameStart(self):
mock_serial = mock.create_autospec(serial.Serial)
mock_serial.read.side_effect = self.MockRead
self.assertEqual(TEST_FRAME, wbo2.FindFrameStart(mock_serial))
def testCheckFrame(self):
self.assertTrue(wbo2.CheckFrame(TEST_FRAME))
self.assertFalse(wbo2.CheckFrame(TEST_FRAME[:-1] + b'0x02'))
def testReadSerial(self):
mock_serial = mock.create_autospec(serial.Serial)
mock_serial.read.side_effect = self.MockRead
for frame in wbo2.ReadSerial(mock_serial):
self.assertEqual(TEST_FRAME, frame)
break
def testGetBytes(self):
self.assertEqual(0.5962854349951124, wbo2.GetBytes(TEST_FRAME, 'user_3'))
self.assertEqual(0.0010162306553235967,
wbo2.GetBytes(TEST_FRAME, 'thermocouple_1'))
self.assertEqual(3320, wbo2.GetBytes(TEST_FRAME, 'tick'))
self.assertEqual(0, wbo2.GetBytes(TEST_FRAME, 'rpm_count'))
self.assertEqual(14.69820556640625, wbo2.GetBytes(TEST_FRAME, 'lambda_16'))
def testLambda16ToAFR(self):
lambda_16 = int.from_bytes(b'\x0f\xff', 'big')
self.assertEqual(14.69820556640625, wbo2.Lambda16ToAFR(lambda_16))
def testRPMCountToRPM(self):
self.assertEqual(6000, wbo2.RPMCountToRPM(1000, 4))
self.assertEqual(4000, wbo2.RPMCountToRPM(1000, 6))
def testGetUser3(self):
self.assertEqual(0.5962854349951124,
wbo2.GetBytes(TEST_FRAME, 'user_3'))
def testAddConfigValues(self):
config = config_lib.LoadConfig('etc/corrado.yaml')
interface = wbo2.WBO2(config, start_process=False)
keys = {'afr': None, 'rpm': None, 'tps_voltage': None}.keys()
self.assertEqual(keys, interface.values.keys())
if __name__ == '__main__':
absltest.main()
|
nilq/baby-python
|
python
|
import numpy as np
import pandas as pd
import bbi
import pysam
##only mappability_by_idx is called from the top level
def load_chromsizes(f_bw):
chroms = bbi.chromsizes(f_bw)
chroms.pop('chrM')
chroms.pop('chrX')
chroms.pop('chrY')
return chroms
def mappability_by_window(f_mapp, window, overlap=0):
chroms = load_chromsizes(f_mapp)
mapp_lst = []
for chr_id, chr_size in chroms.items():
print(chr_id, end=' ')
i = 0
while i + window < chr_size:
# print(i)
mapp = bbi.fetch(f_mapp, chr_id, i, i + window, bins=1)[0]
mapp_lst.append([chr_id, i, i+window, mapp])
i += window - overlap
return pd.DataFrame(np.array(mapp_lst),
columns=['CHROM', 'START', 'END', 'MAPP'])
def mappability_by_idx(f_mapp, idx):
mapp_lst = []
chr_prev = ''
for row in idx:
chr_id = 'chr{}'.format(row[0])
start = row[1]
end = row[2]
if chr_id != chr_prev:
print(chr_id)
mapp = bbi.fetch(f_mapp, chr_id, start, end, bins=1)[0]
mapp_lst.append([row[0], start, end, mapp])
chr_prev = chr_id
return mapp_lst
def P_bases_by_window(f_fasta, window, overlap=0):
fasta = pysam.FastaFile(f_fasta)
sizes = fasta.lengths
chroms = fasta.references
mapp_lst = []
for chr_id, chr_size in zip(chroms, sizes):
print(chr_id, end=' ')
i = 0
while i + window < chr_size:
seq = fasta.fetch(chr_id, i, i + window)
mapp = seq.count('P') / window
mapp_lst.append([chr_id, i, i+window, mapp])
i += window - overlap
return pd.DataFrame(np.array(mapp_lst),
columns=['CHROM', 'START', 'END', 'MAPP'])
|
nilq/baby-python
|
python
|
import unittest
import io
from contextlib import redirect_stdout
from rdflib import Namespace, Graph
from sparqlslurper import SlurpyGraph
from sparqlslurper._graphdb_slurpygraph import GraphDBSlurpyGraph
endpoint = 'https://graph.fhircat.org/repositories/fhirontology'
class SparqlParametersTestCase(unittest.TestCase):
def test_parms(self):
""" Show how to pass a parameter to a wrapper instance
This test assumes a GraphDB SPARQL endpoint
loaded with the fhir.ttl w/ the inference option on.
We are testing that the parameter makes it through and
changes the behavior of the server.
Note that a copy of fhir.ttl can be found in tests/data.
"""
FHIR = Namespace("http://hl7.org/fhir/")
g = GraphDBSlurpyGraph(endpoint)
self.assertLess(85, len(list(g.predicate_objects(FHIR.Patient))))
g = GraphDBSlurpyGraph(endpoint)
g.sparql.addParameter("infer", "false")
self.assertGreater(60, len(list(g.predicate_objects(FHIR.Patient))))
g = GraphDBSlurpyGraph(endpoint + '?infer=false')
self.assertGreater(60, len(list(g.predicate_objects(FHIR.Patient))))
if __name__ == '__main__':
unittest.main()
|
nilq/baby-python
|
python
|
import pathlib
import subprocess
import signal
import time
import os
import sys
import argparse
def main():
parser = argparse.ArgumentParser(prog="run-snet-services")
parser.add_argument("--daemon-config-path", help="Path to daemon configuration file", required=False)
args = parser.parse_args(sys.argv[1:])
root_path = pathlib.Path(__file__).absolute().parent
all_p = [start_snetd(root_path, args.daemon_config_path), start_service(root_path)]
# Continuous checking all subprocess
while True:
for p in all_p:
p.poll()
if p.returncode and p.returncode != 0:
kill_and_exit(all_p)
time.sleep(1)
def start_snetd(cwd, daemon_config_path=None):
cmd = ["snetd", "serve"]
if daemon_config_path is not None:
cmd.extend(["--config", daemon_config_path])
return subprocess.Popen(cmd, cwd=cwd)
def start_service(cwd):
return subprocess.Popen(["python3.6", "-m", "services.summary_server"], cwd=cwd)
def kill_and_exit(all_p):
"""
Kills main, service and daemon's processes if one fails.
"""
for p in all_p:
try:
os.kill(p.pid, signal.SIGTERM)
except Exception as e:
print(e)
exit(1)
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
from django.urls import path
from . import views
urlpatterns = [
path('index',views.index,name='Iniciowarehouse')
]
|
nilq/baby-python
|
python
|
# coding=utf-8
from collections import namedtuple
CamouflageInfo = namedtuple('CamouflageInfo', ['id', 'schemeId'])
|
nilq/baby-python
|
python
|
lua_1 = """
local k = 1/math.sqrt(0.05)
local val = tonumber(ARGV[1])
local old_vals = redis.call('get',KEYS[1])
local new_vals = {}
if (old_vals) then
old_vals = cjson.decode(old_vals)
new_vals["count_1"] = old_vals['count_1'] + 1
local delta = val - old_vals["mean_1"]
new_vals["mean_1"] = old_vals["mean_1"] + delta / new_vals["count_1"]
new_vals["M2_1"] = old_vals["M2_1"] + delta * (val - new_vals["mean_1"])
new_vals["variance_1"] = new_vals["M2_1"] / new_vals["count_1"]
local std = math.sqrt(new_vals["variance_1"])
new_vals["ODV1L"] = new_vals["mean_1"] - k * std
new_vals["ODV1U"] = new_vals["mean_1"] + k * std
if (val <= new_vals["ODV1U"] and val >= new_vals["ODV1L"]) then
new_vals["count_2"] = old_vals['count_2'] + 1
delta = val - old_vals["mean_2"]
new_vals["mean_2"] = old_vals["mean_2"] + delta / new_vals["count_2"]
new_vals["M2_2"] = old_vals["M2_2"] + delta * (val - new_vals["mean_2"])
new_vals["variance_2"] = new_vals["M2_2"] / new_vals["count_2"]
std = math.sqrt(new_vals["variance_2"])
new_vals["ODV2L"] = new_vals["mean_2"] - k * std
new_vals["ODV2U"] = new_vals["mean_2"] + k * std
else
new_vals["count_2"] = old_vals['count_2']
new_vals["mean_2"] = old_vals["mean_2"]
new_vals["M2_2"] = old_vals["M2_2"]
new_vals["variance_2"] = old_vals["variance_2"]
new_vals["ODV2L"] = old_vals["ODV2L"]
new_vals["ODV2U"] = old_vals["ODV2U"]
end
else
new_vals["count_1"] = 1
new_vals["mean_1"] = val
new_vals["M2_1"] = 0
new_vals["variance_1"] = 0
new_vals["ODV1L"] = val
new_vals["ODV1U"] = val
new_vals["count_2"] = 1
new_vals["mean_2"] = val
new_vals["M2_2"] = 0
new_vals["variance_2"] = 0
new_vals["ODV2L"] = val
new_vals["ODV2U"] = val
end
redis.call('set', KEYS[1], cjson.encode(new_vals))
"""
lua_2 = """
local val = tonumber(ARGV[1])
local dt = tostring(ARGV[2])
local month = tostring(ARGV[3])
local hour = tostring(ARGV[4])
local old_vals = redis.call('get',KEYS[1])
local new_vals = {}
if (old_vals) then
old_vals = cjson.decode(old_vals)
new_vals = old_vals
if(old_vals["count_" .. dt]) then
new_vals["count_" .. dt] = old_vals["count_" .. dt] + 1
new_vals["sum_" .. dt] = old_vals["sum_" .. dt] + val
else
new_vals["count_" .. dt] = 1
new_vals["sum_" .. dt] = val
end
if (old_vals["count_" .. dt .. '_' .. hour]) then
new_vals["count_" .. dt .. '_' .. hour] = old_vals["count_" .. dt .. '_' .. hour] + 1
new_vals["sum_" .. dt .. '_' .. hour] = old_vals["sum_" .. dt .. '_' .. hour] + val
else
new_vals["count_" .. dt .. '_' .. hour] = 1
new_vals["sum_" .. dt .. '_' .. hour] = val
end
else
new_vals["count_" .. dt .. '_' .. hour] = 1
new_vals["sum_" .. dt .. '_' .. hour] = val
new_vals["count_" .. dt] = 1
new_vals["sum_" .. dt] = val
end
redis.call('set', KEYS[1], cjson.encode(new_vals))
"""
|
nilq/baby-python
|
python
|
import time
from django.core.management.base import BaseCommand
from django.db import transaction
import database_locks
class Command(BaseCommand):
help = 'Lock it!'
def add_arguments(self, parser):
parser.add_argument('lock_name', help='lock name to be used')
parser.add_argument(
'-o',
'--owner',
help='Owner to be registered with the lock (used to renew and persist lock - hostname is default)',
)
parser.add_argument(
'-d', '--duration', type=int, default=10, help='Lock duration (in seconds)'
)
def handle(self, *args, **options):
with database_locks.lock(options['lock_name'], locked_by=options['owner']):
self.stdout.write(f'Got the lock, sleeping {options["duration"]} seconds')
time.sleep(options["duration"])
self.stdout.write('Releasing lock')
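# Example invocation (the command name comes from this module's filename,
# assumed here to be lock_it):
# python manage.py lock_it my-lock --owner worker-1 --duration 30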
|
nilq/baby-python
|
python
|
import logging.config
import uvicorn
from fastapi import FastAPI, Request, status
from fastapi.encoders import jsonable_encoder
from dotenv import load_dotenv
from fastapi.responses import JSONResponse, PlainTextResponse
from starlette.exceptions import HTTPException as StarletteHTTPException
from fastapi.exceptions import RequestValidationError
from layer_view import view
from config.settings import config_basic
logging.config.dictConfig(config_basic)
logger = logging.getLogger(__name__)
load_dotenv()
app = FastAPI()
app.include_router(view.router)
@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request, exc):
return PlainTextResponse(str(exc.detail), status_code=exc.status_code)
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request: Request, exc: RequestValidationError):
return JSONResponse(
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
content=jsonable_encoder({"detail": exc.errors(), "body": exc.body}),
)
@app.exception_handler(Exception)
async def exception_general_handler(request: Request, exc: Exception):
return JSONResponse(
status_code=418,
content={"message": f"Nope wrong Ask"},
)
if __name__ == "__main__":
uvicorn.run(
"app:app", host="0.0.0.0", port=5001, reload=True
)
|
nilq/baby-python
|
python
|
import io
import nextcord
async def send_code_block_maybe_as_file(ctx, text):
"""
Sends a code block to the current context.
If it's too long to fit in a single message, it will
instead be sent as a file.
"""
if len(text) > 2000:
file = io.StringIO()
file.writelines(text)
file.seek(0)
await ctx.send(file=nextcord.File(file, filename="agenda.md"))
else:
await ctx.send(f"```{text}```")
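# Usage sketch (hypothetical command context):
# await send_code_block_maybe_as_file(ctx, agenda_text)
# Discord caps a single message at 2000 characters, hence the file fallback.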
|
nilq/baby-python
|
python
|
import itertools
import binascii
def detect_ecb(s,klen):
blocks = [s[i:i+klen] for i in range(0,len(s),klen)]
pairs = itertools.combinations(blocks,2)
score = 0
for p in pairs:
if p[0] == p[1]:
score += 1
return score > 0
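# ECB encrypts identical plaintext blocks to identical ciphertext blocks,
# so any repeated 16-byte block in a ciphertext is strong evidence of ECB.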
def main():
f = open('8.txt', 'r')
data = f.read()
lines = data.split('\n')
for i,l in enumerate(lines):
if detect_ecb(binascii.unhexlify(l), 16):
print("Possible AES ECB mode on line: " + str(i))
main()
|
nilq/baby-python
|
python
|
# Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""A minimal interface mlp module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from six.moves import xrange # pylint: disable=redefined-builtin
from sonnet.python.modules import base
from sonnet.python.modules import basic
from sonnet.python.modules import util
import tensorflow as tf
class MLP(base.AbstractModule, base.Transposable):
"""A Multi-Layer perceptron module."""
def __init__(self,
output_sizes,
activation=tf.nn.relu,
activate_final=False,
initializers=None,
partitioners=None,
regularizers=None,
use_bias=True,
name="mlp"):
"""Constructs an MLP module.
Args:
output_sizes: An iterable of output dimensionalities as defined in
`basic.Linear`. Output size can be defined either as number or via a
callable. In the latter case, since the function invocation is deferred
to graph construction time, the user must only ensure that entries can
be called when build is called. Each entry in the iterable defines
properties in the corresponding linear layer.
activation: An activation op. The activation is applied to intermediate
layers, and optionally to the output of the final layer.
activate_final: Boolean determining if the activation is applied to
the output of the final layer. Default `False`.
initializers: Optional dict containing ops to initialize the linear
layers' weights (with key 'w') or biases (with key 'b').
partitioners: Optional dict containing partitioners to partition the
linear layers' weights (with key 'w') or biases (with key 'b').
regularizers: Optional dict containing regularizers for the linear layers'
weights (with key 'w') and the biases (with key 'b'). As a default, no
regularizers are used. A regularizer should be a function that takes
a single `Tensor` as an input and returns a scalar `Tensor` output, e.g.
the L1 and L2 regularizers in `tf.contrib.layers`.
use_bias: Whether to include bias parameters in the linear layers.
Default `True`.
name: Name of the module.
Raises:
KeyError: If initializers contains any keys other than 'w' or 'b'.
KeyError: If regularizers contains any keys other than 'w' or 'b'.
ValueError: If output_sizes is empty.
TypeError: If `activation` is not callable; or if `output_sizes` is not
iterable.
"""
super(MLP, self).__init__(name=name)
if not isinstance(output_sizes, collections.Iterable):
raise TypeError("output_sizes must be iterable")
output_sizes = tuple(output_sizes)
if not output_sizes:
raise ValueError("output_sizes must not be empty")
self._output_sizes = output_sizes
self._num_layers = len(self._output_sizes)
self._input_shape = None
self.possible_keys = self.get_possible_initializer_keys(use_bias=use_bias)
self._initializers = util.check_initializers(
initializers, self.possible_keys)
self._partitioners = util.check_partitioners(
partitioners, self.possible_keys)
self._regularizers = util.check_regularizers(
regularizers, self.possible_keys)
if not callable(activation):
raise TypeError("Input 'activation' must be callable")
self._activation = activation
self._activate_final = activate_final
self._use_bias = use_bias
self._instantiate_layers()
def _instantiate_layers(self):
"""Instantiates all the linear modules used in the network.
Layers are instantiated in the constructor, as opposed to the build
function, because MLP implements the Transposable interface, and the
transpose function can be called before the module is actually connected
to the graph and build is called.
Notice that this is safe since layers in the transposed module are
instantiated using a lambda returning input_size of the mlp layers, and
this doesn't have to return sensible values until the original module is
connected to the graph.
"""
with self._enter_variable_scope():
self._layers = [basic.Linear(self._output_sizes[i],
name="linear_{}".format(i),
initializers=self._initializers,
partitioners=self._partitioners,
regularizers=self._regularizers,
use_bias=self.use_bias)
for i in xrange(self._num_layers)]
@classmethod
def get_possible_initializer_keys(cls, use_bias=True):
return basic.Linear.get_possible_initializer_keys(use_bias=use_bias)
def _build(self, inputs):
"""Assembles the `MLP` and connects it to the graph.
Args:
inputs: A 2D Tensor of size `[batch_size, input_size]`.
Returns:
A 2D Tensor of size `[batch_size, output_sizes[-1]]`.
"""
self._input_shape = tuple(inputs.get_shape().as_list())
net = inputs
final_index = self._num_layers - 1
for layer_id in xrange(self._num_layers):
net = self._layers[layer_id](net)
if final_index != layer_id or self._activate_final:
net = self._activation(net)
return net
@property
def layers(self):
"""Returns a tuple containing the linear layers of the `MLP`."""
return self._layers
@property
def output_sizes(self):
return tuple([l() if callable(l) else l for l in self._output_sizes])
@property
def use_bias(self):
return self._use_bias
@property
def initializers(self):
"""Returns the intializers dictionary."""
return self._initializers
@property
def partitioners(self):
"""Returns the partitioners dictionary."""
return self._partitioners
@property
def regularizers(self):
"""Returns the regularizers dictionary."""
return self._regularizers
@property
def activation(self):
return self._activation
@property
def activate_final(self):
return self._activate_final
# Implements Transposable interface
@property
def input_shape(self):
"""Returns shape of input `Tensor` passed at last call to `build`."""
self._ensure_is_connected()
return self._input_shape
# Implements Transposable interface
def transpose(self, name=None, activate_final=None):
"""Returns transposed `MLP`.
Args:
      name: Optional string specifying the name of the transposed module. The
default name is constructed by appending "_transpose"
to `self.module_name`.
activate_final: Optional boolean determining if the activation and batch
normalization, if turned on, are applied to the final layer.
Returns:
Matching transposed `MLP` module.
"""
if name is None:
name = self.module_name + "_transpose"
if activate_final is None:
activate_final = self.activate_final
output_sizes = [lambda l=layer: l.input_shape[1] for layer in self._layers]
output_sizes.reverse()
return MLP(name=name,
output_sizes=output_sizes,
activation=self.activation,
activate_final=activate_final,
initializers=self.initializers,
partitioners=self.partitioners,
regularizers=self.regularizers,
use_bias=self.use_bias)
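# A minimal usage sketch (assumed Sonnet 1.x / TF1 graph-mode API, not part of
# the original module): an MLP encoder whose transpose(), with layer sizes
# reversed, serves as a tied-architecture decoder.
#
#   inputs = tf.placeholder(tf.float32, shape=[None, 784])
#   encoder = MLP(output_sizes=[256, 64])
#   latents = encoder(inputs)        # connects the module; instantiates Linears
#   decoder = encoder.transpose()    # output_sizes become [256, 784]
#   reconstruction = decoder(latents)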
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from collections import Counter, defaultdict
def load_fish(file):
with open(file) as f:
fish = f.read().strip()
fish = fish.split(",")
fish = [int(i) for i in fish]
return fish
def get_num_fish(fish, days):
    # Track how many fish share each timer value instead of simulating each
    # fish individually, so each day costs O(9) work regardless of population.
    counts = Counter(fish)
    while days > 0:
        new_counts = defaultdict(int)
        for k, v in counts.items():
            if k == 0:
                # Timer expired: the fish resets to 6 and spawns a new fish at 8.
                new_counts[6] += v
                new_counts[8] += v
            else:
                new_counts[k - 1] += v
        counts = new_counts
        days -= 1
    num_fish = sum(counts.values())
    return num_fish
if __name__ == "__main__":
fish = load_fish("../data/06_input.txt")
part_one = get_num_fish(fish, 80)
print(part_one)
part_two = get_num_fish(fish, 256)
print(part_two)
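# Sanity check against the published Advent of Code 2021 day 6 example,
# where initial timers 3,4,3,1,2 grow to 26 fish after 18 days:
#
#   >>> get_num_fish([3, 4, 3, 1, 2], 18)
#   26
#   >>> get_num_fish([3, 4, 3, 1, 2], 80)
#   5934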
|
nilq/baby-python
|
python
|
import chainer
import chainer.functions as F
import chainer.links as L
import inspect
import ast, gast
import itertools
from contextlib import ExitStack
from chainer_compiler.elichika.parser import config
from chainer_compiler.elichika.parser import nodes
from chainer_compiler.elichika.parser import values
from chainer_compiler.elichika.parser import functions
from chainer_compiler.elichika.parser import utils
from chainer_compiler.elichika.parser.graphs import Graph
from chainer_compiler.elichika.parser import veval_bin
from chainer_compiler.elichika.parser import veval_unary
from chainer_compiler.elichika.parser import veval_multiary
from chainer_compiler.elichika.parser import veval_aug_assign
import numpy as np
def get_ast_name_forcibly(ast):
if isinstance(ast, gast.gast.Name):
return ast.id
if isinstance(ast, gast.gast.Attribute):
return ast.attr
if isinstance(ast, str):
return ast
return ''
def return_value_or_obj(obj : 'values.Object'):
if isinstance(obj.get_value(), values.NumberValue):
return values.Object(obj.get_value())
if isinstance(obj.get_value(), values.StrValue):
return values.Object(obj.get_value())
if isinstance(obj.get_value(), values.BoolValue):
return values.Object(obj.get_value())
if isinstance(obj.get_value(), values.NoneValue):
return values.Object(obj.get_value())
if isinstance(obj.get_value(), values.TupleValue):
return values.Object(obj.get_value())
return obj
class AstContext:
def __init__(self, nast, lineno_offset : 'int', filename : 'str' = '' ):
self.nast = nast
self.lineno_offset = lineno_offset
self.lineno = self.lineno_offset
self.filename = filename
if hasattr(self.nast, 'lineno'):
self.lineno = self.nast.lineno + self.lineno_offset
def c(self, value) -> 'AstContext':
"""
get AstContext including value
"""
return AstContext(value, self.lineno_offset, filename=self.filename)
def veval_ast_attribute(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None) -> 'Attribute':
assert(isinstance(astc.nast, gast.gast.Attribute))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
from_module = True
if context is not None and context._eval_as_written_target:
from_module = False
value = veval_ast(astc.c(astc.nast.value), local_field, graph, context)
value_ref = utils.try_get_obj(value, 'attribute', lineprop)
if(value_ref is None):
utils.print_warning('Unknown or disabled attribute "{}" is accessed'.format(get_ast_name_forcibly(astc.nast.value)), lineprop)
return None
attr = value_ref.get_field().get_attribute(astc.nast.attr, graph.root_graph, False)
# property(getter)
if attr.has_obj() and isinstance(attr.get_obj().get_value(), values.FuncValue) and attr.get_obj().get_value().func.is_property:
func_value = attr.get_obj().get_value()
ret = func_value.func.vcall(func_value.module, graph, func_value.obj, functions.FunctionArgInput(), context, lineprop)
return ret
if attr.has_obj():
return attr
# if attr is not found
gotten_obj = value_ref.try_get_and_store_obj(astc.nast.attr, graph.root_graph)
if gotten_obj is not None:
return value_ref.get_field().get_attribute(astc.nast.attr, graph.root_graph, from_module)
if context is not None and context._eval_as_written_target:
return attr
# value is unknown
if value is None:
        utils.print_warning('Value {} to assign is not found'.format(get_ast_name_forcibly(astc.nast.value)), lineprop)
else:
        utils.print_warning('Attribute {} is not found'.format(get_ast_name_forcibly(astc.nast.attr)), lineprop)
return None
def veval_ast_assign(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.Assign))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
value = veval_ast(astc.c(astc.nast.value), local_field, graph, context)
value_obj = utils.try_get_obj(value, 'assign', lineprop)
if value is None:
if config.show_warnings:
            print('It is possible that the assigned value is invalid in L.{}'.format(astc.lineno))
return None
with context.eval_as_written_target():
targets = veval_ast(astc.c(astc.nast.targets[0]), local_field, graph, context)
if isinstance(targets, list):
# ex. a,b = (1,2)
if not isinstance(value_obj.get_value(), values.TupleValue):
# TODO fix it
assert(False) # not supported
for i in range(len(targets)):
assert(value_obj.get_value().get_constant_value() is not None)
node_assign = nodes.NodeAssign(targets[i], value_obj.get_value().get_constant_value()[i], astc.lineno)
targets[i].revise(utils.try_get_obj(value_obj.get_value().get_constant_value()[i],'assign', lineprop))
graph.add_node(node_assign)
else:
assigned_obj = return_value_or_obj(value_obj)
node_assign = nodes.NodeAssign(targets, assigned_obj, astc.lineno)
targets.revise(assigned_obj)
graph.add_node(node_assign)
def veval_ast_name(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None) -> 'Attribute':
assert(isinstance(astc.nast, gast.gast.Name))
from_module = True
if context is not None and context._eval_as_written_target:
from_module = False
ret = local_field.get_attribute(astc.nast.id, graph.root_graph, from_module=from_module)
return ret
def veval_ast_call(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None) -> 'Attribute':
assert(isinstance(astc.nast, gast.gast.Call))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
func = veval_ast(astc.c(astc.nast.func), local_field, graph, context)
    if func is None or not func.has_obj():
utils.print_warning('Unknown function "{}" is called'.format(get_ast_name_forcibly(astc.nast.func)), lineprop)
return None
func_obj = utils.try_get_obj(func, 'call', lineprop)
func_value = utils.try_get_value(func, 'call', lineprop)
finput = functions.FunctionArgInput()
for arg in astc.nast.args:
arg_ = veval_ast(astc.c(arg), local_field, graph, context)
finput.inputs.append(utils.try_get_obj(arg_, 'call', lineprop))
for keyword in astc.nast.keywords:
arg_ = veval_ast(astc.c(keyword.value), local_field, graph, context)
finput.keywords[keyword.arg] = utils.try_get_obj(arg_, 'call', lineprop)
lineprop = utils.LineProperty(astc.lineno, astc.filename)
# check arguments
for o in finput.inputs:
if o is None:
            utils.print_warning('Invalid arguments exist in "{}"'.format(get_ast_name_forcibly(astc.nast.func)), lineprop)
return None
ret = None
if isinstance(func_value, values.FuncValue):
ret = func_value.func.vcall(func_value.module, graph, func_value.obj, finput, context, lineprop)
return ret
elif isinstance(func_value, values.Instance):
# __call__
call_func_ref = func_obj.try_get_and_store_obj('__call__', graph.root_graph)
if call_func_ref is not None:
func_value = call_func_ref.get_value()
ret = func_value.func.vcall(func_value.module, graph, func_obj, finput, context, lineprop)
return ret
if config.show_warnings:
print('Unknown function is called in L.{}'.format(astc.lineno))
return None
def veval_ast_return(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None) -> 'None':
assert(isinstance(astc.nast, gast.gast.Return))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
value = veval_ast(astc.c(astc.nast.value), local_field, graph, context)
value_obj = utils.try_get_obj(value, 'return', lineprop)
value_value = utils.try_get_value(value, 'return', lineprop)
if value_value is None:
if config.show_warnings:
            print('Returned values are not found in L.{}'.format(astc.lineno))
return None
node = nodes.NodeReturn(value_value,astc.lineno)
graph.add_node(node)
return value
def veval_ast_if(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.If))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
# if condition
test = veval_ast(astc.c(astc.nast.test), local_field, graph, context)
test_value = utils.try_get_value(test, 'if', lineprop)
id_str = str(utils.get_guid())
if_id = 'if_' + id_str
true_id = 'true_' + id_str
false_id = 'false_' + id_str
# True
values.push_history(true_id)
true_graph = Graph()
true_graph.root_graph = graph.root_graph
true_graph.name = 'True'
# Skip true body when the test_value is decidably False.
if test_value.has_constant_value() and test_value.internal_value == False:
true_body = []
else:
true_body = veval_ast(astc.c(astc.nast.body), local_field, true_graph, context)
true_value_inputs = values.get_inputs()
true_value_outputs = values.get_outputs()
values.pop_history()
# False
values.push_history(false_id)
false_graph = Graph()
false_graph.root_graph = graph.root_graph
false_graph.name = 'False'
# Skip false body when the test_value is decidably True.
if test_value.has_constant_value() and test_value.internal_value == True:
false_body = []
else:
false_body = veval_ast(astc.c(astc.nast.orelse), local_field, false_graph, context)
false_value_inputs = values.get_inputs()
false_value_outputs = values.get_outputs()
values.pop_history()
# generate pairs
value_pairs = {}
for v in true_value_inputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['true_input_value'] = v.input_value
value_pairs[key]['true_input_body_value'] = v.value
value_pairs[key]['true_input_obj'] = v.obj
for v in true_value_outputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['true_output_body_value'] = v.value
value_pairs[key]['true_output_obj'] = v.obj
for v in false_value_inputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['false_input_value'] = v.input_value
value_pairs[key]['false_input_body_value'] = v.value
value_pairs[key]['false_input_obj'] = v.obj
for v in false_value_outputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['false_output_body_value'] = v.value
value_pairs[key]['false_output_obj'] = v.obj
inputs = []
outputs = []
def get_input_value(v) -> "values.Value":
if 'true_input_value' in v:
return v['true_input_value']
elif 'false_input_value' in v:
return v['false_input_value']
else:
return None
def get_body_input_value(v, input_value) -> "values.Value":
if v is None:
return (None, None)
true_input_body_value = None
false_input_body_value = None
if 'true_input_body_value' in v:
true_input_body_value = v['true_input_body_value']
else:
true_input_body_value = functions.generate_value_with_same_type(input_value)
if 'false_input_body_value' in v:
false_input_body_value = v['false_input_body_value']
else:
false_input_body_value = functions.generate_value_with_same_type(input_value)
return (true_input_body_value, false_input_body_value)
# collect inputs
input_2_body_inputs = {}
for k, v in value_pairs.items():
input_value = get_input_value(v)
if input_value is None:
continue
if not (input_value in input_2_body_inputs.keys()):
body_input_value = get_body_input_value(v, input_value)
input_2_body_inputs[input_value] = body_input_value
for k, v in input_2_body_inputs.items():
inputs.append(k)
true_graph.add_input_value(v[0])
false_graph.add_input_value(v[1])
for k, v in value_pairs.items():
name = v['name']
field = v['field']
input_value = get_input_value(v)
true_input_body_value = None
false_input_body_value = None
if input_value in input_2_body_inputs.keys():
true_input_body_value = input_2_body_inputs[input_value][0]
false_input_body_value = input_2_body_inputs[input_value][1]
true_output_body_value = None
false_output_body_value = None
output_value = None
# search output value
if 'true_output_body_value' in v:
true_output_body_value = v['true_output_body_value']
if 'false_output_body_value' in v:
false_output_body_value = v['false_output_body_value']
if true_output_body_value is not None or false_output_body_value is not None:
if true_output_body_value is None:
if true_input_body_value is not None:
# e.x. not changed
true_output_body_value = true_input_body_value
else:
# e.x. make a value in false statement
true_output_body_value = functions.generate_value_with_same_type(false_output_body_value, is_dummy_value=True)
if false_output_body_value is None:
if false_input_body_value is not None:
# e.x. not changed
false_output_body_value = false_input_body_value
else:
# e.x. make a value in true statement
false_output_body_value = functions.generate_value_with_same_type(true_output_body_value, is_dummy_value=True)
# check types between true and false
true_output_body_value_type = None
false_output_body_value_type = None
if true_output_body_value is not None and true_output_body_value.is_not_none_or_any_value():
true_output_body_value_type = true_output_body_value
if false_output_body_value is not None and false_output_body_value.is_not_none_or_any_value():
false_output_body_value_type = false_output_body_value
if true_output_body_value_type is not None and false_output_body_value_type is not None and type(true_output_body_value_type) != type(false_output_body_value_type):
            utils.print_warning('Values with different types were generated for {} between true and false'.format(k), lineprop)
        if true_output_body_value_type is not None:
output_value = functions.generate_value_with_same_type(true_output_body_value_type)
        elif false_output_body_value_type is not None:
output_value = functions.generate_value_with_same_type(false_output_body_value_type)
elif true_output_body_value is not None:
output_value = functions.generate_value_with_same_type(true_output_body_value)
elif false_output_body_value is not None:
output_value = functions.generate_value_with_same_type(false_output_body_value)
if output_value is not None:
outputs.append(output_value)
true_graph.add_output_value(true_output_body_value)
false_graph.add_output_value(false_output_body_value)
if 'true_output_obj' in v and not 'false_output_obj' in v:
obj = v['true_output_obj']
elif not 'true_output_obj' in v and 'false_output_obj' in v:
obj = v['false_output_obj']
elif 'true_output_obj' in v and 'false_output_obj' in v:
obj = None
else:
assert(False)
if obj is not None:
obj.revise(output_value)
field.get_attribute(name).revise(obj)
elif field.get_attribute(name).has_obj():
field.get_attribute(name).get_obj().revise(output_value)
else:
field.get_attribute(name).revise(values.Object(output_value))
node = nodes.NodeIf(test_value, inputs, true_graph, false_graph, astc.lineno)
node.set_outputs(outputs)
graph.add_node(node)
return None
def veval_ast_aug_assign(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.AugAssign))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
target = veval_ast(astc.c(astc.nast.target), local_field, graph, context)
value = veval_ast(astc.c(astc.nast.value), local_field, graph, context)
target_value = utils.try_get_value(target, 'aug_assign', lineprop)
value_value = utils.try_get_value(value, 'aug_assign', lineprop)
binop = nodes.BinOpType.Unknown
if isinstance(astc.nast.op, gast.Add):
binop = nodes.BinOpType.Add
elif isinstance(astc.nast.op, gast.Sub):
binop = nodes.BinOpType.Sub
elif isinstance(astc.nast.op, gast.Mult):
binop = nodes.BinOpType.Mul
elif isinstance(astc.nast.op, gast.Div):
binop = nodes.BinOpType.Div
elif isinstance(astc.nast.op, gast.FloorDiv):
binop = nodes.BinOpType.FloorDiv
else:
utils.print_warning('Unknown binary operator {}'.format(astc.nast.op), lineprop)
return None
node_aug_assign = nodes.NodeAugAssign(target_value, value_value, binop, astc.lineno)
graph.add_node(node_aug_assign)
new_value = veval_aug_assign.veval(binop, target_value, value_value, lineprop)
node_aug_assign.set_outputs([new_value])
utils.try_get_obj(target, 'aug_assign', lineprop).revise(new_value)
def veval_ast_expr(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
    call a function without assigning the result
Ex. b.x()
'''
assert(isinstance(astc.nast, gast.gast.Expr))
return veval_ast(astc.c(astc.nast.value), local_field, graph, context)
def veval_ast_subscript(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
Ex. x[1], x[y,z]
'''
assert(isinstance(astc.nast, gast.gast.Subscript))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
def veval_with_default(nast, default_value):
if nast is None:
ret = values.NumberValue(default_value)
ret.name = '@SliceDefault'
return ret
obj = veval_ast(astc.c(nast), local_field, graph, context)
return utils.try_get_value(obj, 'subscript', lineprop)
def get_slice_indices(slice):
if slice.lower is None and slice.upper is None and slice.step is None:
return []
indices = [veval_with_default(slice.lower, 0),
veval_with_default(slice.upper, utils.slice_int_max)]
if slice.step is not None:
indices.append(veval_with_default(slice.step, 1))
return indices
value = veval_ast(astc.c(astc.nast.value), local_field, graph, context)
value_value = utils.try_get_value(value, 'subscript', lineprop)
if isinstance(value_value, values.DictValue):
if isinstance(astc.nast.slice, gast.gast.Index):
slice_ = veval_ast(astc.c(astc.nast.slice.value), local_field, graph, context)
slice_value = utils.try_get_value(slice_, 'subscript', lineprop)
value_value.internal_keys[slice_value.encode()] = slice_
ret = value_value.internal_values.get_attribute(slice_value.encode())
return ret
elif isinstance(value_value, values.Instance):
if isinstance(astc.nast.slice, gast.gast.Index):
slice_ = veval_ast(astc.c(astc.nast.slice.value), local_field, graph, context)
finput = functions.FunctionArgInput()
finput.inputs.append(slice_)
value_ref = utils.try_get_obj(value, 'subscript', lineprop)
getitem_func = value_ref.get_field().get_attribute('__getitem__', graph.root_graph, False)
getitem_func_value = getitem_func.get_obj().get_value()
ret = getitem_func_value.func.vcall(getitem_func_value.module, graph, getitem_func_value.obj, finput, context, lineprop)
return ret
elif isinstance(value_value, (values.ListValue, values.TupleValue, values.TensorValue)):
if isinstance(astc.nast.slice, gast.gast.Index):
slice_ = veval_ast(astc.c(astc.nast.slice.value), local_field, graph, context)
slice_value = utils.try_get_value(slice_, 'subscript', lineprop)
if isinstance(slice_value, values.TupleValue):
# ex. x[1,2]
if slice_value.has_constant_value():
values_ = [utils.try_get_value(x, 'subscript', lineprop) for x in slice_value.get_constant_value()]
node = nodes.NodeGetItem(value_value, values_, line=lineprop)
else:
if config.show_warnings:
                    print('This subscript is not supported in L.{}'.format(astc.lineno))
node = nodes.NodeInvalid(line=lineprop)
else:
# ex. x[1]
node = nodes.NodeGetItem(value_value, [slice_value])
if isinstance(value_value, values.TensorValue):
ret_value = values.TensorValue()
else:
if value_value.vtype != None and issubclass(value_value.vtype, values.Instance):
assert value_value.has_constant_value()
assert slice_value.has_constant_value()
return value_value.internal_value[slice_value.internal_value]
elif value_value.vtype != None:
ret_value = value_value.vtype(None)
ret_value.dtype = value_value.dtype
else:
utils.print_warning("Unable to determine element type of {}. Using TensorValue as default.".format(value_value), lineprop)
ret_value = values.TensorValue()
node.set_outputs([ret_value])
graph.add_node(node)
if isinstance(value, values.Attribute):
ret_attr = value.make_subscript_attribute(slice_, graph)
ret_attr.revise(values.Object(ret_value), update_parent=False)
return ret_attr
else:
return values.Object(ret_value)
elif isinstance(astc.nast.slice, gast.gast.Slice):
indices = get_slice_indices(astc.nast.slice)
node = nodes.NodeSlice(value_value, indices, [len(indices)])
ret_value = functions.generate_value_with_same_type(value_value)
# for constant propagation, populate ret_value when possible
if value_value.has_constant_value():
if all([value.has_constant_value() for value in indices]):
start, end = (indice.internal_value for indice in indices[:2])
step = indices[2].internal_value if len(indices) == 3 else None
ret_value.internal_value = value_value.internal_value[start:end:step]
node.set_outputs([ret_value])
graph.add_node(node)
return values.Object(ret_value)
elif isinstance(astc.nast.slice, gast.gast.ExtSlice):
indices = []
slice_specs = []
for dim in astc.nast.slice.dims:
if isinstance(dim, gast.gast.Index):
indices.append(utils.try_get_value(veval_ast(astc.c(dim.value), local_field, graph, context), 'subscript', lineprop))
slice_specs.append(1)
elif isinstance(dim, gast.gast.Slice):
ni = get_slice_indices(dim)
indices.extend(ni)
slice_specs.append(len(ni))
else:
                assert False, 'Unknown slice: %s in %s' % (dim, astc.nast.slice)
node = nodes.NodeSlice(value_value, indices, slice_specs)
ret_value = functions.generate_value_with_same_type(value_value)
node.set_outputs([ret_value])
graph.add_node(node)
return values.Object(ret_value)
else:
utils.print_warning("Subscript not possible for type {}".format(type(value_value)))
return None
def veval_ast_listcomp(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
Ex. [x for x in xx]
[elt for target in iter]
'''
assert(isinstance(astc.nast, gast.gast.ListComp))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
listcomp_guid = str(utils.get_guid())
listcomp_id = 'listcomp_' + listcomp_guid
body_id = 'listcomp_body_' + listcomp_guid
internal_counter_id = '@internal/listcomp_counter_' + listcomp_guid
internal_list_id = '@internal/listcomp_list_' + listcomp_guid
internal_cond_id = '@internal/listcomp_cond_' + listcomp_guid
generator = astc.nast.generators[0]
iter_value = utils.try_get_value(veval_ast(astc.c(generator.iter), local_field, graph, context), 'generator', lineprop)
list_value = values.ListValue()
list_obj = values.Object(list_value)
node_generate_list = nodes.NodeGenerate('List', [], lineprop)
node_generate_list.set_outputs([list_value])
graph.add_node(node_generate_list)
# body
target_name = ''
if isinstance(generator.target, gast.gast.Name):
target_name = generator.target.id
else:
if config.show_warnings:
            print('This for loop is not supported in L.{}'.format(astc.lineno))
return None
counter_value = values.NumberValue(None)
counter_value.dtype = np.array(0).dtype
counter_value.name = internal_counter_id
cond_value = values.BoolValue(None)
cond_value.name = internal_cond_id
# set values with internal name
local_field.get_attribute(internal_list_id).revise(list_obj)
values.push_history(listcomp_id)
body_graph = Graph()
body_graph.root_graph = graph.root_graph
body_graph.name = 'Body_' + listcomp_guid
node_forgen = nodes.NodeForGenerator(counter_value, iter_value)
target_obj = iter_value.get_iterator()
if target_obj is None:
target_obj = values.Object(values.UnknownValue())
if config.show_warnings:
            print('unknown iterable type in L.{}'.format(lineprop))
target_value = target_obj.get_value()
node_forgen.set_outputs([target_obj.get_value()])
local_field.get_attribute(target_name).revise(target_obj)
body_graph.add_node(node_forgen)
elt = veval_ast(astc.c(astc.nast.elt), local_field, body_graph, context)
elt_obj = utils.try_get_obj(elt, 'listcomp', lineprop)
finput = functions.FunctionArgInput()
finput.inputs.append(elt_obj)
append_value = local_field.get_attribute(internal_list_id).get_obj().get_field().get_attribute('append').get_obj().get_value()
append_value.func.vcall(None, body_graph, local_field.get_attribute(internal_list_id).get_obj(), finput, context, lineprop)
value_inputs = values.get_inputs()
value_outputs = values.get_outputs()
values.pop_history()
inputs = []
outputs = []
# default input for subgraph's input
body_graph.add_input_value(counter_value)
body_graph.add_input_value(cond_value)
body_graph.add_input_value(iter_value)
# default output for subgraph's output
body_graph.add_output_value(cond_value)
body_graph.add_output_value(iter_value)
# default output
outputs.append(functions.generate_value_with_same_type(iter_value))
# generate pairs
value_pairs = {}
for v in value_inputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['input_value'] = v.input_value
value_pairs[key]['input_body_value'] = v.value
for v in value_outputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['output_body_value'] = v.value
value_pairs[key]['output_obj'] = v.obj
# remove iterator
removed_name = str(local_field.id) + '_' + target_value.name
del value_pairs[removed_name]
for k, v in value_pairs.items():
name = v['name']
field = v['field']
if 'input_body_value' in v:
inputs.append(v['input_value'])
body_graph.add_input_value(v['input_body_value'])
else:
temp_value1 = functions.generate_value_with_same_type(v['output_body_value'])
temp_value2 = functions.generate_value_with_same_type(v['output_body_value'])
inputs.append(temp_value1)
body_graph.add_input_value(temp_value2)
if 'output_body_value' in v:
body_graph.add_output_value(v['output_body_value'])
output_value = functions.generate_value_with_same_type(v['output_body_value'])
outputs.append(output_value)
if 'output_obj' in v:
obj = v['output_obj']
obj.revise(output_value)
field.get_attribute(name).revise(obj)
elif field.get_attribute(name).has_obj():
field.get_attribute(name).get_obj().revise(output_value)
else:
field.get_attribute(name).revise(values.Object(output_value))
else:
temp_value1 = v['input_body_value']
temp_value2 = functions.generate_value_with_same_type(v['input_body_value'])
body_graph.add_output_value(temp_value1)
outputs.append(temp_value2)
node = nodes.NodeListcomp(iter_value, inputs, body_graph, astc.lineno)
node.set_outputs(outputs)
graph.add_node(node)
return local_field.get_attribute(internal_list_id).get_obj()
def veval_ast_bin_op(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
"""
eval binary operation.
Ex. a + b, b // c, etc
"""
assert(isinstance(astc.nast, gast.gast.BinOp))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
left = veval_ast(astc.c(astc.nast.left), local_field, graph, context)
right = veval_ast(astc.c(astc.nast.right), local_field, graph, context)
left_value = utils.try_get_value(left, 'compare', lineprop)
right_value = utils.try_get_value(right, 'compare', lineprop)
binop = nodes.BinOpType.Unknown
if isinstance(astc.nast.op, gast.Add):
binop = nodes.BinOpType.Add
elif isinstance(astc.nast.op, gast.Sub):
binop = nodes.BinOpType.Sub
elif isinstance(astc.nast.op, gast.Mult):
binop = nodes.BinOpType.Mul
elif isinstance(astc.nast.op, gast.Div):
binop = nodes.BinOpType.Div
elif isinstance(astc.nast.op, gast.FloorDiv):
binop = nodes.BinOpType.FloorDiv
elif isinstance(astc.nast.op, gast.Mod):
binop = nodes.BinOpType.Mod
else:
utils.print_warning('Unknown binary operator {}'.format(astc.nast.op), lineprop)
return None
node_bin_op = nodes.NodeBinOp(left_value, right_value, binop, astc.lineno)
ret_value = veval_bin.veval(binop, left_value, right_value, lineprop)
node_bin_op.set_outputs([ret_value])
graph.add_node(node_bin_op)
return values.Object(ret_value)
def veval_ast_bool_op(astc : 'AstContext', local_field : 'values.Field', graph : 'graphs.Graph', context : 'functions.VEvalContext' = None):
"""
eval bool operations.
Ex. x and y
"""
assert(isinstance(astc.nast, gast.gast.BoolOp))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
multiaryop = nodes.MultiaryOpType.Unknown
if isinstance(astc.nast.op, gast.And):
multiaryop = nodes.MultiaryOpType.And
if isinstance(astc.nast.op, gast.Or):
multiaryop = nodes.MultiaryOpType.Or
values_list = [veval_ast(astc.c(value_), local_field, graph, context) for value_ in astc.nast.values]
values_list_value = [utils.try_get_value(value_, 'multiary', lineprop) for value_ in values_list]
node = nodes.NodeMultiaryOp(values_list_value, multiaryop)
ret_value = veval_multiary.veval(multiaryop, values_list_value)
node.set_outputs([ret_value])
graph.add_node(node)
return values.Object(ret_value)
def veval_ast_unary_op(astc : 'AstContext', local_field : 'values.Field', graph : 'graphs.Graph', context : 'functions.VEvalContext' = None):
"""
eval unary operation.
Ex. -xx
"""
assert(isinstance(astc.nast, gast.gast.UnaryOp))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
unaryop = nodes.UnaryOpType.Unknown
if isinstance(astc.nast.op, gast.UAdd):
unaryop = nodes.UnaryOpType.UAdd
if isinstance(astc.nast.op, gast.USub):
unaryop = nodes.UnaryOpType.USub
if isinstance(astc.nast.op, gast.Not):
unaryop = nodes.UnaryOpType.Not
operand = veval_ast(astc.c(astc.nast.operand), local_field, graph, context)
operand_value = utils.try_get_value(operand, 'unary', lineprop)
node = nodes.NodeUnaryOp(operand_value, unaryop)
ret_value = veval_unary.veval(unaryop, operand_value)
node.set_outputs([ret_value])
graph.add_node(node)
return values.Object(ret_value)
def veval_ast_compare(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
"""
eval Compare.
Ex. a >= b, a != b, a is b, etc
"""
assert(isinstance(astc.nast, gast.gast.Compare))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
left = veval_ast(astc.c(astc.nast.left), local_field, graph, context)
right = veval_ast(astc.c(astc.nast.comparators[0]), local_field, graph, context)
left_value = utils.try_get_value(left, 'compare', lineprop)
right_value = utils.try_get_value(right, 'compare', lineprop)
compare = nodes.CompareType.unknown
if isinstance(astc.nast.ops[0], gast.Eq):
compare = nodes.CompareType.Eq
if isinstance(astc.nast.ops[0], gast.NotEq):
compare = nodes.CompareType.NotEq
if isinstance(astc.nast.ops[0], gast.Is):
compare = nodes.CompareType.Is
if isinstance(astc.nast.ops[0], gast.IsNot):
compare = nodes.CompareType.IsNot
if isinstance(astc.nast.ops[0], gast.Gt):
compare = nodes.CompareType.Gt
if isinstance(astc.nast.ops[0], gast.GtE):
compare = nodes.CompareType.GtE
if isinstance(astc.nast.ops[0], gast.Lt):
compare = nodes.CompareType.Lt
if isinstance(astc.nast.ops[0], gast.LtE):
compare = nodes.CompareType.LtE
if isinstance(astc.nast.ops[0], gast.In):
compare = nodes.CompareType.In
if isinstance(astc.nast.ops[0], gast.NotIn):
compare = nodes.CompareType.NotIn
node_compare = nodes.NodeCompare(left_value, right_value, compare, astc.lineno)
# constant propagation when possible
default_value = None
if left_value.has_constant_value() and right_value.has_constant_value():
if isinstance(astc.nast.ops[0], gast.Eq):
default_value = left_value.internal_value == right_value.internal_value
if isinstance(astc.nast.ops[0], gast.NotEq):
default_value = left_value.internal_value != right_value.internal_value
if isinstance(astc.nast.ops[0], gast.Is):
default_value = left_value.internal_value is right_value.internal_value
if isinstance(astc.nast.ops[0], gast.IsNot):
default_value = left_value.internal_value is not right_value.internal_value
if isinstance(astc.nast.ops[0], gast.Gt):
default_value = left_value.internal_value > right_value.internal_value
if isinstance(astc.nast.ops[0], gast.GtE):
default_value = left_value.internal_value >= right_value.internal_value
if isinstance(astc.nast.ops[0], gast.Lt):
default_value = left_value.internal_value < right_value.internal_value
if isinstance(astc.nast.ops[0], gast.LtE):
default_value = left_value.internal_value <= right_value.internal_value
if isinstance(astc.nast.ops[0], gast.In):
default_value = left_value.internal_value in map(lambda ref: ref.get_value().internal_value, right_value.internal_value)
if isinstance(astc.nast.ops[0], gast.NotIn):
default_value = left_value.internal_value not in map(lambda ref: ref.get_value().internal_value, right_value.internal_value)
ret_value = values.BoolValue(default_value)
ret_value.name = '@{}'.format(lineprop)
node_compare.set_outputs([ret_value])
graph.add_node(node_compare)
return values.Object(ret_value)
def veval_ast_num(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
Ex. 1, 2, ...
'''
assert(isinstance(astc.nast, gast.gast.Num))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
value = values.NumberValue(astc.nast.n)
ret = values.Object(value)
name = values.create_ref_value_name_with_constant(ret)
ret.name = name
ret.get_value().name = name
return ret
def veval_ast_str(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
Ex. "str"
'''
assert(isinstance(astc.nast, gast.gast.Str))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
value = values.StrValue(astc.nast.s)
ret = values.Object(value)
name = values.create_ref_value_name_with_constant(ret)
ret.name = name
ret.get_value().name = name
return ret
def veval_ast_name_constant(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
Ex. True
'''
assert(isinstance(astc.nast, gast.gast.NameConstant))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
ret = None
if astc.nast.value == True:
ret = values.Object(values.BoolValue(True))
if astc.nast.value == False:
ret = values.Object(values.BoolValue(False))
if astc.nast.value is None:
ret = values.Object(values.NoneValue())
name = values.create_ref_value_name_with_constant(ret)
ret.name = name
ret.get_value().name = name
return ret
def veval_ast_tuple(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.Tuple))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
if context is not None and context._eval_as_written_target:
vs = []
for v in astc.nast.elts:
a_ = veval_ast(astc.c(v), local_field, graph, context=context)
vs.append(a_)
return vs
else:
vs_ref = []
vs = []
for v in astc.nast.elts:
a_ = veval_ast(astc.c(v), local_field, graph, context=context)
v_ = utils.try_get_obj(a_, 'tuple', lineprop)
if v_ is None:
utils.print_warning('Unknown tuple element {}'.format(v), lineprop)
return None
vs_ref.append(v_)
vs.append(v_.get_value())
v_.in_container = True
tuple_value = values.TupleValue(vs_ref)
node = nodes.NodeGenerate('Tuple', vs, line=lineprop)
node.set_outputs([tuple_value])
graph.add_node(node)
return values.Object(tuple_value)
def veval_ast_list(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
    '''
    Ex. [],[x,y,z]
    TODO : Initializer
    '''
    assert(isinstance(astc.nast, gast.gast.List))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
elts = []
for elt in astc.nast.elts:
elt_ = veval_ast(astc.c(elt), local_field, graph, context)
elt_obj = utils.try_get_obj(elt_,'list', lineprop)
elts.append(elt_obj)
node = nodes.NodeGenerate('List', [elt.get_value() for elt in elts], lineprop)
graph.add_node(node)
value = values.ListValue(elts)
node.set_outputs([value])
return values.Object(value)
def veval_ast_dict(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.Dict))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
keys = []
elts = []
for key, elt in zip(astc.nast.keys, astc.nast.values):
key_ = veval_ast(astc.c(key), local_field, graph, context)
elt_ = veval_ast(astc.c(elt), local_field, graph, context)
key_obj = utils.try_get_obj(key_, 'dict', lineprop)
elt_obj = utils.try_get_obj(elt_,'dict', lineprop)
keys.append(key_obj)
elts.append(return_value_or_obj(elt_obj))
value = values.DictValue(keys, elts)
return values.Object(value)
def veval_ast_for_unroll(astc : 'AstContext', target_name, iter_ : 'values.ListValue', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
for target in iter:
...
with unroll
'''
assert(isinstance(astc.nast, gast.gast.For))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
for element in iter_.get_constant_value():
local_field.get_attribute(target_name).revise(element)
veval_ast(astc.c(astc.nast.body), local_field, graph, context)
return None
def veval_ast_for(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
for target in iter:
...
'''
assert(isinstance(astc.nast, gast.gast.For))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
# for target in iter:
iter_ = veval_ast(astc.c(astc.nast.iter), local_field, graph, context)
input_iter_value = utils.try_get_value(iter_, 'for', lineprop)
body_iter_value = functions.generate_value_with_same_type(input_iter_value, suffix_type=functions.SuffixType.Input)
# get target name
target_name = ''
if isinstance(astc.nast.target, gast.gast.Name):
target_name = astc.nast.target.id
else:
if config.show_warnings:
            print('This for loop is not supported in L.{}'.format(astc.lineno))
return None
# unroll?
if isinstance(input_iter_value, values.ListValue) and input_iter_value.has_constant_value() and input_iter_value.dtype is None:
return veval_ast_for_unroll(astc, target_name, input_iter_value, local_field, graph, context)
for_guid = utils.get_guid()
for_id = 'for_' + str(for_guid)
body_id = 'body_' + str(for_guid)
values.push_history(for_id)
# body
body_graph = Graph()
body_graph.root_graph = graph.root_graph
body_graph.name = 'Body_' + str(for_guid)
# generate a node for input
node_input = nodes.NodeInput('input')
body_graph.add_node(node_input)
body_counter_value = values.NumberValue(None)
body_counter_value.dtype = np.array(0).dtype
body_counter_value.name = 'for_counter_' + str(for_guid)
body_cond_value = values.BoolValue(None)
body_cond_value.name = 'for_cond_' + str(for_guid)
# create a node to lookup a value from sequence
node_forgen = nodes.NodeForGenerator(body_counter_value, body_iter_value)
# generate iterator
target_obj = input_iter_value.get_iterator()
if target_obj is None:
target_obj = values.Object(values.UnknownValue())
if config.show_warnings:
            print('unknown iterable type in L.{}'.format(astc.lineno))
target_value = target_obj.get_value()
node_forgen.set_outputs([target_obj.get_value()])
target_attribute = local_field.get_attribute(target_name)
target_attribute.revise(target_obj)
body_graph.add_node(node_forgen)
# veval body
body = veval_ast(astc.c(astc.nast.body), local_field, body_graph, context)
value_inputs = values.get_inputs()
value_outputs = values.get_outputs()
break_attribute = local_field.get_attribute('#keepgoing')
if break_attribute.has_obj():
break_attribute_ref = break_attribute.get_obj()
break_attribute_value = break_attribute_ref.get_value()
else:
break_attribute_value = body_cond_value
values.pop_history()
inputs = []
outputs = []
node_input_outputs = []
# default input for subgraph's input
body_graph.add_input_value(body_counter_value)
body_graph.add_input_value(body_cond_value)
body_graph.add_input_value(body_iter_value)
# default output for subgraph's output
body_graph.add_output_value(break_attribute_value)
body_graph.add_output_value(body_iter_value)
# default output
outputs.append(functions.generate_value_with_same_type(input_iter_value))
# generate pairs
value_pairs = {}
for v in value_inputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['input_value'] = v.input_value
value_pairs[key]['input_body_value'] = v.value
for v in value_outputs:
key = str(v.field.id) + '_' + v.name
if not (key in value_pairs.keys()):
value_pairs[key] = {}
value_pairs[key]['field'] = v.field
value_pairs[key]['name'] = v.name
value_pairs[key]['output_body_value'] = v.value
value_pairs[key]['output_obj'] = v.obj
for k, v in value_pairs.items():
name = v['name']
field = v['field']
if 'input_body_value' in v:
inputs.append(v['input_value'])
body_graph.add_input_value(v['input_body_value'])
else:
temp_value1 = functions.generate_value_with_same_type(v['output_body_value'], is_dummy_value=True, suffix_type=functions.SuffixType.Dummy)
temp_value2 = functions.generate_value_with_same_type(v['output_body_value'], suffix_type=functions.SuffixType.Dummy)
inputs.append(temp_value1)
body_graph.add_input_value(temp_value2)
node_input_outputs.append(temp_value2)
if 'output_body_value' in v:
body_graph.add_output_value(v['output_body_value'])
output_value = functions.generate_value_with_same_type(v['output_body_value'])
outputs.append(output_value)
if 'output_obj' in v:
obj = v['output_obj']
obj.revise(output_value)
field.get_attribute(name).revise(obj)
elif field.get_attribute(name).has_obj():
field.get_attribute(name).get_obj().revise(output_value)
else:
field.get_attribute(name).revise(values.Object(output_value))
else:
temp_value1 = v['input_body_value']
temp_value2 = functions.generate_value_with_same_type(v['input_body_value'])
body_graph.add_output_value(temp_value1)
outputs.append(temp_value2)
node = nodes.NodeFor(input_iter_value, inputs, body_graph, body_cond_value, astc.lineno)
node.set_outputs(outputs)
node_input.set_outputs(node_input_outputs)
graph.add_node(node)
return None
def veval_ast_continue(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.Continue))
return None
def veval_ast_break(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.Break))
return None
def veval_ast_with(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.With))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
from_module = True
if context is not None and context._eval_as_written_target:
from_module = False
context.flags_cache.clear()
exit_attrs = []
for item in astc.nast.items:
item_ref = veval_ast(astc.c(item), local_field, graph, context)
exit_attr = item_ref.get_field().get_attribute('__exit__', graph.root_graph, from_module)
exit_attrs.append(exit_attr)
with ExitStack() as stack:
managers = [stack.enter_context(getattr(context, flag)(*args)) for flag, args in context.flags_cache]
if not context._ignore_branch:
veval_ast(astc.c(astc.nast.body), local_field, graph, context)
for attr in exit_attrs:
if attr.has_obj() and isinstance(attr.get_obj().get_value(), values.FuncValue):
func_value = attr.get_obj().get_value()
finput = functions.FunctionArgInput()
# Adding exception_type, exception_value & traceback dummy arguments (None)
finput.inputs.extend([values.Object(values.NoneValue())] * 3)
func_value.func.vcall(func_value.module, graph, func_value.obj, finput, context, lineprop)
def veval_ast_withitem(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.withitem))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
from_module = True
if context is not None and context._eval_as_written_target:
from_module = False
value = veval_ast(astc.c(astc.nast.context_expr), local_field, graph, context)
value_obj = utils.try_get_obj(value, 'withitem', lineprop)
enter_attr = value_obj.get_field().get_attribute('__enter__', graph.root_graph, from_module)
if enter_attr.has_obj() and isinstance(enter_attr.get_obj().get_value(), values.FuncValue):
func_value = enter_attr.get_obj().get_value()
value_obj = func_value.func.vcall(func_value.module, graph, func_value.obj, functions.FunctionArgInput(), context, lineprop)
value_obj = utils.try_get_obj(value_obj, 'withitem', lineprop)
if value is None:
if config.show_warnings:
            print('It is possible that one of the withitems is invalid in L.{}'.format(astc.lineno))
return None
value_obj = return_value_or_obj(value_obj)
if astc.nast.optional_vars is not None:
with context.eval_as_written_target():
optional_vars = veval_ast(astc.c(astc.nast.optional_vars), local_field, graph, context)
node_assign = nodes.NodeAssign(optional_vars, value_obj, astc.lineno)
optional_vars.revise(value_obj)
graph.add_node(node_assign)
return value_obj
def veval_ast_lambda(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
'''
lambda x, y, z=2: ...
Note: kwonly_args are not supported
'''
assert(isinstance(astc.nast, gast.gast.Lambda))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
lambda_id = 'lambda_' + str(utils.get_guid())
values.push_history(lambda_id)
args = veval_ast(astc.c(astc.nast.args), local_field, graph, context)
func = functions.UserDefinedFunctionFromAst(astc, args, local_field)
values.pop_history()
return values.Object(values.FuncValue(func, None))
def veval_ast_arguments(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
assert(isinstance(astc.nast, gast.gast.arguments))
lineprop = utils.LineProperty(astc.lineno, astc.filename)
ret = functions.FunctionArgCollection()
argspec = inspect.FullArgSpec(astc.nast.args, astc.nast.vararg, astc.nast.kwarg,
astc.nast.defaults, astc.nast.kwonlyargs, astc.nast.kw_defaults, None)
    assert not argspec.kwonlyargs, "Keyword-only args are not supported"
    assert not argspec.varargs, "Variable arguments (*args) are not supported"
    assert not argspec.varkw, "Variable keywords (**kwargs) are not supported"
defaults = [veval_ast(astc.c(default), local_field, graph, context) for default in argspec.defaults]
arg_list = []
    for k, v in itertools.zip_longest(reversed(argspec.args), reversed(defaults)):
arg_list.append((k.id, v))
    # restore the original argument order
for k, v in reversed(arg_list):
ret.add_arg(k, v)
return ret
def veval_ast(astc : 'AstContext', local_field : 'values.Field', graph : 'Graph', context : 'functions.VEvalContext' = None):
if context is None:
context = functions.VEvalContext()
if isinstance(astc.nast, list):
ret = None
for nast_ in astc.nast:
ret = veval_ast(AstContext(nast_, astc.lineno_offset, filename=astc.filename), local_field, graph, context)
if ret is not None:
break
return ret
elif isinstance(astc.nast, gast.gast.Assign):
veval_ast_assign(astc, local_field, graph, context)
return None
elif isinstance(astc.nast, gast.gast.Attribute):
ret = veval_ast_attribute(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Call):
ret = veval_ast_call(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.BinOp):
ret = veval_ast_bin_op(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.UnaryOp):
ret = veval_ast_unary_op(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Compare):
ret = veval_ast_compare(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Return):
ret = veval_ast_return(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Name):
ret = veval_ast_name(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.AugAssign):
veval_ast_aug_assign(astc, local_field, graph, context)
elif isinstance(astc.nast, gast.gast.Expr):
veval_ast_expr(astc, local_field, graph, context)
elif isinstance(astc.nast, gast.gast.Subscript):
return veval_ast_subscript(astc, local_field, graph, context)
elif isinstance(astc.nast, gast.gast.ListComp):
return veval_ast_listcomp(astc, local_field, graph, context)
elif isinstance(astc.nast, gast.gast.If):
veval_ast_if(astc, local_field, graph, context)
return None
elif isinstance(astc.nast, gast.gast.Num):
ret = veval_ast_num(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Str):
ret = veval_ast_str(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.NameConstant):
ret = veval_ast_name_constant(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Tuple):
ret = veval_ast_tuple(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.List):
ret = veval_ast_list(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.For):
veval_ast_for(astc, local_field, graph, context)
return None
elif isinstance(astc.nast, gast.gast.Continue):
veval_ast_continue(astc, local_field, graph, context)
return None
elif isinstance(astc.nast, gast.gast.Break):
veval_ast_break(astc, local_field, graph, context)
return None
elif isinstance(astc.nast, gast.gast.BoolOp):
ret = veval_ast_bool_op(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.With):
veval_ast_with(astc, local_field, graph, context)
return None
elif isinstance(astc.nast, gast.gast.withitem):
ret = veval_ast_withitem(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Dict):
ret = veval_ast_dict(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.Lambda):
ret = veval_ast_lambda(astc, local_field, graph, context)
return ret
elif isinstance(astc.nast, gast.gast.arguments):
ret = veval_ast_arguments(astc, local_field, graph, context)
return ret
else:
if config.show_warnings:
            print('Unknown AST node found: {} in L.{}'.format(type(astc.nast), astc.lineno))
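# A hedged aside, not part of the original module: the isinstance ladder above
# could equivalently be written as a dispatch table keyed on the gast node
# type; the handler names below mirror functions defined in this file.
#
#   _DISPATCH = {
#       gast.gast.Assign: veval_ast_assign,
#       gast.gast.Attribute: veval_ast_attribute,
#       gast.gast.Call: veval_ast_call,
#       gast.gast.BinOp: veval_ast_bin_op,
#   }
#   def veval_ast_table(astc, local_field, graph, context=None):
#       handler = _DISPATCH.get(type(astc.nast))
#       return handler(astc, local_field, graph, context) if handler else None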
|
nilq/baby-python
|
python
|
import insightconnect_plugin_runtime
from .schema import GetAlertsInput, GetAlertsOutput, Input, Output, Component
# Custom imports below
class GetAlerts(insightconnect_plugin_runtime.Action):
def __init__(self):
super(self.__class__, self).__init__(
name='get_alerts',
description=Component.DESCRIPTION,
input=GetAlertsInput(),
output=GetAlertsOutput())
    def run(self, params={}):
        alerts_response = self.connection.client.get_alerts(since=params.get(Input.FROM_DATE))
        alerts = alerts_response.get("items", [])
        # Follow pagination until the API reports no more pages (bounded to avoid looping forever)
        for _ in range(999):
            if not alerts_response.get("has_more"):
                break
            alerts_response = self.connection.client.get_alerts(key=alerts_response.get("pages", {}).get("nextKey"))
            alerts.extend(alerts_response.get("items", []))
        for alert in alerts:
            alert['severity'] = alert['severity'].upper()
        return {
            Output.ALERTS: alerts
        }
|
nilq/baby-python
|
python
|
from pydantic import BaseModel
from typing import List, Optional
class Member(BaseModel):
    user_id: int
    nickname: str
    card: Optional[str]
    sex: str
    age: int
    area: str
    level: str
    role: Optional[str]
    title: Optional[str]
    # The fields below are only present in the richer getGroupMemberInfo response
    group_id: Optional[int]
    join_time: Optional[int]
    last_sent_time: Optional[int]
    unfriendly: Optional[bool]
    title_expire_time: Optional[int]
    card_changeable: Optional[bool]
    shut_up_timestamp: Optional[int]
class Anonymous(BaseModel):
    id: int
    name: str
    flag: str
class Group(BaseModel):
    group_id: int
    group_name: str
    group_memo: str
    group_create_time: int
    group_level: int
    member_count: int
    max_member_count: int
class HonorListNode(BaseModel):
    user_id: int
    nickname: str
    avatar: str
    description: Optional[str]
    day_count: Optional[int]
class Honor(BaseModel):
    group_id: int
    current_talkative: Optional[HonorListNode]
    talkative_list: Optional[List[HonorListNode]]
    performer_list: Optional[List[HonorListNode]]
    legend_list: Optional[List[HonorListNode]]
    strong_newbie_list: Optional[List[HonorListNode]]
    emotion_list: Optional[List[HonorListNode]]
class AtAllRemain(BaseModel):
    can_at_all: bool
    remain_at_all_count_for_group: int
    remain_at_all_count_for_uin: int
|
nilq/baby-python
|
python
|
import config as config
import utils.log as log
# import test cases
import test_api_config
import test_api_crush_map
import test_api_crush_node
import test_api_crush_rule_set
import test_api_crush_rule
import test_api_crush_type
import test_api_logs
import test_api_mon
import test_api_pool
import test_api_request
import test_api_saltkey
import test_api_server_withinCluster
import test_api_sync
import test_api_event
import test_api_osd
import test_api_cli
import logout
if __name__ == '__main__':
config_data = config.get_config()
if not config_data['auth']:
log.error('auth failed')
else:
# call test_cases
# test_api_cli.exec_test(config_data) # test_id:0
test_api_config.exec_test(config_data) # test_id:1
test_api_crush_map.exec_test(config_data) # test_id:2
test_api_crush_node.exec_test(config_data) # test_id:3
test_api_crush_rule_set.exec_test(config_data) # test_id:4
test_api_crush_rule.exec_test(config_data) # test_id:5
test_api_crush_type.exec_test(config_data) # test_id:6
test_api_logs.exec_test(config_data) # test_id:7
test_api_mon.exec_test(config_data) # test_id:8
test_api_pool.exec_test(config_data) # test_id:9
test_api_request.exec_test(config_data) # test_id:10
test_api_saltkey.exec_test(config_data) # test_id:11
test_api_server_withinCluster.exec_test(config_data) # test_id:12
test_api_sync.exec_test(config_data) # test_id:13
test_api_event.exec_test(config_data) # test_id:14
test_api_osd.exec_test(config_data) # test_id:15
logout.exec_test(config_data)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 - 2022 -- Lars Heuer
# All rights reserved.
#
# License: BSD License
#
"""\
EPC QR Codes.
Test against issue <https://github.com/heuer/segno/issues/55>.
"""
from __future__ import absolute_import, unicode_literals
import decimal
import pytest
from segno.helpers import make_epc_qr, _make_epc_qr_data as make_epc_qr_data
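# These tests exercise segno's EPC QR ("Girocode") helpers: make_epc_qr_data
# builds the raw payload (newline-separated fields, as bytes), while
# make_epc_qr returns the encoded QR symbol.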
@pytest.mark.parametrize('amount', [12.3,
12.30,
decimal.Decimal('12.3'),
decimal.Decimal('12.30'),
'12.3',
'12.30'])
def test_text_002(amount):
name = "François D'Alsace S.A."
iban = 'FR1420041010050500013M02606'
text = 'Client:Marie Louise La Lune'
kw = dict(name=name, iban=iban, text=text, amount=amount)
data = make_epc_qr_data(**kw)
# See. EPC069-12 Version 2.1 dtd. 9 February 2012 example 2
assert len(data) == 103
encoding = 'iso-8859-1'
d = [x.decode(encoding) for x in data.split(b'\n')]
assert 11 == len(d)
assert 'BCD' == d[0]
assert '002' == d[1]
assert '2' == d[2]
assert 'SCT' == d[3]
assert name == d[5]
assert iban == d[6]
assert 'EUR12.3' == d[7]
assert '' == d[8]
assert '' == d[9]
assert text == d[10]
qr = make_epc_qr(**kw)
assert qr
assert not qr.is_micro
assert qr.version <= 13
assert 'M' == qr.error
@pytest.mark.parametrize('expected_amount, amount', [('EUR1000', 1000),
('EUR1000', 1000.0),
('EUR2000', decimal.Decimal('2000'))])
def test_trailing_zeros(expected_amount, amount):
name = "François D'Alsace S.A."
iban = 'FR1420041010050500013M02606'
text = 'Client:Marie Louise La Lune'
kw = dict(name=name, iban=iban, text=text, amount=amount)
data = make_epc_qr_data(**kw)
assert len(data) == 103 # See. EPC069-12 Version 2.1 dtd. 9 February 2012 example 2
encoding = 'iso-8859-1'
d = [x.decode(encoding) for x in data.split(b'\n')]
assert expected_amount == d[7]
@pytest.mark.parametrize('amount', [5.0, 5, '5.00', decimal.Decimal('5.00000')])
def test_remove_dot(amount):
kw = _make_valid_kw()
kw['amount'] = amount
d = make_epc_qr_data(**kw).split(b'\n')
assert b'EUR5' == d[7]
@pytest.mark.parametrize('amount', [12.3,
12.30,
decimal.Decimal('12.3'),
decimal.Decimal('12.30'),
'12.3',
'12.30'])
def test_reference_002(amount):
name = 'Franz Mustermänn'
iban = 'DE71110220330123456789'
reference = 'RF18539007547034'
purpose = 'GDDS'
bic = 'BHBLDEHHXXX'
kw = dict(name=name,
iban=iban,
reference=reference,
bic=bic,
purpose=purpose,
amount=amount,
encoding=1)
data = make_epc_qr_data(**kw)
assert len(data) == 96 # See. EPC069-12 Version 2.1 dtd. 9 February 2012 example 1
encoding = 'utf-8'
d = [x.decode(encoding) for x in data.split(b'\n')]
assert 10 == len(d)
assert 'BCD' == d[0]
assert '002' == d[1]
assert '1' == d[2]
assert 'SCT' == d[3]
assert name == d[5]
assert iban == d[6]
assert 'EUR12.3' == d[7]
assert purpose == d[8]
assert reference == d[9]
qr = make_epc_qr(**kw)
assert qr
assert not qr.is_micro
assert qr.version <= 13
assert 'M' == qr.error
def _make_valid_kw():
return dict(name="François D'Alsace S.A.",
iban='FR1420041010050500013M02606',
text='Client:Marie Louise La Lune',
amount=12.3)
@pytest.mark.parametrize('amount', [0,
0.004,
'0.001',
'999999999.999',
9999999990.99])
def test_invalid_amount(amount):
kw = _make_valid_kw()
kw['amount'] = amount
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'amount' in str(ex.value)
with pytest.raises(ValueError) as ex:
make_epc_qr(**kw)
assert 'amount' in str(ex.value)
@pytest.mark.parametrize('bic', ['BHBLDE',  # too short
                                 'BHBLDEHHXXXX',  # too long
                                 'BHBLDEHHXX',  # 10 chars: a BIC must be exactly 8 or 11 chars, nothing in between
                                 'BHBLDEH ',  # too short after stripping trailing whitespace
                                 ])
def test_invalid_bic(bic):
kw = _make_valid_kw()
kw['bic'] = bic
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'BIC' in str(ex.value)
with pytest.raises(ValueError) as ex:
make_epc_qr(**kw)
assert 'BIC' in str(ex.value)
def test_utf8_required():
kw = _make_valid_kw()
kw['name'] = 'Funny 😃 name'
d = make_epc_qr_data(**kw).split(b'\n')
assert b'1' == d[2]
def test_utf8_explicit():
kw = _make_valid_kw()
kw['encoding'] = 'utf-8'
kw['name'] = 'Funny 😃 name'
d = make_epc_qr_data(**kw).split(b'\n')
assert b'1' == d[2]
def test_utf8_explicit2():
kw = _make_valid_kw()
kw['encoding'] = 1
kw['name'] = 'Funny 😃 name'
d = make_epc_qr_data(**kw).split(b'\n')
assert b'1' == d[2]
@pytest.mark.parametrize('encoding', range(1, 9))
def test_valid_encoding(encoding):
kw = _make_valid_kw()
kw['name'] = 'Simple name'
kw['encoding'] = encoding
d = make_epc_qr_data(**kw).split(b'\n')
assert str(encoding).encode() == d[2]
qr = make_epc_qr(**kw)
assert qr
@pytest.mark.parametrize('encoding', [0, 9, '1', b'8', 1.0, 'shift-jis'])
def test_illegal_encoding(encoding):
kw = _make_valid_kw()
kw['encoding'] = encoding
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'encoding' in str(ex.value)
with pytest.raises(ValueError) as ex:
make_epc_qr(**kw)
assert 'encoding' in str(ex.value)
@pytest.mark.parametrize('text,reference', [('', ''), (' ', ' '),
('', None), (None, None),
(None, ' '),
])
def test_no_text_no_reference(text, reference):
kw = _make_valid_kw()
kw['text'] = text
kw['reference'] = reference
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'reference' in str(ex.value)
with pytest.raises(ValueError) as ex:
make_epc_qr(**kw)
assert 'reference' in str(ex.value)
@pytest.mark.parametrize('iban', ['DE1' + '1' * 34,
'',
None])
def test_illegal_iban(iban):
kw = _make_valid_kw()
kw['iban'] = iban
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'IBAN' in str(ex.value)
with pytest.raises(ValueError) as ex:
make_epc_qr(**kw)
assert 'IBAN' in str(ex.value)
@pytest.mark.parametrize('purpose', ['DE1', 'x', 'CDCBC'])
def test_illegal_purpose(purpose):
kw = _make_valid_kw()
kw['purpose'] = purpose
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'purpose' in str(ex.value)
with pytest.raises(ValueError) as ex:
make_epc_qr(**kw)
assert 'purpose' in str(ex.value)
@pytest.mark.parametrize('name', [None, '',
'a' * 71, # too long
])
def test_illegal_name(name):
kw = _make_valid_kw()
kw['name'] = name
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'name' in str(ex.value)
with pytest.raises(ValueError) as ex:
make_epc_qr(**kw)
assert 'name' in str(ex.value)
def test_text_too_long():
kw = _make_valid_kw()
kw['text'] = 'a' * 141
kw['reference'] = None
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'text' in str(ex.value)
def test_reference_too_long():
kw = _make_valid_kw()
kw['text'] = None
kw['reference'] = 'r' * 36
with pytest.raises(ValueError) as ex:
make_epc_qr_data(**kw)
assert 'reference' in str(ex.value)
if __name__ == '__main__':
pytest.main([__file__])
|
nilq/baby-python
|
python
|
import rope.base.builtins
import rope.base.codeanalyze
import rope.base.pynames
from rope.base import ast, exceptions, utils
from rope.refactor import patchedast
class Scope(object):
def __init__(self, pycore, pyobject, parent_scope):
self.pycore = pycore
self.pyobject = pyobject
self.parent = parent_scope
def get_names(self):
"""Return the names defined or imported in this scope"""
return self.pyobject.get_attributes()
def get_defined_names(self):
"""Return the names defined in this scope"""
return self.pyobject._get_structural_attributes()
def get_name(self, name):
"""Return name `PyName` defined in this scope"""
if name not in self.get_names():
raise exceptions.NameNotFoundError("name %s not found" % name)
return self.get_names()[name]
def __getitem__(self, key):
"""The same as ``get_name(key)``"""
return self.get_name(key)
def __contains__(self, key):
"""The same as ``key in self.get_names()``"""
return key in self.get_names()
@utils.saveit
def get_scopes(self):
"""Return the subscopes of this scope
The returned scopes should be sorted by the order they appear.
"""
return self._create_scopes()
def lookup(self, name):
if name in self.get_names():
return self.get_names()[name]
if self.parent is not None:
return self.parent._propagated_lookup(name)
return None
def get_propagated_names(self):
"""Return the visible names of this scope
Return the names defined in this scope that are visible from
scopes containing this scope. This method returns the same
dictionary returned by `get_names()` except for `ClassScope`
which returns an empty dict.
"""
return self.get_names()
def _propagated_lookup(self, name):
if name in self.get_propagated_names():
return self.get_propagated_names()[name]
if self.parent is not None:
return self.parent._propagated_lookup(name)
return None
def _create_scopes(self):
return [
pydefined.get_scope() for pydefined in self.pyobject._get_defined_objects()
]
def _get_global_scope(self):
current = self
while current.parent is not None:
current = current.parent
return current
def get_start(self):
return self.pyobject.get_ast().lineno
def get_body_start(self):
body = self.pyobject.get_ast().body
if body:
return body[0].lineno
return self.get_start()
def get_end(self):
pymodule = self._get_global_scope().pyobject
return pymodule.logical_lines.logical_line_in(self.logical_end)[1]
@utils.saveit
def get_logical_end(self):
global_scope = self._get_global_scope()
return global_scope._scope_finder.find_scope_end(self)
start = property(get_start)
end = property(get_end)
logical_end = property(get_logical_end)
def get_kind(self):
pass
def get_region(self):
self._calculate_scope_regions_for_module()
node = self.pyobject.get_ast()
region = patchedast.node_region(node)
return region
def _calculate_scope_regions_for_module(self):
self._get_global_scope()._calculate_scope_regions()
def in_region(self, offset):
"""Checks if offset is in scope region"""
region = self.get_region()
return region[0] < offset < region[1]
class GlobalScope(Scope):
def __init__(self, pycore, module):
super(GlobalScope, self).__init__(pycore, module, None)
self.names = module._get_concluded_data()
def get_start(self):
return 1
def get_kind(self):
return "Module"
def get_name(self, name):
try:
return self.pyobject[name]
except exceptions.AttributeNotFoundError:
if name in self.builtin_names:
return self.builtin_names[name]
raise exceptions.NameNotFoundError("name %s not found" % name)
@utils.saveit
def _calculate_scope_regions(self):
source = self._get_source()
patchedast.patch_ast(self.pyobject.get_ast(), source)
def _get_source(self):
return self.pyobject.source_code
def get_names(self):
if self.names.get() is None:
result = dict(self.builtin_names)
result.update(super(GlobalScope, self).get_names())
self.names.set(result)
return self.names.get()
def get_inner_scope_for_line(self, lineno, indents=None):
return self._scope_finder.get_holding_scope(self, lineno, indents)
def get_inner_scope_for_offset(self, offset):
return self._scope_finder.get_holding_scope_for_offset(self, offset)
@property
@utils.saveit
def _scope_finder(self):
return _HoldingScopeFinder(self.pyobject)
@property
def builtin_names(self):
return rope.base.builtins.builtins.get_attributes()
class ComprehensionScope(Scope):
def __init__(self, pycore, pyobject, visitor):
super(ComprehensionScope, self).__init__(
pycore, pyobject, pyobject.parent.get_scope()
)
self.names = None
self.returned_asts = None
self.defineds = None
self.visitor = visitor
def _get_names(self):
if self.names is None:
self._visit_comprehension()
return self.names
def get_names(self):
return self._get_names()
def _visit_comprehension(self):
if self.names is None:
new_visitor = self.visitor(self.pycore, self.pyobject)
for node in ast.get_child_nodes(self.pyobject.get_ast()):
ast.walk(node, new_visitor)
self.names = dict(self.parent.get_names())
self.names.update(new_visitor.names)
self.defineds = new_visitor.defineds
def get_logical_end(self):
return self.get_start()
logical_end = property(get_logical_end)
def get_body_start(self):
return self.get_start()
class FunctionScope(Scope):
def __init__(self, pycore, pyobject, visitor):
super(FunctionScope, self).__init__(
pycore, pyobject, pyobject.parent.get_scope()
)
self.names = None
self.returned_asts = None
self.is_generator = None
self.defineds = None
self.visitor = visitor
def _get_names(self):
if self.names is None:
self._visit_function()
return self.names
def _visit_function(self):
if self.names is None:
new_visitor = self.visitor(self.pycore, self.pyobject)
for n in ast.get_child_nodes(self.pyobject.get_ast()):
ast.walk(n, new_visitor)
self.names = new_visitor.names
self.names.update(self.pyobject.get_parameters())
self.returned_asts = new_visitor.returned_asts
self.is_generator = new_visitor.generator
self.defineds = new_visitor.defineds
def _get_returned_asts(self):
if self.names is None:
self._visit_function()
return self.returned_asts
def _is_generator(self):
if self.is_generator is None:
self._get_returned_asts()
return self.is_generator
def get_names(self):
return self._get_names()
def _create_scopes(self):
if self.defineds is None:
self._visit_function()
return [pydefined.get_scope() for pydefined in self.defineds]
def get_kind(self):
return "Function"
def invalidate_data(self):
for pyname in self.get_names().values():
if isinstance(
pyname,
(rope.base.pynames.AssignedName, rope.base.pynames.EvaluatedName),
):
pyname.invalidate()
class ClassScope(Scope):
def __init__(self, pycore, pyobject):
super(ClassScope, self).__init__(pycore, pyobject, pyobject.parent.get_scope())
def get_kind(self):
return "Class"
def get_propagated_names(self):
return {}
class _HoldingScopeFinder(object):
def __init__(self, pymodule):
self.pymodule = pymodule
def get_indents(self, lineno):
return rope.base.codeanalyze.count_line_indents(self.lines.get_line(lineno))
def _get_scope_indents(self, scope):
return self.get_indents(scope.get_start())
def get_holding_scope(self, module_scope, lineno, line_indents=None):
if line_indents is None:
line_indents = self.get_indents(lineno)
current_scope = module_scope
new_scope = current_scope
while new_scope is not None and (
new_scope.get_kind() == "Module"
or self._get_scope_indents(new_scope) <= line_indents
):
current_scope = new_scope
if (
current_scope.get_start() == lineno
and current_scope.get_kind() != "Module"
):
return current_scope
new_scope = None
for scope in current_scope.get_scopes():
if scope.get_start() <= lineno:
if lineno <= scope.get_end():
new_scope = scope
break
else:
break
return current_scope
def _is_empty_line(self, lineno):
line = self.lines.get_line(lineno)
return line.strip() == "" or line.lstrip().startswith("#")
def _get_body_indents(self, scope):
return self.get_indents(scope.get_body_start())
@staticmethod
def get_holding_scope_for_offset(scope, offset):
for inner_scope in scope.get_scopes():
if inner_scope.in_region(offset):
return _HoldingScopeFinder.get_holding_scope_for_offset(
inner_scope, offset
)
return scope
def find_scope_end(self, scope):
if not scope.parent:
return self.lines.length()
end = scope.pyobject.get_ast().body[-1].lineno
scope_start = self.pymodule.logical_lines.logical_line_in(scope.start)
if scope_start[1] >= end:
# handling one-liners
body_indents = self._get_scope_indents(scope) + 4
else:
body_indents = self._get_body_indents(scope)
for l in self.logical_lines.generate_starts(
min(end + 1, self.lines.length()), self.lines.length() + 1
):
if not self._is_empty_line(l):
if self.get_indents(l) < body_indents:
return end
else:
end = l
return end
@property
def lines(self):
return self.pymodule.lines
@property
def code(self):
return self.pymodule.source_code
@property
def logical_lines(self):
return self.pymodule.logical_lines
class TemporaryScope(Scope):
"""Currently used for list comprehensions and generator expressions
These scopes do not appear in the `get_scopes()` method of their
parent scopes.
"""
def __init__(self, pycore, parent_scope, names):
super(TemporaryScope, self).__init__(
pycore, parent_scope.pyobject, parent_scope
)
self.names = names
def get_names(self):
return self.names
def get_defined_names(self):
return self.names
def _create_scopes(self):
return []
def get_kind(self):
return "Temporary"
|
nilq/baby-python
|
python
|
import websocket
import json
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM) # set board mode to Broadcom
GPIO.setup(17, GPIO.OUT) # set up pin 17 TV
GPIO.setup(18, GPIO.OUT) # set up pin 18 Lights
GPIO.setup(22, GPIO.OUT) # set up pin 22 A/C
GPIO.setup(27, GPIO.OUT) # set up pin 27 Alarm
GPIO.output(17, 0) # turn off pin 17
GPIO.output(18, 0) # turn off pin 18
GPIO.output(22, 0) # turn off pin 22
GPIO.output(27, 0) # turn off pin 27
class Payload(object):
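    # Deserializes the incoming JSON message straight into instance attributes.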
def __init__(self, j):
self.__dict__ = json.loads(j)
def ws_uri():
return "wss://smarthouseintern.azurewebsites.net/ws"
def ws_on_message(ws, msg):
message = str(Payload(msg).data).lower()
if message != "":
if "tv" in message or "television" in message:
if "open" in message or "on" in message or "opened" in message:
print("TV Opened!!")
GPIO.output(17, 1) # turn on pin 17
if "close" in message or "off" in message or "closed" in message:
print("TV Closed!!")
GPIO.output(17, 0) # turn off pin 17
if "light" in message or "lights" in message:
if "open" in message or "on" in message or "opened" in message:
print("Lights Opened!!")
GPIO.output(18, 1) # turn on pin 18
if "close" in message or "off" in message or "closed" in message:
print("Lights Closed!!")
GPIO.output(18, 0) # turn off pin 18
if "ac" in message or "air" in message or "condition" in message or "conditioner" in message:
if "open" in message or "on" in message or "opened" in message:
print("ac Opened!!")
GPIO.output(22, 1) # turn on pin 22
if "close" in message or "off" in message or "closed" in message:
print("ac Closed!!")
GPIO.output(22, 0) # turn off pin 22
if "alarm" in message or "alarms" in message:
if "open" in message or "on" in message or "opened" in message:
print("alarm Opened!!")
GPIO.output(27, 1) # turn on pin 27
if "close" in message or "off" in message or "closed" in message:
print("alarm Closed!!")
GPIO.output(27, 0) # turn off pin 27
if "all" in message or "whole" in message:
if "open" in message or "on" in message or "opened" in message:
print("All Opened!!")
GPIO.output(17, 1) # turn on pin 17
GPIO.output(18, 1) # turn on pin 18
GPIO.output(22, 1) # turn on pin 22
GPIO.output(27, 1) # turn on pin 27
if "close" in message or "off" in message or "closed" in message:
print("All Closed!!")
GPIO.output(17, 0) # turn off pin 17
GPIO.output(18, 0) # turn off pin 18
GPIO.output(22, 0) # turn off pin 22
GPIO.output(27, 0) # turn off pin 27
def ws_on_error(ws, err):
print(err)
def ws_on_open(ws):
print("### WebSocket Opened ###")
def ws_on_close(ws):
print("### WebSocket Closed ###")
if __name__ == "__main__":
websocket.enableTrace(True)
ws = websocket.WebSocketApp(ws_uri(),
on_message = ws_on_message,
on_close = ws_on_close,
on_error = ws_on_error)
ws.on_open = ws_on_open
ws.run_forever()
|
nilq/baby-python
|
python
|
from replays_fetching.replay_fetcher import ReplayFetcher
replay_fetcher = ReplayFetcher()
replays = replay_fetcher.get_replays()
for index in range(len(replays)):
print('Replay #{0}: '.format(index) + str(replays[index]))
|
nilq/baby-python
|
python
|
# 2.3.5 Example: Range Class
class Range:
"""A class that mimic's the built-in range class."""
def __init__(self,start,stop=None,step=1):
"""Initialize a Range instance.
Semantics is similar to built-in range class.
"""
if step == 0:
raise ValueError('step cannot be 0')
if stop is None: # special case of range(n)
start,stop = 0,start # should be treated as if range(0,n)
# calculate the effective length once
self._length = max(0,(stop - start + step - 1)//step)
# need knowledge of start and step (but not stop) to support __getitem__
self._start = start
self._step = step
def __len__(self):
"""Return number of entries in the range."""
return self._length
def __getitem__(self,k):
"""Return entry at index k (using standard interpretation
if negative).
"""
if k < 0:
k += len(self) # attempt to convert negative index
if not 0 <= k < self._length:
raise IndexError('index out of range')
return self._start + k * self._step
#----------------------------- my main function -----------------------------
# Like the built-in range, this class expects integer arguments.
rg = Range(2, 20, 3)
print('0: length of rg is', len(rg))
for i in range(len(rg)):
    print(' ', rg[i])
|
nilq/baby-python
|
python
|
# Copyright 2018 Alibaba Group Holding Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import tf_euler
class DenseLogits(object):
def __init__(self, logits_dim):
self.out_fc = tf.layers.Dense(logits_dim, use_bias=False)
def __call__(self, inputs, **kwargs):
return self.out_fc(inputs)
class PosNegLogits(object):
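    # Dot-product logits between anchor embeddings and positive/negative sample embeddings.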
def __call__(self, emb, pos_emb, neg_emb):
logit = tf.matmul(emb, pos_emb, transpose_b=True)
neg_logit = tf.matmul(emb, neg_emb, transpose_b=True)
return logit, neg_logit
class CosineLogits(object):
def __call__(self, target_emb, context_emb):
normalized_x = tf.nn.l2_normalize(target_emb, axis=-1)
normalized_y = tf.nn.l2_normalize(context_emb, axis=-1)
logits = tf.reduce_sum(normalized_x * normalized_y, -1, True)
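        # Scale the cosine similarity (range [-1, 1]) to sharpen the logits.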
logits = logits * 5.0
return logits
|
nilq/baby-python
|
python
|
#
# Generated with RIFLEXDynamicCalculationParametersBlueprint
from dmt.blueprint import Blueprint
from dmt.dimension import Dimension
from dmt.attribute import Attribute
from dmt.enum_attribute import EnumAttribute
from dmt.blueprint_attribute import BlueprintAttribute
from sima.sima.blueprints.moao import MOAOBlueprint
class RIFLEXDynamicCalculationParametersBlueprint(MOAOBlueprint):
""""""
def __init__(self, name="RIFLEXDynamicCalculationParameters", package_path="sima/riflex", description=""):
super().__init__(name,package_path,description)
self.attributes.append(Attribute("name","string","",default=""))
self.attributes.append(Attribute("description","string","",default=""))
self.attributes.append(Attribute("_id","string","",default=""))
self.attributes.append(BlueprintAttribute("scriptableValues","sima/sima/ScriptableValue","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("irregularTimeSeries","sima/riflex/IrregularTimeSeriesParameters","",True))
self.attributes.append(BlueprintAttribute("irregularResponseAnalysis","sima/riflex/IrregularResponseAnalysis","",True))
self.attributes.append(BlueprintAttribute("timeDomainProcedure","sima/riflex/TimeDomainProcedure","",True))
self.attributes.append(BlueprintAttribute("envelopeCurveSpecification","sima/riflex/EnvelopeCurveSpecification","",True))
self.attributes.append(BlueprintAttribute("displacementResponseStorage","sima/riflex/DisplacementResponseStorage","",True))
self.attributes.append(BlueprintAttribute("forceResponseStorage","sima/riflex/ForceResponseStorage","",True))
self.attributes.append(BlueprintAttribute("sumForceResponseStorage","sima/riflex/SumForceResponseStorage","",True))
self.attributes.append(BlueprintAttribute("curvatureResponseStorage","sima/riflex/CurvatureResponseStorage","",True))
self.attributes.append(BlueprintAttribute("stressStorage","sima/riflex/StressStorage","",True))
self.attributes.append(BlueprintAttribute("turbineResponseStorage","sima/riflex/TurbineResponseStorage","",True))
self.attributes.append(BlueprintAttribute("turbineBladeResponseStorage","sima/riflex/TurbineBladeResponseStorage","",True))
self.attributes.append(BlueprintAttribute("supportVesselForceStorage","sima/riflex/SupportVesselForceStorage","",True))
self.attributes.append(BlueprintAttribute("bodyForceStorage","sima/riflex/BodyForceStorage","",True))
self.attributes.append(BlueprintAttribute("hlaElementForces","sima/riflex/HLAElementForce","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("hlaImportedBodies","sima/riflex/ImportVesselItem","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("segmentLengthVariations","sima/riflex/SegmentLengthVariationItem","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("temperatureVariations","sima/riflex/DynamicTemperatureVariationItem","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("pressureVariations","sima/riflex/DynamicPressureVariationItem","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("winchVariations","sima/riflex/DynamicWinchVariationItem","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("dynamicWindChange","sima/riflex/DynamicWindChange","",True))
self.attributes.append(BlueprintAttribute("windTurbineShutdown","sima/riflex/WindTurbineShutdown","",True))
self.attributes.append(BlueprintAttribute("bladePitchFault","sima/riflex/BladePitchFault","",True))
self.attributes.append(BlueprintAttribute("boundaryChangeGroups","sima/riflex/BoundaryChangeGroup","",True,Dimension("*")))
self.attributes.append(BlueprintAttribute("visualisationResponses","sima/riflex/DynmodVisualisationResponses","",True))
self.attributes.append(BlueprintAttribute("regularWaveAnalysis","sima/riflex/RegularWaveAnalaysis","",True))
self.attributes.append(BlueprintAttribute("regularWaveLoading","sima/riflex/RegularWaveLoading","",True))
self.attributes.append(BlueprintAttribute("regularVesselMotions","sima/riflex/RegularVesselMotion","",True,Dimension("*")))
self.attributes.append(Attribute("volumeForcesScaling","number","Scaling of volume forces.",default=1.0))
self.attributes.append(Attribute("specifiedForcesScaling","number","Scaling of specified (nodal) forces.",default=1.0))
self.attributes.append(Attribute("currentVelocitiesScaling","number","Scaling of current velocities.",default=1.0))
self.attributes.append(Attribute("changeStaticLoads","boolean","Change applied static loads at the start of the dynamic analysis",default=False))
self.attributes.append(BlueprintAttribute("dynamicLoads","sima/riflex/DynamicLoads","",True))
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from permutive.exceptions import PermutiveApiException
from .util import none_default_namedtuple
from .base import Resource
User = none_default_namedtuple('User', 'id, custom_id, properties, updated')
class UserResource(Resource):
def create(self):
"""
Creates a new user in remote
:return: User object consisting of only an id
"""
result = self.client.request('POST', '/users')
return User(**result)
def identify(self, user_id, custom_id, **properties):
"""
Associate a user to a known custom_id. This custom id can then be used to fetch a user using self.get
:param user_id: string Permutive user_id (eg: from a object returned from self.create)
:param custom_id: string or stringifyable value.
:param properties: user properties
:return: User
"""
result = self.client.request('POST', '/identify', data={
'id': user_id,
'custom_id': str(custom_id),
'properties': properties
})
return User(result.get('id'), custom_id, properties)
def create_and_identify(self, custom_id, **properties):
"""
Convenience method that calls self.create and self.identify sequentially
:param custom_id: string or stringifyable value.
:param properties: user properties
:return: User
"""
user = self.create()
if not user.id:
raise ValueError('Id was not present in the response from Permutive API')
return self.identify(user.id, custom_id, **properties)
def get(self, custom_id):
"""
Fetch a user from remote using a custom id
:param custom_id: string or stringifyable value.
:return: User|None
"""
try:
result = self.client.request('GET', '/identities/{}'.format(custom_id))
            result['id'] = result.pop('user_id')  # the API returns 'user_id'; rename it to match the User namedtuple
result['custom_id'] = custom_id
return User(**result)
except PermutiveApiException as e:
if e.status_code == 404:
return None
else:
raise e
def update(self, custom_id, **properties):
"""
Performs a partial update of a User object on remote.
NOTE: This method overwrites existing properties
:param custom_id: string or stringifyable value.
:param properties: user properties
:return: User|None
"""
result = self.client.request('PATCH', '/identities/{}'.format(custom_id), {
'properties': properties
})
if result is True:
return User(None, custom_id, properties)
return None
def delete(self, custom_id):
"""
Deletes a User object from remote using a custom_id
:param custom_id: string or stringifyable value
:return: Boolean
"""
return self.client.request('DELETE', '/identities/{}'.format(custom_id))
|
nilq/baby-python
|
python
|
# control.applications - Controller for comodit Applications entities.
# coding: utf-8
#
# Copyright 2010 Guardis SPRL, Liège, Belgium.
# Authors: Laurent Eschenauer <laurent.eschenauer@guardis.com>
#
# This software cannot be used and/or distributed without prior
# authorization from Guardis.
from __future__ import absolute_import
from comodit_client.api.exporter import Export
from comodit_client.api.importer import Import
from comodit_client.control.doc import ActionDoc
from comodit_client.control.exceptions import ArgumentException
from comodit_client.control.files import ApplicationFilesController
from comodit_client.control.organization_entity import OrganizationEntityController
from comodit_client.control.parameters import ApplicationParametersController
from comodit_client.control.store_helper import StoreHelper
from comodit_client.control.sync import AppSyncController
from . import completions
from comodit_client.util import prompt
from .rpmmodules import RpmModuleController
class ApplicationsController(OrganizationEntityController):
_template = "application.json"
def __init__(self):
super(ApplicationsController, self).__init__()
# sub-controllers
self._register_subcontroller(["files"], ApplicationFilesController())
self._register_subcontroller(["parameters"], ApplicationParametersController())
self._register_subcontroller(["rpm-module"], RpmModuleController())
self._register_subcontroller(["sync"], AppSyncController())
self._register(["lock"], self._lock, self._print_entity_completions)
self._register(["unlock"], self._unlock, self._print_entity_completions)
self._doc = "Applications handling."
# actions
self._register(["import"], self._import, self._print_import_completions)
self._register(["export"], self._export, self._print_export_completions)
helper = StoreHelper(self, "app")
self._register(["publish"], helper._publish, self._print_entity_completions)
self._register(["unpublish"], helper._unpublish, self._print_entity_completions)
self._register(["push"], helper._push, self._print_entity_completions)
self._register(["pull"], helper._pull, self._print_entity_completions)
self._register(["update-authorized"], helper._update_authorized, self._print_entity_completions)
self._register_action_doc(self._export_doc())
self._register_action_doc(self._import_doc())
self._register_action_doc(helper._publish_doc())
self._register_action_doc(helper._unpublish_doc())
self._register_action_doc(helper._push_doc())
self._register_action_doc(helper._pull_doc())
self._register_action_doc(self._lock_doc())
self._register_action_doc(self._unlock_doc())
self._register_action_doc(helper._update_authorized_doc())
def _get_collection(self, org_name):
return self._client.applications(org_name)
    def _lock_doc(self):
        return ActionDoc("lock", "<org_name> <app_name>", """
        Lock the application to disable updates.""")
    def _unlock_doc(self):
        return ActionDoc("unlock", "<org_name> <app_name> [--force]", """
        Unlock the application to re-enable updates.""")
def _prune_json_update(self, json_wrapper):
super(ApplicationsController, self)._prune_json_update(json_wrapper)
json_wrapper._del_field("organization")
json_wrapper._del_field("files")
json_wrapper._del_field("parameters")
# Export
def _print_export_completions(self, param_num, argv):
if param_num < 2:
self._print_entity_completions(param_num, argv)
elif param_num == 2:
completions.print_dir_completions()
def _export(self, argv):
self._options = self._config.options
app = self._get_entity(argv)
root_folder = app.name
if len(argv) > 2:
root_folder = argv[2]
export = Export(self._config.options.force)
export.export_application(app, root_folder)
def _export_doc(self):
return ActionDoc("export", "<org_name> <app_name> [<output_folder>] [--force]", """
Export application onto disk. --force option causes existing files to
be overwritten.""")
# Import
def _print_import_completions(self, param_num, argv):
if param_num < 1:
self._print_collection_completions(param_num, argv)
elif param_num == 1:
completions.print_dir_completions()
def _import(self, argv):
if len(argv) != 2:
raise ArgumentException("Wrong number of arguments")
org = self._client.get_organization(argv[0])
imp = Import(update_existing=self._config.options.update_existing)
imp.import_application(org, argv[1])
def _import_doc(self):
return ActionDoc("import", "<org_name> <src_folder> [--update-existing]", """
Import application from disk. --update-existing option causes existing entities
on server to be updated.""")
def _lock(self, argv):
app = self._get_entity(argv)
app.lock()
    def _unlock(self, argv):
        app = self._get_entity(argv)
        if not app.locked:
            print("application not locked")
        elif self._config.options.force or prompt.confirm(prompt="Unlock " + app.name + " ?", resp=False):
            app.unlock()
|
nilq/baby-python
|
python
|
foods = ('yu','wa','fan','cai','tang')
for foods2 in foods:
print(foods2)
foods = ('yu','wa','fan','cai','tang')
print(foods)
foods = ('yu','wa','fan','cai','tang','xia')
print(foods)
# 4
|
nilq/baby-python
|
python
|
import dojo.dojo as d
def test():
print(dir(d))
assert(d.test_function() is True)
|
nilq/baby-python
|
python
|
"""
Unittests for staros plugin
Uses the mock_device.py script to test the plugin.
"""
__author__ = "dwapstra"
import unittest
from unicon import Connection
from unicon.core.errors import SubCommandFailure
class TestStarosPluginConnect(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.c = Connection(hostname='host_name',
start=['mock_device_cli --os staros --state staros_connect'],
os='staros',
username='cisco',
tacacs_password='cisco')
cls.c.connect()
def test_execute(self):
r = self.c.execute('')
self.assertEqual(r, '')
r = self.c.execute([''])
self.assertEqual(r, '')
r = self.c.execute(['']*2)
self.assertEqual(r, ['', ''])
def test_configure(self):
r = self.c.configure('test\ntest123')
self.assertEqual(r, {'test': '123', 'test123': 'abc'})
def test_truncation_add_state_pattern(self):
sm = self.c.state_machine.get_state('config')
sm.add_state_pattern(r'^(.*?)(newpattern)*#\s?$')
r = self.c.configure('test_command')
self.assertEqual(r, 'executing test command')
if __name__ == "__main__":
unittest.main()
|
nilq/baby-python
|
python
|
# -------------------------------------------------------------------------------
# Copyright (c) 2017, Battelle Memorial Institute All rights reserved.
# Battelle Memorial Institute (hereinafter Battelle) hereby grants permission to any person or entity
# lawfully obtaining a copy of this software and associated documentation files (hereinafter the
# Software) to redistribute and use the Software in source and binary forms, with or without modification.
# Such person or entity may use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and may permit others to do so, subject to the following conditions:
# Redistributions of source code must retain the above copyright notice, this list of conditions and the
# following disclaimers.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
# the following disclaimer in the documentation and/or other materials provided with the distribution.
# Other than as used herein, neither the name Battelle Memorial Institute or Battelle may be used in any
# form whatsoever without the express written consent of Battelle.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
# BATTELLE OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
# General disclaimer for use with OSS licenses
#
# This material was prepared as an account of work sponsored by an agency of the United States Government.
# Neither the United States Government nor the United States Department of Energy, nor Battelle, nor any
# of their employees, nor any jurisdiction or organization that has cooperated in the development of these
# materials, makes any warranty, express or implied, or assumes any legal liability or responsibility for
# the accuracy, completeness, or usefulness or any information, apparatus, product, software, or process
# disclosed, or represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or service by trade name, trademark, manufacturer,
# or otherwise does not necessarily constitute or imply its endorsement, recommendation, or favoring by the United
# States Government or any agency thereof, or Battelle Memorial Institute. The views and opinions of authors expressed
# herein do not necessarily state or reflect those of the United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by BATTELLE for the
# UNITED STATES DEPARTMENT OF ENERGY under Contract DE-AC05-76RL01830
# -------------------------------------------------------------------------------
"""
Created on Sept 22, 2020
@author: Shiva Poudel
"""""
#from shared.sparql import SPARQLManager
#from shared.glm import GLMManager
import networkx as nx
import pandas as pd
import math
import argparse
import json
import sys
import os
import importlib
import numpy as np
import time
from tabulate import tabulate
from gridappsd import GridAPPSD, topics
from gridappsd.topics import simulation_output_topic, simulation_log_topic
global G, undirected_graph, loadbreaksw, exit_flag, measid_lbs, sw_status
global logfile
def on_message(headers, message):
global exit_flag
print('\nTOPOLOGY_VALIDATOR microservice response: ' + str(message), flush=True)
print('\nTOPOLOGY_VALIDATOR microservice response: ' + str(message), file=logfile)
exit_flag = True
def start(log_file, feeder_mrid, model_api_topic):
global logfile
logfile = log_file
global G, measid_lbs, loadbreaksw, undirected_graph
print("\nTOPOLOGY_VALIDATOR starting!!!------------------------------------------------------------")
print("\nTOPOLOGY_VALIDATOR starting!!!------------------------------------------------------------", file=logfile)
gapps = GridAPPSD()
# NOTE: Use of modelType as STATIC or OPERATIONAL will change the response
message = {"modelId": feeder_mrid,
"requestType": "LOOPS",
"modelType": "OPERATIONAL",
"resultFormat": "JSON"}
out_topic = "/topic/goss.gridappsd.model-validator.topology.out"
gapps.subscribe(out_topic, on_message)
in_topic = "/topic/goss.gridappsd.model-validator.topology.in"
gapps.send(in_topic, message)
print("TOPOLOGY_VALIDATOR sent request to microservice; waiting for response\n", flush=True)
print("TOPOLOGY_VALIDATOR sent request to microservice; waiting for response\n", file=logfile)
global exit_flag
exit_flag = False
while not exit_flag:
time.sleep(0.1)
def _main():
# for loading modules
if (os.path.isdir('shared')):
sys.path.append('.')
elif (os.path.isdir('../shared')):
sys.path.append('..')
parser = argparse.ArgumentParser()
parser.add_argument("--request", help="Simulation Request")
opts = parser.parse_args()
sim_request = json.loads(opts.request.replace("\'",""))
feeder_mrid = sim_request["power_system_config"]["Line_name"]
model_api_topic = "goss.gridappsd.process.request.data.powergridmodel"
log_file = open('topology_validator.log', 'w')
start(log_file, feeder_mrid, model_api_topic)
if __name__ == "__main__":
_main()
|
nilq/baby-python
|
python
|
import binascii
from web3.auto import w3
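# Decrypts a geth keystore file with its passphrase and prints the raw private key as hex.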
with open("/home/koshik/.ethereum/rinkeby/keystore/UTC--2018-06-10T05-43-22.134895238Z--9e63c0d223d9232a4f3076947ad7cff353cc1a28") as keyfile:
encrypted_key = keyfile.read()
private_key = w3.eth.account.decrypt(encrypted_key, 'koshik93')
print(binascii.b2a_hex(private_key))
|
nilq/baby-python
|
python
|
# Copyright 2021 Ian Eborn
# A sub-class of the "SimpleThirdPersonCamera" class, providing one implementaton of
# the collision-related elements of the camera-system.
#
# Specifically, this class primarily implements the "setupCollision" and "getNearestCollision" methods,
# using Panda3D's built-in collision system.
# Panda3D importations
from panda3d.core import CollisionNode, CollisionTraverser, CollisionHandlerQueue, CollisionSegment
# Import the base-class
from SimpleThirdPersonCamera import *
# The class that implements our camera-controller
class SimpleThirdPersonCameraPandaCollision(SimpleThirdPersonCamera):
def __init__(self, tilt, intendedDistance, shoulderSideDistance, height,
adjustmentSpeed, sideSwitchSpeed,
initialShoulderSide,
ownerNodePath,
camera,
colliderRadius = 1):
# This should be set before initialising the super-class, as
# it will be used in "setupCollision" (below), which is called
# by the super-class's constructor-method.
self.colliderRadius = colliderRadius
SimpleThirdPersonCamera.__init__(self, tilt, intendedDistance, shoulderSideDistance, height,
adjustmentSpeed, sideSwitchSpeed,
initialShoulderSide,
ownerNodePath,
camera)
# Build the collision-related elements that inform the camera's behaviour
#
# This implementation uses Panda's built-in collision-system
def setupCollision(self):
# A traverser, which enacts the actual collision-detection
self.traverser = CollisionTraverser()
# We'll use a queue, since we only want the nearest collision in a given update
self.collisionQueue = CollisionHandlerQueue()
# Our collision-objects: four segments, extending backwards for the "intended distance".
self.colliderNode = CollisionNode("camera collider")
self.colliderNode.addSolid(CollisionSegment(-self.colliderRadius, -self.colliderRadius, 0,
-self.colliderRadius, -self.intendedDistance, 0))
self.colliderNode.addSolid(CollisionSegment(self.colliderRadius, -self.colliderRadius, 0,
self.colliderRadius, -self.intendedDistance, 0))
self.colliderNode.addSolid(CollisionSegment(0, -self.colliderRadius, -self.colliderRadius,
0, -self.intendedDistance, -self.colliderRadius))
self.colliderNode.addSolid(CollisionSegment(0, -self.colliderRadius, self.colliderRadius,
0, -self.intendedDistance, self.colliderRadius))
self.colliderNode.setIntoCollideMask(0)
self.colliderNode.setFromCollideMask(1)
self.collider = self.cameraBase.attachNewNode(self.colliderNode)
# Add our collision -objects and -handler to our traverser
self.traverser.addCollider(self.collider, self.collisionQueue)
# Check for a collision relevant to the camera
#
# This implementation uses Panda's built-in collision-system
def getNearestCollision(self, sceneRoot):
# Ask the traverser to check for collisions
self.traverser.traverse(sceneRoot)
# If there have been any collisions...
if self.collisionQueue.getNumEntries() > 0:
# Sort the collision-entries, which orders them from
# nearest to furthest, I believe.
self.collisionQueue.sortEntries()
# Then get the first--i.e. nearest--of them.
entry = self.collisionQueue.getEntry(0)
# Now, use the collision-position to determine how far away the
# collision occurred from the camera's base-position, and return that.
pos = entry.getSurfacePoint(sceneRoot)
diff = self.cameraBase.getPos(sceneRoot) - pos
return diff.length()
        # If there were no collisions, just return the "intended distance"
return self.intendedDistance
# A method to clean up the controller's collision elements
def cleanupCollision(self):
if self.collider is not None:
self.traverser.removeCollider(self.collider)
self.collider.removeNode()
self.collider = None
self.colliderNode = None
self.traverser = None
self.collisionQueue = None
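# A minimal usage sketch (assumes a Panda3D ShowBase app with a `self.player`
# NodePath and the default `self.camera`; the argument values are illustrative only):
# camController = SimpleThirdPersonCameraPandaCollision(
#     tilt=-10, intendedDistance=30, shoulderSideDistance=3, height=5,
#     adjustmentSpeed=10, sideSwitchSpeed=10, initialShoulderSide=1,
#     ownerNodePath=self.player, camera=self.camera)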
|
nilq/baby-python
|
python
|
from hms_workflow_platform.core.queries.base.base_query import *
class EncounterQuery(BaseQuery):
def __init__(self, site):
super().__init__()
self.adapter = self.get_adapter(site)
self.query = self.adapter.query
self._site = site
def encounter_create(self, date_obj):
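        """Return encounter ids (AN for inpatient visits, VN otherwise) modified on or after date_obj."""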
date = date_obj.strftime('%Y-%m-%d')
query = ("select format_an(an) en, (modify_date || 'T' || modify_time) mdate "
"from visit "
f"where fix_visit_type_id = '1' and modify_date >= '{date}' "
"union "
"select format_vn(vn) en, (modify_date || 'T' || modify_time) mdate "
"from visit "
f"where fix_visit_type_id <> '1' and modify_date >= '{date}' "
"order by mdate")
result = self.query(query)
return result if result else None
def encounter_discharge(self, date_obj):
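        """Return encounter ids whose financial, doctor, or IPD discharge date is on or after date_obj."""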
date = date_obj.strftime('%Y-%m-%d')
query = (
"select format_vn(vn) en, (visit.financial_discharge_date || 'T' || visit.financial_discharge_time) mdate "
"from visit "
f"where visit.financial_discharge_date >= '{date}' and visit.fix_visit_type_id != '1' "
"union "
"select format_an(an) en, (visit.financial_discharge_date || 'T' || visit.financial_discharge_time) mdate "
"from visit "
f"where visit.financial_discharge_date >= '{date}' and visit.fix_visit_type_id = '1' "
"union "
"select format_vn(vn) en, (visit.doctor_discharge_date || 'T' || visit.doctor_discharge_time) mdate "
"from visit "
f"where visit.doctor_discharge_date >= '{date}' and visit.fix_visit_type_id != '1' "
"union "
"select format_an(an) en, (visit.doctor_discharge_date || 'T' || visit.doctor_discharge_time) mdate "
"from visit "
f"where visit.doctor_discharge_date >= '{date}' and visit.fix_visit_type_id = '1' "
"union "
"select format_an(an) en, (admit.ipd_discharge_date || 'T' || admit.ipd_discharge_time) mdate "
"from admit "
f"where admit.ipd_discharge_date >= '{date}'")
result = self.query(query)
return result if result else None
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Email: chenwx716@163.com
# DateTime: 2019-08-06 22:08:14
__author__ = 'chenwx'
import json
import requests
app_url = "http://127.0.0.1:9002"
req_url = app_url + "/api/v2/local"
json_headers = {"content-type": "application/json"}
class ShowLocal(object):
"""docstring for ShowLocal"""
def __init__(self):
super(ShowLocal, self).__init__()
def post(self, content):
mess = {
"key": "c1c2",
"obj": "local",
"content": content
}
r = requests.post(req_url, data=json.dumps(mess), headers=json_headers)
print("http status--------->> %s" % r.status_code)
print(r.text)
def cmd(self, body):
content = {
"task": "cmd",
"arg": body
}
self.post(content)
def unit(self, body):
content = {
"task": "unit",
"arg": body
}
self.post(content)
    def script(self, file):
content = {
"task": "script",
"arg": file
}
self.post(content)
task = ShowLocal()
task.cmd('ls /tmp')
task.cmd('uptime')
task.cmd('df -h')
task.unit('disk')
task.unit('disk_dict')
task.unit('uptime')
task.unit('uptime_dict')
task.unit('cpu')
task.unit('mem_dict')
task.script('/home/wait/code/f1.sh')
|
nilq/baby-python
|
python
|
#42) Coded triangle numbers
#The nth term of the sequence of triangle numbers is given by, tn = (1/2)*n*(n+1); so the first ten triangle numbers are:
#1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
#By converting each letter in a word to a number corresponding to its alphabetical position and adding these values we form a word value. For example, the word value for SKY is 19 + 11 + 25 = 55 = t10. If the word value is a triangle number then we shall call the word a triangle word.
#Using words.txt (right click and 'Save Link/Target As...'), a 16K text file containing nearly two-thousand common English words, how many are triangle words?
#%% Solution
def triangle_nums(x):
n = 1
while int(1/2*n*(n+1)) <= x:
yield int(1/2*n*(n+1))
n += 1
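# Letter scores: ord('A') == 65, so ord(letter) - 64 maps A..Z to 1..26.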
with open("p042_words.txt", mode='r') as doc:
list_words = doc.read().replace('"', '').split(',')
list_values = [sum(ord(x) - 64 for x in word) for word in list_words]
triangle_set = set(triangle_nums(max(list_values)))
list_triangle = [x for x in list_values if x in triangle_set]
print(len(list_triangle))
|
nilq/baby-python
|
python
|